remove obsolete test code
author Walther Neuper <walther.neuper@jku.at>
date Fri, 27 Nov 2020 20:25:58 +0100
changeset 60116 abf3853fa84d
parent 60115 1613406ad3f3
child 60117 3ea616c84845
remove obsolete test code
src/Tools/isac/BridgeJEdit/Calculation.thy
     1.1 --- a/src/Tools/isac/BridgeJEdit/Calculation.thy	Fri Nov 27 18:19:04 2020 +0100
     1.2 +++ b/src/Tools/isac/BridgeJEdit/Calculation.thy	Fri Nov 27 20:25:58 2020 +0100
     1.3 @@ -20,1079 +20,17 @@
     1.4  begin
     1.5  
     1.6  ML_file parseC.sml
     1.7 -ML_file "~~/test/HOL/SPARK/test-data.sml" (*..:*)ML \<open>g_c_d_siv_content\<close>
     1.8  
     1.9 +section \<open>new code for Outer_Syntax.command command_keyword>\<open>Example\<close>\<close>
    1.10  ML \<open>
    1.11  \<close> ML \<open>
    1.12  \<close> ML \<open>
    1.13  \<close> ML \<open>
    1.14  \<close>
    1.15  
     1.16 -section \<open>code for spark_open: cp. to cp_spark_vcs.ML, cp_spark_commands.ML\<close>
    1.17 -text \<open>
     1.18 -  We change the code for Example minimally, until everything works.
    1.19 -
     1.20 -  The code is separated from Calculation.thy into the files cp_spark_vcs.ML and cp_spark_commands.ML, 
     1.21 -  because text\<open>...\<close> and (*...*) for const_name>\<open>...\<close> etc. and @ {thms ...} cause errors.
     1.22 -
     1.23 -  Thus for Test_Isac (**)ML_file "cp_spark_vcs.ML", ""(*cp_spark_commands.ML*), "HOL-SPARK.SPARK"
     1.24 -  must be commented out.
    1.25 -\<close>
    1.26 -
    1.27 -subsection \<open>cp. HOL/SPARK/Tools/spark_vcs.ML\<close>
    1.28 +subsection \<open>code for parsing and storing\<close>
    1.29  ML \<open>
    1.30  \<close> ML \<open>
    1.31 -(*  Title:      src/Tools/isac/BridgeJEdit/cp_spark_vcs.ML
    1.32 -    Author:     Walther Neuper, JKU Linz
    1.33 -    (c) due to copyright terms
    1.34 -
    1.35 -Preliminary code for developing Outer_Syntax.command..Example from spark_open as a model.
    1.36 -The original signatures from this subsection are:
    1.37 -*)
    1.38 -\<close> ML \<open>
    1.39 -signature SPARK_VCS_PARTIALLY =
    1.40 -sig
    1.41 -  val err_unfinished: unit -> 'a
    1.42 -  val strip_number: string -> string * string
    1.43 -  val name_ord: string * string -> order
    1.44 -  structure VCtab: TABLE
    1.45 -  structure VCs: THEORY_DATA
    1.46 -  val to_lower: string -> string
    1.47 -  val partition_opt: ('a -> 'b option) -> 'a list -> 'b list * 'a list
    1.48 -  val dest_def: 'a * Fdl_Parser.fdl_rule -> ('a * (string * Fdl_Parser.expr)) option
    1.49 -  val builtin: unit Symtab.table
    1.50 -  val complex_expr: Fdl_Parser.expr -> bool
    1.51 -  val complex_rule: Fdl_Parser.fdl_rule -> bool
    1.52 -  val is_trivial_vc: 'a list * ('b * Fdl_Parser.expr) list -> bool
    1.53 -  val rulenames: ((string * int) * 'a) list -> string
    1.54 -  val is_pfun: Symtab.key -> bool
    1.55 -  val fold_opt: ('a -> 'b -> 'b) -> 'a option -> 'b -> 'b
    1.56 -  val fold_pair: ('a -> 'b -> 'c) -> ('d -> 'c -> 'e) -> 'a * 'd -> 'b -> 'e
    1.57 -  val fold_expr: (string -> 'a -> 'a) -> (string -> 'a -> 'a) -> Fdl_Parser.expr -> 'a -> 'a
    1.58 -  val add_expr_pfuns: 'a Fdl_Parser.tab -> Fdl_Parser.expr -> Symtab.key list -> Symtab.key list
    1.59 -  val lookup_prfx: string -> 'a Symtab.table -> Symtab.key -> 'a option
    1.60 -  val fold_vcs: ('a -> 'b -> 'b) -> ('c * 'd * 'a list * 'a list) VCtab.table -> 'b -> 'b
    1.61 -  val pfuns_of_vcs: string -> 'a Fdl_Parser.tab -> 'b Symtab.table -> ('c * 'd * ('e * Fdl_Parser.expr) list * ('e * Fdl_Parser.expr) list) VCtab.table -> Symtab.key list
    1.62 -  val booleanN: string
    1.63 -  val integerN: string
    1.64 -  val get_type: theory -> string -> Symtab.key -> (typ * (string * string) list) option
    1.65 -  val mk_type': theory -> string -> string -> typ * (string * string) list
    1.66 -  val mk_type: theory -> string -> string -> typ
    1.67 -  val check_no_assoc: theory -> string -> string -> unit
    1.68 -  val define_overloaded: bstring * term -> Proof.context -> thm * local_theory
    1.69 -  val add_enum_type: string -> string -> theory -> theory
    1.70 -  val lcase_eq: string * string -> bool
    1.71 -  val check_enum: string list -> (string * 'a) list -> string option
    1.72 -  val strip_prfx: string -> string * string
    1.73 -  val assoc_ty_err: theory -> typ -> string -> string -> 'a
    1.74 -  val unprefix_pfun: string -> string -> string
    1.75 -  val invert_map: (''a * ''a) list -> (''a * 'b) list -> (''a * 'b) list
    1.76 -  val get_record_info: theory -> typ -> Record.info option
    1.77 -  val add_type_def: string -> string * Fdl_Parser.fdl_type -> ((term * bstring) Symtab.table * Name.context) * theory -> ((term * bstring) Symtab.table * Name.context) * theory
    1.78 -  val add_const: string -> bstring * string -> ((term * string) Symtab.table * Name.context) * theory -> term * (((term * string) Symtab.table * Name.context) * theory)
    1.79 -  val mk_unop: string -> term -> term
    1.80 -  val strip_underscores: string -> string
    1.81 -  val strip_tilde: string -> string
    1.82 -  val mangle_name: string -> string
    1.83 -  val mk_variables: theory -> string -> string list -> string -> (term * string) Symtab.table * Name.context -> term list * ((term * string) Symtab.table * Name.context)
    1.84 -  val find_field: (string * string) list -> string -> (string * 'a) list -> (string * 'a) option
    1.85 -  val find_field': ''a -> (''a list * 'b) list -> 'b option
    1.86 -  val mk_times: term * term -> term
    1.87 -  val term_of_expr: theory -> string -> Fdl_Parser.fdl_type Fdl_Parser.tab -> (('a list * string) option * term) Symtab.table -> (term * string) Symtab.table * Name.context -> Fdl_Parser.expr -> term * string
    1.88 -  val mk_rulename: string * int -> binding
    1.89 -  val add_def:
    1.90 -     string ->
    1.91 -       Fdl_Parser.fdl_type Fdl_Parser.tab ->
    1.92 -         (('a list * string) option * term) Symtab.table ->
    1.93 -           string Fdl_Parser.tab -> (string * int) * (string * Fdl_Parser.expr) -> ((term * string) Symtab.table * Name.context) * theory -> (binding * thm) * (((term * string) Symtab.table * Name.context) * theory)
    1.94 -  val add_expr_idents: Fdl_Parser.expr -> string list -> string list
    1.95 -  val sort_defs:
    1.96 -     string ->
    1.97 -       'a Fdl_Parser.tab ->
    1.98 -         'b Symtab.table -> (string -> bool) -> ((string * int) * (string * Fdl_Parser.expr)) list -> ((string * int) * (string * Fdl_Parser.expr)) list -> ((string * int) * (string * Fdl_Parser.expr)) list
    1.99 -  val add_var: string -> string * string -> ((term * string) Symtab.table * Name.context) * theory -> (string * typ) * (((term * string) Symtab.table * Name.context) * theory)
   1.100 -  val add_init_vars:
   1.101 -     string ->
   1.102 -       ('a * 'b * ('c * Fdl_Parser.expr) list * ('c * Fdl_Parser.expr) list) VCtab.table ->
   1.103 -         ((term * string) Symtab.table * Name.context) * theory -> (string * typ) list * (((term * string) Symtab.table * Name.context) * theory)
   1.104 -  val term_of_rule: theory -> string -> Fdl_Parser.fdl_type Fdl_Parser.tab -> (('a list * string) option * term) Symtab.table -> (term * string) Symtab.table * Name.context -> Fdl_Parser.fdl_rule -> term
   1.105 -  val pfun_type: theory -> string -> string list * string -> typ
   1.106 -  val check_pfun_type: theory -> string -> string -> term -> (string list * string) option -> (string list * string) option -> unit
   1.107 -  val upd_option: 'a option -> 'a option -> 'a option
   1.108 -  val check_pfuns_types: theory -> string -> (string list * string) Fdl_Parser.tab -> ((string list * string) option * term) Symtab.table -> ((string list * string) option * term) Symtab.table
   1.109 -  val pp_vcs: string -> (string * 'a) list -> Pretty.T
   1.110 -  val pp_open_vcs: (string * 'a) list -> Pretty.T
   1.111 -  val partition_vcs: ('a * 'b option * 'c * 'd) VCtab.table -> (VCtab.key * ('a * 'b * 'c * 'd)) list * (VCtab.key * ('a * 'c * 'd)) list
   1.112 -  val print_open_vcs: (Pretty.T -> Pretty.T) -> ('a * 'b option * 'c * 'd) VCtab.table -> ('a * 'b option * 'c * 'd) VCtab.table
   1.113 -end
   1.114 -\<close> ML \<open>
   1.115 -
   1.116 -(** theory data **)
   1.117 -
   1.118 -fun err_unfinished () = error "An unfinished SPARK environment is still open."
   1.119 -
   1.120 -val strip_number = apply2 implode o chop_suffix Fdl_Lexer.is_digit o raw_explode;
   1.121 -
   1.122 -val name_ord = prod_ord string_ord (option_ord int_ord) o
   1.123 -  apply2 (strip_number ##> Int.fromString);
   1.124 -
   1.125 -structure VCtab = Table(type key = string val ord = name_ord);
   1.126 -
   1.127 -structure VCs = Theory_Data
   1.128 -(
   1.129 -  (*/----------------------------- this is stored in thy --------------------------------\*)
   1.130 -  type T = (*Pure/General/table.ML:             'a *)
   1.131 -    {pfuns: ((string list * string) option * term) Symtab.table,
   1.132 -     type_map: (typ * (string * string) list) Symtab.table,
   1.133 -     env:
   1.134 -       {ctxt: Element.context_i list,
   1.135 -        defs: (binding * thm) list,
   1.136 -        types: Fdl_Parser.fdl_type Fdl_Parser.tab,
   1.137 -        funs: (string list * string) Fdl_Parser.tab,
   1.138 -        pfuns: ((string list * string) option * term) Symtab.table,
   1.139 -        ids: (term * string) Symtab.table * Name.context,
   1.140 -        proving: bool,
   1.141 -        vcs: (string * thm list option *
   1.142 -          (string * Fdl_Parser.expr) list * (string * Fdl_Parser.expr) list) VCtab.table,
   1.143 -        path: Path.T,
   1.144 -        prefix: string} option}
   1.145 -  (*\----------------------------- this is stored in thy --------------------------------/*)
   1.146 -  val empty : T = {pfuns = Symtab.empty, type_map = Symtab.empty, env = NONE}
   1.147 -  val extend = I
   1.148 -  fun merge ({pfuns = pfuns1, type_map = type_map1, env = NONE},
   1.149 -        {pfuns = pfuns2, type_map = type_map2, env = NONE}) =
   1.150 -        {pfuns = Symtab.merge (eq_pair (op =) (op aconv)) (pfuns1, pfuns2),
   1.151 -         type_map = Symtab.merge (op =) (type_map1, type_map2),
   1.152 -         env = NONE}
   1.153 -    | merge _ = err_unfinished ()
   1.154 -)
   1.155 -
   1.156 -(** utilities **)
   1.157 -
   1.158 -val to_lower = raw_explode #> map Symbol.to_ascii_lower #> implode;
   1.159 -
   1.160 -
   1.161 -(** set up verification conditions **)
   1.162 -
   1.163 -fun partition_opt f =
   1.164 -  let
   1.165 -    fun part ys zs [] = (rev ys, rev zs)
   1.166 -      | part ys zs (x :: xs) = (case f x of
   1.167 -            SOME y => part (y :: ys) zs xs
   1.168 -          | NONE => part ys (x :: zs) xs)
   1.169 -  in part [] [] end;
   1.170 -
   1.171 -fun dest_def (id, (Fdl_Parser.Substitution_Rule ([], Fdl_Parser.Ident s, rhs))) = SOME (id, (s, rhs))
   1.172 -  | dest_def _ = NONE;
   1.173 -
   1.174 -
   1.175 -val builtin = Symtab.make (map (rpair ())
   1.176 -  ["->", "<->", "or", "and", "not", "=", "<>", "<", ">", "<=", ">=",
   1.177 -   "+", "-", "*", "/", "div", "mod", "**"]);
   1.178 -
   1.179 -fun complex_expr (Fdl_Parser.Number _) = false
   1.180 -  | complex_expr (Fdl_Parser.Ident _) = false
   1.181 -  | complex_expr (Fdl_Parser.Funct (s, es)) =
   1.182 -      not (Symtab.defined builtin s) orelse exists complex_expr es
   1.183 -  | complex_expr (Fdl_Parser.Quantifier (_, _, _, e)) = complex_expr e
   1.184 -  | complex_expr _ = true;
   1.185 -
   1.186 -fun complex_rule (Fdl_Parser.Inference_Rule (es, e)) =
   1.187 -      complex_expr e orelse exists complex_expr es
   1.188 -  | complex_rule (Fdl_Parser.Substitution_Rule (es, e, e')) =
   1.189 -      complex_expr e orelse complex_expr e' orelse
   1.190 -      exists complex_expr es;
   1.191 -
   1.192 -fun is_trivial_vc ([], [(_, Fdl_Parser.Ident "true")]) = true
   1.193 -  | is_trivial_vc _ = false;
   1.194 -
   1.195 -
   1.196 -fun rulenames rules = commas
   1.197 -  (map (fn ((s, i), _) => s ^ "(" ^ string_of_int i ^ ")") rules);
   1.198 -
   1.199 -
   1.200 -\<close> ML \<open>
   1.201 -val is_pfun =
   1.202 -  Symtab.defined builtin orf
   1.203 -  can (unprefix "fld_") orf can (unprefix "upf_") orf
   1.204 -  can (unsuffix "__pos") orf can (unsuffix "__val") orf
   1.205 -  Library.equal "succ" orf Library.equal "pred";
   1.206 -\<close> ML \<open>
   1.207 -\<close> ML \<open>
   1.208 -
   1.209 -fun fold_opt f = the_default I o Option.map f;
   1.210 -fun fold_pair f g (x, y) = f x #> g y;
   1.211 -
   1.212 -fun fold_expr f g (Fdl_Parser.Funct (s, es)) = f s #> fold (fold_expr f g) es
   1.213 -  | fold_expr f g (Fdl_Parser.Ident s) = g s
   1.214 -  | fold_expr f g (Fdl_Parser.Number _) = I
   1.215 -  | fold_expr f g (Fdl_Parser.Quantifier (_, _, _, e)) = fold_expr f g e
   1.216 -  | fold_expr f g (Fdl_Parser.Element (e, es)) =
   1.217 -      fold_expr f g e #> fold (fold_expr f g) es
   1.218 -  | fold_expr f g (Fdl_Parser.Update (e, es, e')) =
   1.219 -      fold_expr f g e #> fold (fold_expr f g) es #> fold_expr f g e'
   1.220 -  | fold_expr f g (Fdl_Parser.Record (_, flds)) = fold (fold_expr f g o snd) flds
   1.221 -  | fold_expr f g (Fdl_Parser.Array (_, default, assocs)) =
   1.222 -      fold_opt (fold_expr f g) default #>
   1.223 -      fold (fold_pair
   1.224 -        (fold (fold (fold_pair
   1.225 -          (fold_expr f g) (fold_opt (fold_expr f g)))))
   1.226 -        (fold_expr f g)) assocs;
   1.227 -
   1.228 -
   1.229 -fun add_expr_pfuns funs = fold_expr
   1.230 -  (fn s => if is_pfun s then I else insert (op =) s)
   1.231 -  (fn s => if is_none (Fdl_Parser.lookup funs s) then I else insert (op =) s);
   1.232 -
   1.233 -fun lookup_prfx "" tab s = Symtab.lookup tab s
   1.234 -  | lookup_prfx prfx tab s = (case Symtab.lookup tab s of
   1.235 -        NONE => Symtab.lookup tab (prfx ^ "__" ^ s)
   1.236 -      | x => x);
   1.237 -
   1.238 -fun fold_vcs f vcs =
   1.239 -  VCtab.fold (fn (_, (_, _, ps, cs)) => fold f ps #> fold f cs) vcs;
   1.240 -
   1.241 -fun pfuns_of_vcs prfx funs pfuns vcs =
   1.242 -  fold_vcs (add_expr_pfuns funs o snd) vcs [] |>
   1.243 -  filter (is_none o lookup_prfx prfx pfuns);
   1.244 -
   1.245 -val booleanN = "boolean";
   1.246 -val integerN = "integer";
   1.247 -
   1.248 -
   1.249 -fun get_type thy prfx ty =
   1.250 -  let val {type_map, ...} = VCs.get thy
   1.251 -  in lookup_prfx prfx type_map ty end;
   1.252 -
   1.253 -fun mk_type' _ _ "integer" = (HOLogic.intT, [])
   1.254 -  | mk_type' _ _ "boolean" = (HOLogic.boolT, [])
   1.255 -  | mk_type' thy prfx ty =
   1.256 -      (case get_type thy prfx ty of
   1.257 -         NONE =>
   1.258 -           (Syntax.check_typ (Proof_Context.init_global thy)
   1.259 -              (Type (Sign.full_name thy (Binding.name ty), [])),
   1.260 -            [])
   1.261 -       | SOME p => p);
   1.262 -
   1.263 -fun mk_type thy prfx ty = fst (mk_type' thy prfx ty);
   1.264 -
   1.265 -fun check_no_assoc thy prfx s = case get_type thy prfx s of
   1.266 -    NONE => ()
   1.267 -  | SOME _ => error ("Cannot associate a type with " ^ s ^
   1.268 -      "\nsince it is no record or enumeration type");
   1.269 -
   1.270 -
   1.271 -fun define_overloaded (def_name, eq) lthy =
   1.272 -  let
   1.273 -    val ((c, _), rhs) = eq |> Syntax.check_term lthy |>
   1.274 -      Logic.dest_equals |>> dest_Free;
   1.275 -    val ((_, (_, thm)), lthy') = Local_Theory.define
   1.276 -      ((Binding.name c, NoSyn), ((Binding.name def_name, []), rhs)) lthy
   1.277 -    val ctxt_thy = Proof_Context.init_global (Proof_Context.theory_of lthy');
   1.278 -    val thm' = singleton (Proof_Context.export lthy' ctxt_thy) thm
   1.279 -  in (thm', lthy') end;
   1.280 -
   1.281 -(** generate properties of enumeration types **)
   1.282 -
   1.283 -fun add_enum_type tyname tyname' thy =
   1.284 -  let
   1.285 -    val {case_name, ...} = the (BNF_LFP_Compat.get_info thy [BNF_LFP_Compat.Keep_Nesting] tyname');
   1.286 -    val cs = map Const (the (BNF_LFP_Compat.get_constrs thy tyname'));
   1.287 -    val k = length cs;
   1.288 -    val T = Type (tyname', []);
   1.289 -    val p = Const (\<^const_name>\<open>pos\<close>, T --> HOLogic.intT);
   1.290 -    val v = Const (\<^const_name>\<open>val\<close>, HOLogic.intT --> T);
   1.291 -    val card = Const (\<^const_name>\<open>card\<close>,
   1.292 -      HOLogic.mk_setT T --> HOLogic.natT) $ HOLogic.mk_UNIV T;
   1.293 -
   1.294 -    fun mk_binrel_def s f = Logic.mk_equals
   1.295 -      (Const (s, T --> T --> HOLogic.boolT),
   1.296 -       Abs ("x", T, Abs ("y", T,
   1.297 -         Const (s, HOLogic.intT --> HOLogic.intT --> HOLogic.boolT) $
   1.298 -           (f $ Bound 1) $ (f $ Bound 0))));
   1.299 -
   1.300 -    val (((def1, def2), def3), lthy) = thy |>
   1.301 -
   1.302 -      Class.instantiation ([tyname'], [], \<^sort>\<open>spark_enum\<close>) |>
   1.303 -
   1.304 -      define_overloaded ("pos_" ^ tyname ^ "_def", Logic.mk_equals
   1.305 -        (p,
   1.306 -         list_comb (Const (case_name, replicate k HOLogic.intT @
   1.307 -             [T] ---> HOLogic.intT),
   1.308 -           map (HOLogic.mk_number HOLogic.intT) (0 upto k - 1)))) ||>>
   1.309 -
   1.310 -      define_overloaded ("less_eq_" ^ tyname ^ "_def",
   1.311 -        mk_binrel_def \<^const_name>\<open>less_eq\<close> p) ||>>
   1.312 -      define_overloaded ("less_" ^ tyname ^ "_def",
   1.313 -        mk_binrel_def \<^const_name>\<open>less\<close> p);
   1.314 -
   1.315 -    val UNIV_eq = Goal.prove lthy [] []
   1.316 -      (HOLogic.mk_Trueprop (HOLogic.mk_eq
   1.317 -         (HOLogic.mk_UNIV T, HOLogic.mk_set T cs)))
   1.318 -      (fn {context = ctxt, ...} =>
   1.319 -         resolve_tac ctxt @{thms subset_antisym} 1 THEN
   1.320 -         resolve_tac ctxt @{thms subsetI} 1 THEN
   1.321 -         Old_Datatype_Aux.exh_tac ctxt (K (#exhaust (BNF_LFP_Compat.the_info
   1.322 -           (Proof_Context.theory_of ctxt) [BNF_LFP_Compat.Keep_Nesting] tyname'))) 1 THEN
   1.323 -         ALLGOALS (asm_full_simp_tac ctxt));
   1.324 -
   1.325 -    val finite_UNIV = Goal.prove lthy [] []
   1.326 -      (HOLogic.mk_Trueprop (Const (\<^const_name>\<open>finite\<close>,
   1.327 -         HOLogic.mk_setT T --> HOLogic.boolT) $ HOLogic.mk_UNIV T))
   1.328 -      (fn {context = ctxt, ...} => simp_tac (ctxt addsimps [UNIV_eq]) 1);
   1.329 -
   1.330 -    val card_UNIV = Goal.prove lthy [] []
   1.331 -      (HOLogic.mk_Trueprop (HOLogic.mk_eq
   1.332 -         (card, HOLogic.mk_number HOLogic.natT k)))
   1.333 -      (fn {context = ctxt, ...} => simp_tac (ctxt addsimps [UNIV_eq]) 1);
   1.334 -
   1.335 -    val range_pos = Goal.prove lthy [] []
   1.336 -      (HOLogic.mk_Trueprop (HOLogic.mk_eq
   1.337 -         (Const (\<^const_name>\<open>image\<close>, (T --> HOLogic.intT) -->
   1.338 -            HOLogic.mk_setT T --> HOLogic.mk_setT HOLogic.intT) $
   1.339 -              p $ HOLogic.mk_UNIV T,
   1.340 -          Const (\<^const_name>\<open>atLeastLessThan\<close>, HOLogic.intT -->
   1.341 -            HOLogic.intT --> HOLogic.mk_setT HOLogic.intT) $
   1.342 -              HOLogic.mk_number HOLogic.intT 0 $
   1.343 -              (\<^term>\<open>int\<close> $ card))))
   1.344 -      (fn {context = ctxt, ...} =>
   1.345 -         simp_tac (ctxt addsimps [card_UNIV]) 1 THEN
   1.346 -         simp_tac (ctxt addsimps [UNIV_eq, def1]) 1 THEN
   1.347 -         resolve_tac ctxt @{thms subset_antisym} 1 THEN
   1.348 -         simp_tac ctxt 1 THEN
   1.349 -         resolve_tac ctxt @{thms subsetI} 1 THEN
   1.350 -         asm_full_simp_tac (ctxt addsimps @{thms interval_expand}
   1.351 -           delsimps @{thms atLeastLessThan_iff}) 1);
   1.352 -
   1.353 -    val lthy' =
   1.354 -      Class.prove_instantiation_instance (fn ctxt =>
   1.355 -        Class.intro_classes_tac ctxt [] THEN
   1.356 -        resolve_tac ctxt [finite_UNIV] 1 THEN
   1.357 -        resolve_tac ctxt [range_pos] 1 THEN
   1.358 -        simp_tac (put_simpset HOL_basic_ss ctxt addsimps [def3]) 1 THEN
   1.359 -        simp_tac (put_simpset HOL_basic_ss ctxt addsimps [def2]) 1) lthy;
   1.360 -
   1.361 -    val (pos_eqs, val_eqs) = split_list (map_index (fn (i, c) =>
   1.362 -      let
   1.363 -        val n = HOLogic.mk_number HOLogic.intT i;
   1.364 -        val th = Goal.prove lthy' [] []
   1.365 -          (HOLogic.mk_Trueprop (HOLogic.mk_eq (p $ c, n)))
   1.366 -          (fn {context = ctxt, ...} => simp_tac (ctxt addsimps [def1]) 1);
   1.367 -        val th' = Goal.prove lthy' [] []
   1.368 -          (HOLogic.mk_Trueprop (HOLogic.mk_eq (v $ n, c)))
   1.369 -          (fn {context = ctxt, ...} =>
   1.370 -             resolve_tac ctxt [@{thm inj_pos} RS @{thm injD}] 1 THEN
   1.371 -             simp_tac (ctxt addsimps [@{thm pos_val}, range_pos, card_UNIV, th]) 1)
   1.372 -      in (th, th') end) cs);
   1.373 -
   1.374 -    val first_el = Goal.prove lthy' [] []
   1.375 -      (HOLogic.mk_Trueprop (HOLogic.mk_eq
   1.376 -         (Const (\<^const_name>\<open>first_el\<close>, T), hd cs)))
   1.377 -      (fn {context = ctxt, ...} => simp_tac (ctxt addsimps [@{thm first_el_def}, hd val_eqs]) 1);
   1.378 -
   1.379 -    val last_el = Goal.prove lthy' [] []
   1.380 -      (HOLogic.mk_Trueprop (HOLogic.mk_eq
   1.381 -         (Const (\<^const_name>\<open>last_el\<close>, T), List.last cs)))
   1.382 -      (fn {context = ctxt, ...} =>
   1.383 -        simp_tac (ctxt addsimps [@{thm last_el_def}, List.last val_eqs, card_UNIV]) 1);
   1.384 -  in
   1.385 -    lthy' |>
   1.386 -    Local_Theory.note
   1.387 -      ((Binding.name (tyname ^ "_card"), @{attributes [simp]}), [card_UNIV]) ||>>
   1.388 -    Local_Theory.note
   1.389 -      ((Binding.name (tyname ^ "_pos"), @{attributes [simp]}), pos_eqs) ||>>
   1.390 -    Local_Theory.note
   1.391 -      ((Binding.name (tyname ^ "_val"), @{attributes [simp]}), val_eqs) ||>>
   1.392 -    Local_Theory.note
   1.393 -      ((Binding.name (tyname ^ "_first_el"), @{attributes [simp]}), [first_el]) ||>>
   1.394 -    Local_Theory.note
   1.395 -      ((Binding.name (tyname ^ "_last_el"), @{attributes [simp]}), [last_el]) |> snd |>
   1.396 -    Local_Theory.exit_global
   1.397 -  end;
   1.398 -
   1.399 -
   1.400 -val lcase_eq = (op =) o apply2 (to_lower o Long_Name.base_name);
   1.401 -
   1.402 -fun check_enum [] [] = NONE
   1.403 -  | check_enum els [] = SOME ("has no element(s) " ^ commas els)
   1.404 -  | check_enum [] cs = SOME ("has extra element(s) " ^
   1.405 -      commas (map (Long_Name.base_name o fst) cs))
   1.406 -  | check_enum (el :: els) ((cname, _) :: cs) =
   1.407 -      if lcase_eq (el, cname) then check_enum els cs
   1.408 -      else SOME ("either has no element " ^ el ^
   1.409 -        " or it is at the wrong position");
   1.410 -
   1.411 -fun strip_prfx s =
   1.412 -  let
   1.413 -    fun strip ys [] = ("", implode ys)
   1.414 -      | strip ys ("_" :: "_" :: xs) = (implode (rev xs), implode ys)
   1.415 -      | strip ys (x :: xs) = strip (x :: ys) xs
   1.416 -  in strip [] (rev (raw_explode s)) end;
   1.417 -
   1.418 -fun assoc_ty_err thy T s msg =
   1.419 -  error ("Type " ^ Syntax.string_of_typ_global thy T ^
   1.420 -    " associated with SPARK type " ^ s ^ "\n" ^ msg);
   1.421 -
   1.422 -fun check_enum [] [] = NONE
   1.423 -  | check_enum els [] = SOME ("has no element(s) " ^ commas els)
   1.424 -  | check_enum [] cs = SOME ("has extra element(s) " ^
   1.425 -      commas (map (Long_Name.base_name o fst) cs))
   1.426 -  | check_enum (el :: els) ((cname, _) :: cs) =
   1.427 -      if lcase_eq (el, cname) then check_enum els cs
   1.428 -      else SOME ("either has no element " ^ el ^
   1.429 -        " or it is at the wrong position");
   1.430 -
   1.431 -fun unprefix_pfun "" s = s
   1.432 -  | unprefix_pfun prfx s =
   1.433 -      let val (prfx', s') = strip_prfx s
   1.434 -      in if prfx = prfx' then s' else s end;
   1.435 -
   1.436 -fun invert_map [] = I
   1.437 -  | invert_map cmap =
   1.438 -      map (apfst (the o AList.lookup (op =) (map swap cmap)));
   1.439 -
   1.440 -fun get_record_info thy T = (case Record.dest_recTs T of
   1.441 -    [(tyname, [\<^typ>\<open>unit\<close>])] =>
   1.442 -      Record.get_info thy (Long_Name.qualifier tyname)
   1.443 -  | _ => NONE);
   1.444 -
   1.445 -
   1.446 -
   1.447 -fun add_type_def prfx (s, Fdl_Parser.Basic_Type ty) (ids, thy) =
   1.448 -      (check_no_assoc thy prfx s;
   1.449 -       (ids,
   1.450 -        Typedecl.abbrev_global (Binding.name s, [], NoSyn)
   1.451 -          (mk_type thy prfx ty) thy |> snd))
   1.452 -
   1.453 -  | add_type_def prfx (s, Fdl_Parser.Enum_Type els) ((tab, ctxt), thy) =
   1.454 -      let
   1.455 -        val (thy', tyname) = (case get_type thy prfx s of
   1.456 -            NONE =>
   1.457 -              let
   1.458 -                val tyb = Binding.name s;
   1.459 -                val tyname = Sign.full_name thy tyb
   1.460 -              in
   1.461 -                (thy |>
   1.462 -                 BNF_LFP_Compat.add_datatype [BNF_LFP_Compat.Keep_Nesting]
   1.463 -                   [((tyb, [], NoSyn),
   1.464 -                     map (fn s => (Binding.name s, [], NoSyn)) els)] |> snd |>
   1.465 -                 add_enum_type s tyname,
   1.466 -                 tyname)
   1.467 -              end
   1.468 -          | SOME (T as Type (tyname, []), cmap) =>
   1.469 -              (case BNF_LFP_Compat.get_constrs thy tyname of
   1.470 -                 NONE => assoc_ty_err thy T s "is not a datatype"
   1.471 -               | SOME cs =>
   1.472 -                   let val (prfx', _) = strip_prfx s
   1.473 -                   in
   1.474 -                     case check_enum (map (unprefix_pfun prfx') els)
   1.475 -                         (invert_map cmap cs) of
   1.476 -                       NONE => (thy, tyname)
   1.477 -                     | SOME msg => assoc_ty_err thy T s msg
   1.478 -                   end)
   1.479 -          | SOME (T, _) => assoc_ty_err thy T s "is not a datatype");
   1.480 -        val cs = map Const (the (BNF_LFP_Compat.get_constrs thy' tyname));
   1.481 -      in
   1.482 -        ((fold (Symtab.update_new o apsnd (rpair s)) (els ~~ cs) tab,
   1.483 -          fold Name.declare els ctxt),
   1.484 -         thy')
   1.485 -      end
   1.486 -
   1.487 -  | add_type_def prfx (s, Fdl_Parser.Array_Type (argtys, resty)) (ids, thy) =
   1.488 -      (check_no_assoc thy prfx s;
   1.489 -       (ids,
   1.490 -        Typedecl.abbrev_global (Binding.name s, [], NoSyn)
   1.491 -          (foldr1 HOLogic.mk_prodT (map (mk_type thy prfx) argtys) -->
   1.492 -             mk_type thy prfx resty) thy |> snd))
   1.493 -
   1.494 -  | add_type_def prfx (s, Fdl_Parser.Record_Type fldtys) (ids, thy) =
   1.495 -      (ids,
   1.496 -       let val fldTs = maps (fn (flds, ty) =>
   1.497 -         map (rpair (mk_type thy prfx ty)) flds) fldtys
   1.498 -       in case get_type thy prfx s of
   1.499 -           NONE =>
   1.500 -             Record.add_record {overloaded = false} ([], Binding.name s) NONE
   1.501 -               (map (fn (fld, T) => (Binding.name fld, T, NoSyn)) fldTs) thy
   1.502 -         | SOME (rT, cmap) =>
   1.503 -             (case get_record_info thy rT of
   1.504 -                NONE => assoc_ty_err thy rT s "is not a record type"
   1.505 -              | SOME {fields, ...} =>
   1.506 -                  let val fields' = invert_map cmap fields
   1.507 -                  in
   1.508 -                    (case subtract (lcase_eq o apply2 fst) fldTs fields' of
   1.509 -                       [] => ()
   1.510 -                     | flds => assoc_ty_err thy rT s ("has extra field(s) " ^
   1.511 -                         commas (map (Long_Name.base_name o fst) flds));
   1.512 -                     map (fn (fld, T) =>
   1.513 -                       case AList.lookup lcase_eq fields' fld of
   1.514 -                         NONE => assoc_ty_err thy rT s ("has no field " ^ fld)
   1.515 -                       | SOME U => T = U orelse assoc_ty_err thy rT s
   1.516 -                           ("has field " ^
   1.517 -                            fld ^ " whose type\n" ^
   1.518 -                            Syntax.string_of_typ_global thy U ^
   1.519 -                            "\ndoes not match declared type\n" ^
   1.520 -                            Syntax.string_of_typ_global thy T)) fldTs;
   1.521 -                     thy)
   1.522 -                  end)
   1.523 -       end)
   1.524 -
   1.525 -  | add_type_def prfx (s, Fdl_Parser.Pending_Type) (ids, thy) =
   1.526 -      (ids,
   1.527 -       case get_type thy prfx s of
   1.528 -         SOME _ => thy
   1.529 -       | NONE => Typedecl.typedecl_global {final = true} (Binding.name s, [], NoSyn) thy |> snd);
   1.530 -
   1.531 -fun add_type_def prfx (s, Fdl_Parser.Basic_Type ty) (ids, thy) =
   1.532 -      (check_no_assoc thy prfx s;
   1.533 -       (ids,
   1.534 -        Typedecl.abbrev_global (Binding.name s, [], NoSyn)
   1.535 -          (mk_type thy prfx ty) thy |> snd))
   1.536 -
   1.537 -  | add_type_def prfx (s, Fdl_Parser.Enum_Type els) ((tab, ctxt), thy) =
   1.538 -      let
   1.539 -        val (thy', tyname) = (case get_type thy prfx s of
   1.540 -            NONE =>
   1.541 -              let
   1.542 -                val tyb = Binding.name s;
   1.543 -                val tyname = Sign.full_name thy tyb
   1.544 -              in
   1.545 -                (thy |>
   1.546 -                 BNF_LFP_Compat.add_datatype [BNF_LFP_Compat.Keep_Nesting]
   1.547 -                   [((tyb, [], NoSyn),
   1.548 -                     map (fn s => (Binding.name s, [], NoSyn)) els)] |> snd |>
   1.549 -                 add_enum_type s tyname,
   1.550 -                 tyname)
   1.551 -              end
   1.552 -          | SOME (T as Type (tyname, []), cmap) =>
   1.553 -              (case BNF_LFP_Compat.get_constrs thy tyname of
   1.554 -                 NONE => assoc_ty_err thy T s "is not a datatype"
   1.555 -               | SOME cs =>
   1.556 -                   let val (prfx', _) = strip_prfx s
   1.557 -                   in
   1.558 -                     case check_enum (map (unprefix_pfun prfx') els)
   1.559 -                         (invert_map cmap cs) of
   1.560 -                       NONE => (thy, tyname)
   1.561 -                     | SOME msg => assoc_ty_err thy T s msg
   1.562 -                   end)
   1.563 -          | SOME (T, _) => assoc_ty_err thy T s "is not a datatype");
   1.564 -        val cs = map Const (the (BNF_LFP_Compat.get_constrs thy' tyname));
   1.565 -      in
   1.566 -        ((fold (Symtab.update_new o apsnd (rpair s)) (els ~~ cs) tab,
   1.567 -          fold Name.declare els ctxt),
   1.568 -         thy')
   1.569 -      end
   1.570 -
   1.571 -  | add_type_def prfx (s, Fdl_Parser.Array_Type (argtys, resty)) (ids, thy) =
   1.572 -      (check_no_assoc thy prfx s;
   1.573 -       (ids,
   1.574 -        Typedecl.abbrev_global (Binding.name s, [], NoSyn)
   1.575 -          (foldr1 HOLogic.mk_prodT (map (mk_type thy prfx) argtys) -->
   1.576 -             mk_type thy prfx resty) thy |> snd))
   1.577 -
   1.578 -  | add_type_def prfx (s, Fdl_Parser.Record_Type fldtys) (ids, thy) =
   1.579 -      (ids,
   1.580 -       let val fldTs = maps (fn (flds, ty) =>
   1.581 -         map (rpair (mk_type thy prfx ty)) flds) fldtys
   1.582 -       in case get_type thy prfx s of
   1.583 -           NONE =>
   1.584 -             Record.add_record {overloaded = false} ([], Binding.name s) NONE
   1.585 -               (map (fn (fld, T) => (Binding.name fld, T, NoSyn)) fldTs) thy
   1.586 -         | SOME (rT, cmap) =>
   1.587 -             (case get_record_info thy rT of
   1.588 -                NONE => assoc_ty_err thy rT s "is not a record type"
   1.589 -              | SOME {fields, ...} =>
   1.590 -                  let val fields' = invert_map cmap fields
   1.591 -                  in
   1.592 -                    (case subtract (lcase_eq o apply2 fst) fldTs fields' of
   1.593 -                       [] => ()
   1.594 -                     | flds => assoc_ty_err thy rT s ("has extra field(s) " ^
   1.595 -                         commas (map (Long_Name.base_name o fst) flds));
   1.596 -                     map (fn (fld, T) =>
   1.597 -                       case AList.lookup lcase_eq fields' fld of
   1.598 -                         NONE => assoc_ty_err thy rT s ("has no field " ^ fld)
   1.599 -                       | SOME U => T = U orelse assoc_ty_err thy rT s
   1.600 -                           ("has field " ^
   1.601 -                            fld ^ " whose type\n" ^
   1.602 -                            Syntax.string_of_typ_global thy U ^
   1.603 -                            "\ndoes not match declared type\n" ^
   1.604 -                            Syntax.string_of_typ_global thy T)) fldTs;
   1.605 -                     thy)
   1.606 -                  end)
   1.607 -       end)
   1.608 -
   1.609 -  | add_type_def prfx (s, Fdl_Parser.Pending_Type) (ids, thy) =
   1.610 -      (ids,
   1.611 -       case get_type thy prfx s of
   1.612 -         SOME _ => thy
   1.613 -       | NONE => Typedecl.typedecl_global {final = true} (Binding.name s, [], NoSyn) thy |> snd);
   1.614 -
   1.615 -fun add_const prfx (s, ty) ((tab, ctxt), thy) =
   1.616 -  let
   1.617 -    val T = mk_type thy prfx ty;
   1.618 -    val b = Binding.name s;
   1.619 -    val c = Const (Sign.full_name thy b, T)
   1.620 -  in
   1.621 -    (c,
   1.622 -     ((Symtab.update (s, (c, ty)) tab, Name.declare s ctxt),
   1.623 -      Sign.add_consts [(b, T, NoSyn)] thy))
   1.624 -  end;
   1.625 -
   1.626 -fun mk_unop s t =
   1.627 -  let val T = fastype_of t
   1.628 -  in Const (s, T --> T) $ t end;
   1.629 -
   1.630 -fun strip_underscores s =
   1.631 -  strip_underscores (unsuffix "_" s) handle Fail _ => s;
   1.632 -
   1.633 -fun strip_tilde s =
   1.634 -  unsuffix "~" s ^ "_init" handle Fail _ => s;
   1.635 -
   1.636 -val mangle_name = strip_underscores #> strip_tilde;
   1.637 -
   1.638 -fun mk_variables thy prfx xs ty (tab, ctxt) =
   1.639 -  let
   1.640 -    val T = mk_type thy prfx ty;
   1.641 -    val (ys, ctxt') = fold_map Name.variant (map mangle_name xs) ctxt;
   1.642 -    val zs = map (Free o rpair T) ys;
   1.643 -  in (zs, (fold (Symtab.update o apsnd (rpair ty)) (xs ~~ zs) tab, ctxt')) end;
   1.644 -
   1.645 -fun find_field [] fname fields =
   1.646 -      find_first (curry lcase_eq fname o fst) fields
   1.647 -  | find_field cmap fname fields =
   1.648 -      (case AList.lookup (op =) cmap fname of
   1.649 -         NONE => NONE
   1.650 -       | SOME fname' => SOME (fname', the (AList.lookup (op =) fields fname')));
   1.651 -
   1.652 -fun find_field' fname = get_first (fn (flds, fldty) =>
   1.653 -  if member (op =) flds fname then SOME fldty else NONE);
   1.654 -
   1.655 -fun mk_times (t, u) =
   1.656 -  let
   1.657 -    val setT = fastype_of t;
   1.658 -    val T = HOLogic.dest_setT setT;
   1.659 -    val U = HOLogic.dest_setT (fastype_of u)
   1.660 -  in
   1.661 -    Const (\<^const_name>\<open>Sigma\<close>, setT --> (T --> HOLogic.mk_setT U) -->
   1.662 -      HOLogic.mk_setT (HOLogic.mk_prodT (T, U))) $ t $ Abs ("", T, u)
   1.663 -  end;
   1.664 -
   1.665 -
   1.666 -fun term_of_expr thy prfx types pfuns =
   1.667 -  let
   1.668 -    fun tm_of vs (Fdl_Parser.Funct ("->", [e, e'])) =
   1.669 -          (HOLogic.mk_imp (fst (tm_of vs e), fst (tm_of vs e')), booleanN)
   1.670 -
   1.671 -      | tm_of vs (Fdl_Parser.Funct ("<->", [e, e'])) =
   1.672 -          (HOLogic.mk_eq (fst (tm_of vs e), fst (tm_of vs e')), booleanN)
   1.673 -
   1.674 -      | tm_of vs (Fdl_Parser.Funct ("or", [e, e'])) =
   1.675 -          (HOLogic.mk_disj (fst (tm_of vs e), fst (tm_of vs e')), booleanN)
   1.676 -
   1.677 -      | tm_of vs (Fdl_Parser.Funct ("and", [e, e'])) =
   1.678 -          (HOLogic.mk_conj (fst (tm_of vs e), fst (tm_of vs e')), booleanN)
   1.679 -
   1.680 -      | tm_of vs (Fdl_Parser.Funct ("not", [e])) =
   1.681 -          (HOLogic.mk_not (fst (tm_of vs e)), booleanN)
   1.682 -
   1.683 -      | tm_of vs (Fdl_Parser.Funct ("=", [e, e'])) =
   1.684 -          (HOLogic.mk_eq (fst (tm_of vs e), fst (tm_of vs e')), booleanN)
   1.685 -
   1.686 -      | tm_of vs (Fdl_Parser.Funct ("<>", [e, e'])) = (HOLogic.mk_not
   1.687 -          (HOLogic.mk_eq (fst (tm_of vs e), fst (tm_of vs e'))), booleanN)
   1.688 -
   1.689 -      | tm_of vs (Fdl_Parser.Funct ("<", [e, e'])) = (HOLogic.mk_binrel \<^const_name>\<open>less\<close>
   1.690 -          (fst (tm_of vs e), fst (tm_of vs e')), booleanN)
   1.691 -
   1.692 -      | tm_of vs (Fdl_Parser.Funct (">", [e, e'])) = (HOLogic.mk_binrel \<^const_name>\<open>less\<close>
   1.693 -          (fst (tm_of vs e'), fst (tm_of vs e)), booleanN)
   1.694 -
   1.695 -      | tm_of vs (Fdl_Parser.Funct ("<=", [e, e'])) = (HOLogic.mk_binrel \<^const_name>\<open>less_eq\<close>
   1.696 -          (fst (tm_of vs e), fst (tm_of vs e')), booleanN)
   1.697 -
   1.698 -      | tm_of vs (Fdl_Parser.Funct (">=", [e, e'])) = (HOLogic.mk_binrel \<^const_name>\<open>less_eq\<close>
   1.699 -          (fst (tm_of vs e'), fst (tm_of vs e)), booleanN)
   1.700 -
   1.701 -      | tm_of vs (Fdl_Parser.Funct ("+", [e, e'])) = (HOLogic.mk_binop \<^const_name>\<open>plus\<close>
   1.702 -          (fst (tm_of vs e), fst (tm_of vs e')), integerN)
   1.703 -
   1.704 -      | tm_of vs (Fdl_Parser.Funct ("-", [e, e'])) = (HOLogic.mk_binop \<^const_name>\<open>minus\<close>
   1.705 -          (fst (tm_of vs e), fst (tm_of vs e')), integerN)
   1.706 -
   1.707 -      | tm_of vs (Fdl_Parser.Funct ("*", [e, e'])) = (HOLogic.mk_binop \<^const_name>\<open>times\<close>
   1.708 -          (fst (tm_of vs e), fst (tm_of vs e')), integerN)
   1.709 -
   1.710 -      | tm_of vs (Fdl_Parser.Funct ("/", [e, e'])) = (HOLogic.mk_binop \<^const_name>\<open>divide\<close>
   1.711 -          (fst (tm_of vs e), fst (tm_of vs e')), integerN)
   1.712 -
   1.713 -      | tm_of vs (Fdl_Parser.Funct ("div", [e, e'])) = (HOLogic.mk_binop \<^const_name>\<open>sdiv\<close>
   1.714 -          (fst (tm_of vs e), fst (tm_of vs e')), integerN)
   1.715 -
   1.716 -      | tm_of vs (Fdl_Parser.Funct ("mod", [e, e'])) = (HOLogic.mk_binop \<^const_name>\<open>modulo\<close>
   1.717 -          (fst (tm_of vs e), fst (tm_of vs e')), integerN)
   1.718 -
   1.719 -      | tm_of vs (Fdl_Parser.Funct ("-", [e])) =
   1.720 -          (mk_unop \<^const_name>\<open>uminus\<close> (fst (tm_of vs e)), integerN)
   1.721 -
   1.722 -      | tm_of vs (Fdl_Parser.Funct ("**", [e, e'])) =
   1.723 -          (Const (\<^const_name>\<open>power\<close>, HOLogic.intT --> HOLogic.natT -->
   1.724 -             HOLogic.intT) $ fst (tm_of vs e) $
   1.725 -               (\<^const>\<open>nat\<close> $ fst (tm_of vs e')), integerN)
   1.726 -
   1.727 -      | tm_of (tab, _) (Fdl_Parser.Ident s) =
   1.728 -          (case Symtab.lookup tab s of
   1.729 -             SOME t_ty => t_ty
   1.730 -           | NONE => (case lookup_prfx prfx pfuns s of
   1.731 -               SOME (SOME ([], resty), t) => (t, resty)
   1.732 -             | _ => error ("Undeclared identifier " ^ s)))
   1.733 -
   1.734 -      | tm_of _ (Fdl_Parser.Number i) = (HOLogic.mk_number HOLogic.intT i, integerN)
   1.735 -
   1.736 -      | tm_of vs (Fdl_Parser.Quantifier (s, xs, ty, e)) =
   1.737 -          let
   1.738 -            val (ys, vs') = mk_variables thy prfx xs ty vs;
   1.739 -            val q = (case s of
   1.740 -                "for_all" => HOLogic.mk_all
   1.741 -              | "for_some" => HOLogic.mk_exists)
   1.742 -          in
   1.743 -            (fold_rev (fn Free (x, T) => fn t => q (x, T, t))
   1.744 -               ys (fst (tm_of vs' e)),
   1.745 -             booleanN)
   1.746 -          end
   1.747 -
   1.748 -      | tm_of vs (Fdl_Parser.Funct (s, es)) =
   1.749 -
   1.750 -          (* record field selection *)
   1.751 -          (case try (unprefix "fld_") s of
   1.752 -             SOME fname => (case es of
   1.753 -               [e] =>
   1.754 -                 let
   1.755 -                   val (t, rcdty) = tm_of vs e;
   1.756 -                   val (rT, cmap) = mk_type' thy prfx rcdty
   1.757 -                 in case (get_record_info thy rT, Fdl_Parser.lookup types rcdty) of
   1.758 -                     (SOME {fields, ...}, SOME (Fdl_Parser.Record_Type fldtys)) =>
   1.759 -                       (case (find_field cmap fname fields,
   1.760 -                            find_field' fname fldtys) of
   1.761 -                          (SOME (fname', fT), SOME fldty) =>
   1.762 -                            (Const (fname', rT --> fT) $ t, fldty)
   1.763 -                        | _ => error ("Record " ^ rcdty ^
   1.764 -                            " has no field named " ^ fname))
   1.765 -                   | _ => error (rcdty ^ " is not a record type")
   1.766 -                 end
   1.767 -             | _ => error ("Function " ^ s ^ " expects one argument"))
   1.768 -           | NONE =>
   1.769 -
   1.770 -          (* record field update *)
   1.771 -          (case try (unprefix "upf_") s of
   1.772 -             SOME fname => (case es of
   1.773 -               [e, e'] =>
   1.774 -                 let
   1.775 -                   val (t, rcdty) = tm_of vs e;
   1.776 -                   val (rT, cmap) = mk_type' thy prfx rcdty;
   1.777 -                   val (u, fldty) = tm_of vs e';
   1.778 -                   val fT = mk_type thy prfx fldty
   1.779 -                 in case get_record_info thy rT of
   1.780 -                     SOME {fields, ...} =>
   1.781 -                       (case find_field cmap fname fields of
   1.782 -                          SOME (fname', fU) =>
   1.783 -                            if fT = fU then
   1.784 -                              (Const (fname' ^ "_update",
   1.785 -                                 (fT --> fT) --> rT --> rT) $
   1.786 -                                   Abs ("x", fT, u) $ t,
   1.787 -                               rcdty)
   1.788 -                            else error ("Type\n" ^
   1.789 -                              Syntax.string_of_typ_global thy fT ^
   1.790 -                              "\ndoes not match type\n" ^
   1.791 -                              Syntax.string_of_typ_global thy fU ^
   1.792 -                              "\nof field " ^ fname)
   1.793 -                        | NONE => error ("Record " ^ rcdty ^
   1.794 -                            " has no field named " ^ fname))
   1.795 -                   | _ => error (rcdty ^ " is not a record type")
   1.796 -                 end
   1.797 -             | _ => error ("Function " ^ s ^ " expects two arguments"))
   1.798 -           | NONE =>
   1.799 -
   1.800 -          (* enumeration type to integer *)
   1.801 -          (case try (unsuffix "__pos") s of
   1.802 -             SOME tyname => (case es of
   1.803 -               [e] => (Const (\<^const_name>\<open>pos\<close>,
   1.804 -                   mk_type thy prfx tyname --> HOLogic.intT) $ fst (tm_of vs e),
   1.805 -                 integerN)
   1.806 -             | _ => error ("Function " ^ s ^ " expects one argument"))
   1.807 -           | NONE =>
   1.808 -
   1.809 -          (* integer to enumeration type *)
   1.810 -          (case try (unsuffix "__val") s of
   1.811 -             SOME tyname => (case es of
   1.812 -               [e] => (Const (\<^const_name>\<open>val\<close>,
   1.813 -                   HOLogic.intT --> mk_type thy prfx tyname) $ fst (tm_of vs e),
   1.814 -                 tyname)
   1.815 -             | _ => error ("Function " ^ s ^ " expects one argument"))
   1.816 -           | NONE =>
   1.817 -
   1.818 -          (* successor / predecessor of enumeration type element *)
   1.819 -          if s = "succ" orelse s = "pred" then (case es of
   1.820 -              [e] =>
   1.821 -                let
   1.822 -                  val (t, tyname) = tm_of vs e;
   1.823 -                  val T = mk_type thy prfx tyname
   1.824 -                in (Const
   1.825 -                  (if s = "succ" then \<^const_name>\<open>succ\<close>
   1.826 -                   else \<^const_name>\<open>pred\<close>, T --> T) $ t, tyname)
   1.827 -                end
   1.828 -            | _ => error ("Function " ^ s ^ " expects one argument"))
   1.829 -
   1.830 -          (* user-defined proof function *)
   1.831 -          else
   1.832 -            (case lookup_prfx prfx pfuns s of
   1.833 -               SOME (SOME (_, resty), t) =>
   1.834 -                 (list_comb (t, map (fst o tm_of vs) es), resty)
   1.835 -             | _ => error ("Undeclared proof function " ^ s))))))
   1.836 -
   1.837 -      | tm_of vs (Fdl_Parser.Element (e, es)) =
   1.838 -          let val (t, ty) = tm_of vs e
   1.839 -          in case Fdl_Parser.lookup types ty of
   1.840 -              SOME (Fdl_Parser.Array_Type (_, elty)) =>
   1.841 -                (t $ foldr1 HOLogic.mk_prod (map (fst o tm_of vs) es), elty)
   1.842 -            | _ => error (ty ^ " is not an array type")
   1.843 -          end
   1.844 -
   1.845 -      | tm_of vs (Fdl_Parser.Update (e, es, e')) =
   1.846 -          let val (t, ty) = tm_of vs e
   1.847 -          in case Fdl_Parser.lookup types ty of
   1.848 -              SOME (Fdl_Parser.Array_Type (idxtys, elty)) =>
   1.849 -                let
   1.850 -                  val T = foldr1 HOLogic.mk_prodT
   1.851 -                    (map (mk_type thy prfx) idxtys);
   1.852 -                  val U = mk_type thy prfx elty;
   1.853 -                  val fT = T --> U
   1.854 -                in
   1.855 -                  (Const (\<^const_name>\<open>fun_upd\<close>, fT --> T --> U --> fT) $
   1.856 -                     t $ foldr1 HOLogic.mk_prod (map (fst o tm_of vs) es) $
   1.857 -                       fst (tm_of vs e'),
   1.858 -                   ty)
   1.859 -                end
   1.860 -            | _ => error (ty ^ " is not an array type")
   1.861 -          end
   1.862 -
   1.863 -      | tm_of vs (Fdl_Parser.Record (s, flds)) =
   1.864 -          let
   1.865 -            val (T, cmap) = mk_type' thy prfx s;
   1.866 -            val {extension = (ext_name, _), fields, ...} =
   1.867 -              (case get_record_info thy T of
   1.868 -                 NONE => error (s ^ " is not a record type")
   1.869 -               | SOME info => info);
   1.870 -            val flds' = map (apsnd (tm_of vs)) flds;
   1.871 -            val fnames = fields |> invert_map cmap |>
   1.872 -              map (Long_Name.base_name o fst);
   1.873 -            val fnames' = map fst flds;
   1.874 -            val (fvals, ftys) = split_list (map (fn s' =>
   1.875 -              case AList.lookup lcase_eq flds' s' of
   1.876 -                SOME fval_ty => fval_ty
   1.877 -              | NONE => error ("Field " ^ s' ^ " missing in record " ^ s))
   1.878 -                  fnames);
   1.879 -            val _ = (case subtract lcase_eq fnames fnames' of
   1.880 -                [] => ()
   1.881 -              | xs => error ("Extra field(s) " ^ commas xs ^
   1.882 -                  " in record " ^ s));
   1.883 -            val _ = (case duplicates (op =) fnames' of
   1.884 -                [] => ()
   1.885 -              | xs => error ("Duplicate field(s) " ^ commas xs ^
   1.886 -                  " in record " ^ s))
   1.887 -          in
   1.888 -            (list_comb
   1.889 -               (Const (ext_name,
   1.890 -                  map (mk_type thy prfx) ftys @ [HOLogic.unitT] ---> T),
   1.891 -                fvals @ [HOLogic.unit]),
   1.892 -             s)
   1.893 -          end
   1.894 -
   1.895 -      | tm_of vs (Fdl_Parser.Array (s, default, assocs)) =
   1.896 -          (case Fdl_Parser.lookup types s of
   1.897 -             SOME (Fdl_Parser.Array_Type (idxtys, elty)) =>
   1.898 -               let
   1.899 -                 val Ts = map (mk_type thy prfx) idxtys;
   1.900 -                 val T = foldr1 HOLogic.mk_prodT Ts;
   1.901 -                 val U = mk_type thy prfx elty;
   1.902 -                 fun mk_idx' T (e, NONE) = HOLogic.mk_set T [fst (tm_of vs e)]
   1.903 -                   | mk_idx' T (e, SOME e') = Const (\<^const_name>\<open>atLeastAtMost\<close>,
   1.904 -                       T --> T --> HOLogic.mk_setT T) $
   1.905 -                         fst (tm_of vs e) $ fst (tm_of vs e');
   1.906 -                 fun mk_idx idx =
   1.907 -                   if length Ts <> length idx then
   1.908 -                     error ("Arity mismatch in construction of array " ^ s)
   1.909 -                   else foldr1 mk_times (map2 mk_idx' Ts idx);
   1.910 -                 fun mk_upd (idxs, e) t =
   1.911 -                   if length idxs = 1 andalso forall (is_none o snd) (hd idxs)
   1.912 -                   then
   1.913 -                     Const (\<^const_name>\<open>fun_upd\<close>, (T --> U) -->
   1.914 -                         T --> U --> T --> U) $ t $
   1.915 -                       foldl1 HOLogic.mk_prod
   1.916 -                         (map (fst o tm_of vs o fst) (hd idxs)) $
   1.917 -                       fst (tm_of vs e)
   1.918 -                   else
   1.919 -                     Const (\<^const_name>\<open>fun_upds\<close>, (T --> U) -->
   1.920 -                         HOLogic.mk_setT T --> U --> T --> U) $ t $
   1.921 -                       foldl1 (HOLogic.mk_binop \<^const_name>\<open>sup\<close>)
   1.922 -                         (map mk_idx idxs) $
   1.923 -                       fst (tm_of vs e)
   1.924 -               in
   1.925 -                 (fold mk_upd assocs
   1.926 -                    (case default of
   1.927 -                       SOME e => Abs ("x", T, fst (tm_of vs e))
   1.928 -                     | NONE => Const (\<^const_name>\<open>undefined\<close>, T --> U)),
   1.929 -                  s)
   1.930 -               end
   1.931 -           | _ => error (s ^ " is not an array type"))
   1.932 -
   1.933 -  in tm_of end;
   1.934 -
   1.935 -
   1.936 -fun mk_rulename (s, i) = Binding.name (s ^ string_of_int i);
   1.937 -
   1.938 -
   1.939 -fun add_def prfx types pfuns consts (id, (s, e)) (ids as (tab, ctxt), thy) =
   1.940 -  (case Fdl_Parser.lookup consts s of
   1.941 -    SOME ty =>
   1.942 -      let
   1.943 -        val (t, ty') = term_of_expr thy prfx types pfuns ids e;
   1.944 -        val T = mk_type thy prfx ty;
   1.945 -        val T' = mk_type thy prfx ty';
   1.946 -        val _ = T = T' orelse
   1.947 -          error ("Declared type " ^ ty ^ " of " ^ s ^
   1.948 -            "\ndoes not match type " ^ ty' ^ " in definition");
   1.949 -        val id' = mk_rulename id;
   1.950 -        val ((t', (_, th)), lthy') = Named_Target.theory_init thy
   1.951 -          |> Specification.definition NONE [] []
   1.952 -            ((id', []), HOLogic.mk_Trueprop (HOLogic.mk_eq (Free (s, T), t)));
   1.953 -        val phi =
   1.954 -          Proof_Context.export_morphism lthy'
   1.955 -            (Proof_Context.init_global (Proof_Context.theory_of lthy'));
   1.956 -      in
   1.957 -        ((id', Morphism.thm phi th),
   1.958 -          ((Symtab.update (s, (Morphism.term phi t', ty)) tab, Name.declare s ctxt),
   1.959 -            Local_Theory.exit_global lthy'))
   1.960 -       end
   1.961 -  | NONE => error ("Undeclared constant " ^ s));
   1.962 -
   1.963 -
   1.964 -
   1.965 -val add_expr_idents = fold_expr (K I) (insert (op =));
   1.966 -
   1.967 -(* sort definitions according to their dependency *)
   1.968 -fun sort_defs _ _ _ _ [] sdefs = rev sdefs
   1.969 -  | sort_defs prfx funs pfuns consts defs sdefs =
   1.970 -      (case find_first (fn (_, (_, e)) =>
   1.971 -         forall (is_some o lookup_prfx prfx pfuns)
   1.972 -           (add_expr_pfuns funs e []) andalso
   1.973 -         forall (fn id =>
   1.974 -           member (fn (s, (_, (s', _))) => s = s') sdefs id orelse
   1.975 -           consts id)
   1.976 -             (add_expr_idents e [])) defs of
   1.977 -         SOME d => sort_defs prfx funs pfuns consts
   1.978 -           (remove (op =) d defs) (d :: sdefs)
   1.979 -       | NONE => error ("Bad definitions: " ^ rulenames defs));
   1.980 -
   1.981 -fun add_var prfx (s, ty) (ids, thy) =
   1.982 -  let val ([Free p], ids') = mk_variables thy prfx [s] ty ids
   1.983 -  in (p, (ids', thy)) end;
   1.984 -
   1.985 -fun add_init_vars prfx vcs (ids_thy as ((tab, _), _)) =
   1.986 -  fold_map (add_var prfx)
   1.987 -    (map_filter
   1.988 -       (fn s => case try (unsuffix "~") s of
   1.989 -          SOME s' => (case Symtab.lookup tab s' of
   1.990 -            SOME (_, ty) => SOME (s, ty)
   1.991 -          | NONE => error ("Undeclared identifier " ^ s'))
   1.992 -        | NONE => NONE)
   1.993 -       (fold_vcs (add_expr_idents o snd) vcs []))
   1.994 -    ids_thy;
   1.995 -
   1.996 -fun term_of_rule thy prfx types pfuns ids rule =
   1.997 -  let val tm_of = fst o term_of_expr thy prfx types pfuns ids
   1.998 -  in case rule of
   1.999 -      Fdl_Parser.Inference_Rule (es, e) => Logic.list_implies
  1.1000 -        (map (HOLogic.mk_Trueprop o tm_of) es, HOLogic.mk_Trueprop (tm_of e))
  1.1001 -    | Fdl_Parser.Substitution_Rule (es, e, e') => Logic.list_implies
  1.1002 -        (map (HOLogic.mk_Trueprop o tm_of) es,
  1.1003 -         HOLogic.mk_Trueprop (HOLogic.mk_eq (tm_of e, tm_of e')))
  1.1004 -  end;
  1.1005 -
  1.1006 -
  1.1007 -fun pfun_type thy prfx (argtys, resty) =
  1.1008 -  map (mk_type thy prfx) argtys ---> mk_type thy prfx resty;
  1.1009 -
  1.1010 -fun check_pfun_type thy prfx s t optty1 optty2 =
  1.1011 -  let
  1.1012 -    val T = fastype_of t;
  1.1013 -    fun check ty =
  1.1014 -      let val U = pfun_type thy prfx ty
  1.1015 -      in
  1.1016 -        T = U orelse
  1.1017 -        error ("Type\n" ^
  1.1018 -          Syntax.string_of_typ_global thy T ^
  1.1019 -          "\nof function " ^
  1.1020 -          Syntax.string_of_term_global thy t ^
  1.1021 -          " associated with proof function " ^ s ^
  1.1022 -          "\ndoes not match declared type\n" ^
  1.1023 -          Syntax.string_of_typ_global thy U)
  1.1024 -      end
  1.1025 -  in (Option.map check optty1; Option.map check optty2; ()) end;
  1.1026 -
  1.1027 -fun upd_option x y = if is_some x then x else y;
  1.1028 -
  1.1029 -fun check_pfuns_types thy prfx funs =
  1.1030 -  Symtab.map (fn s => fn (optty, t) =>
  1.1031 -   let val optty' = Fdl_Parser.lookup funs (unprefix_pfun prfx s)
  1.1032 -   in
  1.1033 -     (check_pfun_type thy prfx s t optty optty';
  1.1034 -      (NONE |> upd_option optty |> upd_option optty', t))
  1.1035 -   end);
  1.1036 -
  1.1037 -
  1.1038 -
  1.1039 -
  1.1040 -fun pp_vcs msg vcs = Pretty.big_list msg (map (Pretty.str o fst) vcs);
  1.1041 -
  1.1042 -fun pp_open_vcs [] = Pretty.str "All verification conditions have been proved."
  1.1043 -  | pp_open_vcs vcs = pp_vcs
  1.1044 -      "The following verification conditions remain to be proved:" vcs;
  1.1045 -
  1.1046 -fun partition_vcs vcs = VCtab.fold_rev
  1.1047 -  (fn (name, (trace, SOME thms, ps, cs)) =>
  1.1048 -        apfst (cons (name, (trace, thms, ps, cs)))
  1.1049 -    | (name, (trace, NONE, ps, cs)) =>
  1.1050 -        apsnd (cons (name, (trace, ps, cs))))
  1.1051 -  vcs ([], []);
  1.1052 -
  1.1053 -fun print_open_vcs f vcs =
  1.1054 -  (Pretty.writeln (f (pp_open_vcs (snd (partition_vcs vcs)))); vcs);
  1.1055 -
   1.1056 -(*   set_env: Element.context_i list -> (binding * thm) list -> fdl_type tab ->
   1.1057 -       (string list * string) tab -> ((term * string) Symtab.table * Name.context) ->
   1.1058 -         (string * thm list option * (string * expr) list * (string * expr) list) VCtab.table ->
   1.1059 -           Path.T -> string -> theory -> theory 
   1.1060 -  Context --vvvv is made Theory_Data by -------------------vvvvvvv, see def. above *)
  1.1061 -fun set_env ctxt defs types funs ids vcs path prefix thy = VCs.map (fn
  1.1062 -    {pfuns, type_map, env = NONE} =>
  1.1063 -      {pfuns = pfuns,
  1.1064 -       type_map = type_map,
  1.1065 -       env = SOME
  1.1066 -         {ctxt = ctxt, defs = defs, types = types, funs = funs,
  1.1067 -          pfuns = check_pfuns_types thy prefix funs pfuns,
  1.1068 -          ids = ids, proving = false, vcs = print_open_vcs I vcs, path = path,
  1.1069 -          prefix = prefix}}
  1.1070 -  | _ => err_unfinished ()) thy;
  1.1071 -VCs.map: (VCs.T -> VCs.T) -> theory -> theory;
  1.1072 -(* Pure/context.ML
  1.1073 -signature THEORY_DATA =
  1.1074 -sig
  1.1075 -  type T                                  (*..VCs.T*)
  1.1076 -  val get: theory -> T
  1.1077 -  val put: T -> theory -> theory
  1.1078 -  val map: (T -> T) -> theory -> theory   (*..(VCs.T -> VCs.T)*)
  1.1079 -end;
  1.1080 -*)
  1.1081 -\<close> ML \<open>
  1.1082  structure Refs_Data = Theory_Data
  1.1083  (
  1.1084    type T = References.T
  1.1085 @@ -1100,6 +38,15 @@
  1.1086    val extend = I
  1.1087    fun merge (_, refs) = refs
  1.1088  );
  1.1089 +(*/----------------------------- shift to test/../? ----------------------------------------\*)
  1.1090 +(* Pure/context.ML
  1.1091 +signature THEORY_DATA =
  1.1092 +sig
  1.1093 +  type T
  1.1094 +  val get: theory -> T
  1.1095 +  val put: T -> theory -> theory
  1.1096 +  val map: (T -> T) -> theory -> theory
  1.1097 +end;*)
  1.1098  (*
  1.1099  Refs_Data.empty;                                    (*in Refs_Data defined workers are hidden*)
  1.1100  Refs_Data.extend;                                   (*in Refs_Data defined workers are hidden*)
  1.1101 @@ -1110,6 +57,7 @@
  1.1102  Refs_Data.get: theory -> Refs_Data.T;                            (*from signature THEORY_DATA*)
  1.1103  Refs_Data.put: Refs_Data.T -> theory -> theory;                  (*from signature THEORY_DATA*)
  1.1104  Refs_Data.map: (Refs_Data.T -> Refs_Data.T) -> theory -> theory; (*from signature THEORY_DATA*)
  1.1105 +(*\----------------------------- shift to test/../? ----------------------------------------/*)
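(* Editorial sketch, not part of the changeset: a round trip through the Refs_Data slot,
   using only the THEORY_DATA operations quoted above. The names store_refs/lookup_refs
   are hypothetical; refs stands for any References.T value at the call site. *)
fun store_refs (refs: References.T) thy = Refs_Data.put refs thy;  (*functional theory update*)
fun lookup_refs thy = Refs_Data.get thy;      (*the declared default if nothing was stored*)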
  1.1106  \<close> ML \<open>
  1.1107  structure OMod_Data = Theory_Data
  1.1108  (
  1.1109 @@ -1127,17 +75,16 @@
  1.1110    fun merge (_, ctxt) = ctxt
  1.1111  )
  1.1112  \<close> ML \<open>
  1.1113 -\<close> ML \<open>
  1.1114  fun set_vcs _(*{types, vars, consts, funs} : Fdl_Parser.decls*)
  1.1115        _(*rules, _*) _(*(_, ident), vcs*) header path opt_prfx thy =
  1.1116    let
  1.1117 -  (*/----- keep running from spark_open \<open>greatest_common_divisor/g_c_d\<close> -----\*)
  1.1118 +  (*/----- keep running from spark_open \<open>greatest_common_divisor/g_c_d\<close> -----\* )
  1.1119      val ({types, vars, consts, funs} : Fdl_Parser.decls) = 
  1.1120        snd (Fdl_Parser.parse_declarations Position.start g_c_d_fdl_content)
  1.1121      val (rules, _) = Fdl_Parser.parse_rules Position.start g_c_d_rls_content
  1.1122      val ((_, ident), vcs) =
  1.1123        snd (Fdl_Parser.parse_vcs Fdl_Lexer.siv_header Position.start g_c_d_siv_content)
  1.1124 -  (*------ new argument:  ParseC.vcs = ParseC.formalise ----------------------*)
  1.1125 +  ( *------ new argument:  ParseC.vcs = ParseC.formalise ----------------------*)
  1.1126      val formalise = case header of xxx as
  1.1127        [(["Traegerlaenge L", "Streckenlast q_0", "Biegelinie y", "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", "FunktionsVariable x", "GleichungsVariablen [c, c_2, c_3, c_4]", "AbleitungBiegelinie dy"],
  1.1128          ("Biegelinie", ["Biegelinien"], ["IntegrierenUndKonstanteBestimmen2"]))] => hd xxx
  1.1129 @@ -1145,158 +92,28 @@
  1.1130    (*\----- keep running from spark_open \<open>greatest_common_divisor/g_c_d\<close> -----/*)
  1.1131      val (hdlPIDE, _, refs, o_model, var_type_ctxt) = Step_Specify.initialisePIDE formalise
  1.1132    (*     ^^^^^^^ goes to PIDE as "Problem hdlPIDE"                            *)
  1.1133 -
  1.1134 -    val prfx' =
  1.1135 -      if opt_prfx = "" then
  1.1136 -        space_implode "__" (Long_Name.explode (Long_Name.qualifier ident))
  1.1137 -      else opt_prfx;
  1.1138 -    val prfx = to_lower prfx';
  1.1139 -    val {pfuns, ...} = VCs.get thy;                                                       (*..thy*)
  1.1140 -    val (defs, rules') = partition_opt dest_def rules;
  1.1141 -    val consts' =
  1.1142 -      subtract (fn ((_, (s, _)), (s', _)) => s = s') defs (Fdl_Parser.items consts);
  1.1143 -    (* ignore all complex rules in rls files *)
  1.1144 -    val (rules'', other_rules) =
  1.1145 -      List.partition (complex_rule o snd) rules';
  1.1146 -    val _ = if null rules'' then ()
  1.1147 -      else warning ("Ignoring rules: " ^ rulenames rules'');
  1.1148 -
  1.1149 -    val vcs' = VCtab.make (maps (fn (tr, vcs) =>
  1.1150 -      map (fn (s, (ps, cs)) => (s, (tr, NONE, ps, cs)))
  1.1151 -        (filter_out (is_trivial_vc o snd) vcs)) vcs);
  1.1152 -
  1.1153 -    val _ = (case filter_out (is_some o Fdl_Parser.lookup funs)
  1.1154 -        (pfuns_of_vcs prfx funs pfuns vcs') of
  1.1155 -        [] => ()
  1.1156 -      | fs => error ("Undeclared proof function(s) " ^ commas fs));
  1.1157 -
  1.1158 -    val (((defs', vars''), ivars)(*->*),(*<-*) (ids, thy')) =                            (*..thy'*)
  1.1159 -      ((Symtab.empty |>
  1.1160 -        Symtab.update ("false", (\<^term>\<open>False\<close>, booleanN)) |>
  1.1161 -        Symtab.update ("true", (\<^term>\<open>True\<close>, booleanN)),     (*..(defs', vars'')*)
  1.1162 -        Name.context)(*->*),(*<-*)                           (*..ivars*)
  1.1163 -       thy |> Sign.add_path                                                               (*..thy*)
  1.1164 -         ((if prfx' = "" then "" else prfx' ^ "__") ^ Long_Name.base_name ident)) |>
  1.1165 -      fold (add_type_def prfx) (Fdl_Parser.items types) |>
  1.1166 -      fold (snd oo add_const prfx) consts' |> (fn ids_thy as ((tab, _), _) =>
  1.1167 -        ids_thy |>
  1.1168 -        fold_map (add_def prfx types pfuns consts)
  1.1169 -          (sort_defs prfx funs pfuns (Symtab.defined tab) defs []) ||>>
  1.1170 -        fold_map (add_var prfx) (Fdl_Parser.items vars) ||>>
  1.1171 -        add_init_vars prfx vcs');                            (*..(ids,                      thy')*)
  1.1172 -
  1.1173 -    val ctxt =
  1.1174 -      [Element.Fixes (map (fn (s, T) =>
  1.1175 -         (Binding.name s, SOME T, NoSyn)) (vars'' @ ivars)),
  1.1176 -       Element.Assumes (map (fn (id, rl) =>
  1.1177 -         ((mk_rulename id, []),
  1.1178 -          [(term_of_rule thy' prfx types pfuns ids rl, [])]))                            (*..thy'*)
  1.1179 -           other_rules),
  1.1180 -       Element.Notes ("", [((Binding.name "defns", []), map (rpair [] o single o snd) defs')])]
  1.1181 -
  1.1182    in
  1.1183 -    set_env ctxt defs' types funs ids vcs' path prfx thy'
  1.1184 +    thy
  1.1185        |> Refs_Data.put refs
  1.1186        |> OMod_Data.put o_model
  1.1187        |> Ctxt_Data.put var_type_ctxt
  1.1188    end;
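(* Editorial note, a hedged reading of the code above: set_vcs now only stores the data
   delivered by Step_Specify.initialisePIDE; a later access point reads it back with
   Refs_Data.get, OMod_Data.get and Ctxt_Data.get applied to the enriched theory. *)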
  1.1189  \<close> ML \<open>
  1.1190 -\<close>
  1.1191 -
  1.1192 -subsection \<open>cp. HOL/SPARK/Tools/fdl_lexer.ML\<close>
  1.1193 -ML \<open>
  1.1194 -\<close> ML \<open>
  1.1195 -type chars = (string * Position.T) list;
  1.1196 -val e_chars = []: (string * Position.T) list;               
  1.1197 -
  1.1198 -type banner = string * string * string;               val e_banner = ("b1", "b2", "b3")
  1.1199 -type date = string * string * string;                 val e_date = ("d1", "d2", "d3")
  1.1200 -type time = string * string * string * string option; val e_time = ("t1", "t2", "t3", SOME "t4")
  1.1201 -
  1.1202 -fun siv_header (_: chars) = 
  1.1203 -  ((e_banner, (e_date, e_time), (e_date, e_time), ("", "")), e_chars);
  1.1204 -\<close> ML \<open>
  1.1205 -\<close> 
  1.1206 -
  1.1207 -subsection \<open>cp. HOL/SPARK/Tools/spark_commands.ML\<close>
  1.1208 -ML \<open>
  1.1209 -\<close> ML \<open>
  1.1210 -(*  Title:      src/Tools/isac/BridgeJEdit/cp_spark_commands.ML
  1.1211 -    Author:     Walther Neuper, JKU Linz
  1.1212 -    (c) due to copyright terms
  1.1213 -
  1.1214 -Preliminary code for developing Outer_Syntax.command..Example from spark_open as a model.
  1.1215 -*)
  1.1216 -
  1.1217  fun spark_open header (files, prfx) thy =
  1.1218 -(*Fdl_Lexer.siv_header*)
  1.1219    let
  1.1220 -(** )val _ = @{print} {a = "//--- ###C spark_open", files = files, prfx = prfx};( **)
  1.1221 -    val xxx as ([{src_path, lines = vc_lines, pos = vc_pos, ...}: Token.file (*,.siv*)
  1.1222 -(** )  {lines = fdl_lines, pos = fdl_pos, ...},                              ( *.fld*)
  1.1223 -(** )  {lines = rls_lines, pos = rls_pos, ...}                               ( *.rls*)
  1.1224 -      ], thy') = 
  1.1225 -    ((** @{print} {a = "###C spark_open: before call <files thy>, files = siv_header !"};( **)
  1.1226 -      files thy);
  1.1227 -(*                                                                                    ^^^^^ arg!*)
  1.1228 +    val ([{src_path, lines = vc_lines, pos = vc_pos, ...}: Token.file], thy') = files thy;
  1.1229      val path = fst (Path.split_ext src_path);
  1.1230    in
  1.1231 -(** )@{print} {a = "###C spark_open: result from siv_header, without thy'", fst_xxx = fst xxx};( **)
  1.1232 -(** )@{print} {a = "###C spark_open: 1 arg for SPARK_VCs.set_vcs",
  1.1233 -(** ) arg1 = (snd (Fdl_Parser.parse_declarations fdl_pos (cat_lines fdl_lines))),
  1.1234 -      arg2 = (Fdl_Parser.parse_rules rls_pos (cat_lines rls_lines)),            ( **)
  1.1235 -(** ) arg3 = (snd (Fdl_Parser.parse_vcs header vc_pos (cat_lines vc_lines))), ( **)
  1.1236 -(*                                                     !^^^^^^^!*)
  1.1237 -      z = "\\--- ###C spark_open"};( **)
  1.1238 -    (*SPARK_VCs.*)set_vcs
  1.1239 -(*ADDED*) (**)
  1.1240 -(** )  (snd (Fdl_Parser.parse_declarations fdl_pos (cat_lines fdl_lines)))( **)Fdl_Parser.empty_decls(**)
  1.1241 -(** )  (Fdl_Parser.parse_rules rls_pos (cat_lines rls_lines))             ( **)Fdl_Parser.empty_rules(**)
  1.1242 -(** )  (snd (Fdl_Parser.parse_vcs header vc_pos (cat_lines vc_lines)))    ( **)Fdl_Parser.empty_vcs(**)
  1.1243 -(**)   (ParseC.read_out_formalise (fst (header     (fst (ParseC.scan' vc_pos (cat_lines vc_lines))))))(**)
  1.1244 -      path prfx thy'
  1.1245 +    set_vcs Fdl_Parser.empty_decls Fdl_Parser.empty_rules Fdl_Parser.empty_vcs(**)
  1.1246 +      (ParseC.read_out_formalise (fst (header (fst (ParseC.scan' vc_pos (cat_lines vc_lines))))))
  1.1247 +        path prfx thy'
  1.1248    end;
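(* Editorial sketch of the data flow in spark_open, a hedged reading of the code above:
     files thy                 .. reads the Example file into a single Token.file
     ParseC.scan' vc_pos ..    .. tokenizes the file's content from its source position
     header                    .. parses the token stream (instantiated by ParseC.vcs below)
     ParseC.read_out_formalise .. extracts the Formalise.T list
     set_vcs                   .. stores References, O_Model and context as Theory_Data *)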
  1.1249  \<close> ML \<open>
  1.1250 -\<close> ML \<open> (* compose NEW argument "header"                               ^v^v^v^v^v^v^v^v^v^v^v^v^v^v*)
  1.1251 -       (ParseC.read_out_formalise (fst (ParseC.vcs (fst (ParseC.scan' Position.start formalise_str)))))
  1.1252 -\<close> ML \<open>
  1.1253 -\<close> ML \<open>
  1.1254 -\<close> ML \<open> (* generate the arguments directly for: spark_open \<open>greatest_common_divisor/g_c_d\<close>*)
  1.1255 -(*     (snd (Fdl_Parser.parse_declarations fdl_pos (cat_lines fdl_lines))) *)
  1.1256 -        snd (Fdl_Parser.parse_declarations Position.start g_c_d_fdl_content)
  1.1257 -\<close> ML \<open>
  1.1258 -(*     (Fdl_Parser.parse_rules rls_pos (cat_lines rls_lines)) *)
  1.1259 -        Fdl_Parser.parse_rules Position.start g_c_d_rls_content
  1.1260 -\<close> ML \<open>
  1.1261 -val (("procedure", "Greatest_Common_Divisor.G_C_D"), vcs) =
  1.1262 -(*     (snd (Fdl_Parser.parse_vcs header vc_pos (cat_lines vc_lines))) *)
  1.1263 -        snd (Fdl_Parser.parse_vcs Fdl_Lexer.siv_header Position.start g_c_d_siv_content);
  1.1264 -length vcs = 11;
  1.1265 -vcs;
  1.1266  \<close> ML \<open>
  1.1267  \<close> ML \<open>
  1.1268  \<close>
  1.1269  
  1.1270  section \<open>setup command_keyword \<close>
  1.1271 -subsection \<open>hide for development, use for Test_Isac\<close>
  1.1272 -text \<open>
  1.1273 -(*HIDE for development, activate for Test_Isac*)
  1.1274 -type chars = (string * Position.T) list;
  1.1275 -val e_chars = []: (string * Position.T) list;
  1.1276 -
  1.1277 -fun spark_open (_: ((*Fdl_Lexer.*)chars -> 'a * (*Fdl_Lexer.*)chars)) 
  1.1278 -    (_: ('b -> Token.file list * theory) * string) (_: 'b) = @ {theory}
  1.1279 -
  1.1280 -type banner = string * string * string;               val e_banner = ("b1", "b2", "b3")
  1.1281 -type date = string * string * string;                 val e_date = ("d1", "d2", "d3")
  1.1282 -type time = string * string * string * string option; val e_time = ("t1", "t2", "t3", SOME "t4")
  1.1283 -
  1.1284 -fun siv_header (_: chars) = 
  1.1285 -  ((e_banner, (e_date, e_time), (e_date, e_time), ("", "")), e_chars);
  1.1286 -(*HIDE for development, activate for Test_Isac*)
  1.1287 -\<close> ML \<open>
  1.1288 -\<close> 
   1.1289 -subsection \<open>def. for Build_Isac; for Test_Isac outcomment SPARK_Commands, Fdl_Lexer below\<close>
  1.1290  ML \<open>
  1.1291  \<close> ML \<open>
  1.1292  val siv_header = ParseC.vcs; (*---------------------------------------------vvvvvvvvv*)
  1.1293 @@ -1310,723 +127,15 @@
  1.1294  \<close> ML \<open>
  1.1295  \<close>
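Editorial sketch, not part of the changeset: the actual command registration is elided by
the hunk above; modeled on SPARK's spark_open it would look roughly as follows, where
Parse.parname and Resources.provide_parse_files are taken from the SPARK original and are
assumptions here:

  val _ =
    Outer_Syntax.command \<^command_keyword>\<open>Example\<close>
      "read a Formalise file and start an Isac Calculation"
      (Resources.provide_parse_files "Example" -- Parse.parname
        >> (Toplevel.theory o spark_open siv_header));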
  1.1296  
  1.1297 -subsection \<open>test runs with Example and spark_open\<close>
  1.1298 -(*======= vv-- Example =================================================================*)
  1.1299 -(** Test_Isac finds exp_Statics_Biegel_Timischl_7-70.siv ?!?* )
  1.1300 -Example \<open>/usr/local/isabisac/src/Tools/isac/Examples/exp_Statics_Biegel_Timischl_7-70\<close>(*.str*)
  1.1301 -( ***Output:
  1.1302 +subsection \<open>test runs with Example\<close>
  1.1303 +text \<open>
  1.1304 +  Do in Test_Some.thy:
  1.1305  
  1.1306 -*)
  1.1307 -(*====== ^^-- Example, spark_open--vv ==================================================*)
  1.1308 -(* DO THAT IN ..................... HOL/SPARK/Examples/Gcd/Greatest_Common_Divisor.thy)
  1.1309 -spark_open \<open>/usr/local/isabisac/src/HOL/SPARK/Examples/Gcd/greatest_common_divisor/g_c_d\<close>
  1.1310 -( ***Output:
  1.1311 +    Example \<open>/usr/local/isabisac/src/Tools/isac/Examples/exp_Statics_Biegel_Timischl_7-70\<close>
  1.1312  
  1.1313 -*)
  1.1314 -(*==================== spark_open--^^ ==================================================*)
  1.1315 -
  1.1316 -section \<open>Adapt SPARK's parser -> parse Formalise.T\<close>
  1.1317 -text \<open>
   1.1318 -  Possible models where a file is read and parsed:
  1.1319 -  * for *.bib see usage of Resources.provide_parse_files     .. parsed by LaTeX
   1.1320 -  * Outer_Syntax.command \<command_keyword>\<open>external_file\<close>  .. ?!?
  1.1321 -  * Outer_Syntax.command \<command_keyword>\<open>spark_open\<close>
  1.1322 -  We pursue the latter.
   1.1323 -  From changeset 2b9b4b38ab96 we found Fdl_Lexer.scan to be the model
   1.1324 -  for scanning/parsing Formalise.T.
   1.1325 +  Starting the Calculation for the Example requires the session Isac with Isac.Biegelinie etc.
  1.1326  \<close>
  1.1327  
  1.1328 -subsection \<open>testbed separates Fdl_Lexer.scan + Fdl_Parser from SPARK code\<close>
  1.1329 -declare [[ML_print_depth = 7]] (* 3 7 10 20 999999999 *)
  1.1330 -ML \<open>
  1.1331 -open Fdl_Lexer
  1.1332 -(** )open Fdl_Parser( *conflict with some of Fdl_Lexer.* *)
  1.1333 -(*
  1.1334 -  step 1: go top down and keep respective results according to trace from
  1.1335 -          spark_open \<open>greatest_common_divisor/g_c_d\<close> SPARK/../Greatest_Common_Divisor.thy.
  1.1336 -          (in the example this is simple: just watch the bottom of Output
  1.1337 -           The following verification conditions remain to be proved:
  1.1338 -             procedure_g_c_d_4
  1.1339 -             procedure_g_c_d_11)
  1.1340 -        ! remove @~{print} from calling functions (higher level) 
  1.1341 -          once you go down to the called functions (lower level)
  1.1342 -          in order to reduce noise and to focus the code under actual consideration.
  1.1343 -  step 2: at bottom gather the most elementary funs for transformation
  1.1344 -           of g_c_d_siv_content to args of Fdl_Lexer.scan
  1.1345 -  step 3: parse the tokens created by  Fdl_Lexer.scan
  1.1346 -*)
  1.1347 -\<close> ML \<open>
  1.1348 -\<close> ML \<open> (** high start level: parsing to header + VCs **)
  1.1349 -val parsed = Fdl_Parser.parse_vcs Fdl_Lexer.siv_header Position.start g_c_d_siv_content;
  1.1350 -\<close> ML \<open> (*               ^^^^^^^^^ <<<<<------------------------------------------------------*)
  1.1351 -val ((banner, (date, time), (date2, time2), (string, string2)), ((str, str2), vcss)) = parsed;
  1.1352 -
  1.1353 -if banner = ("Semantic Analysis of SPARK Text",
  1.1354 -    "Examiner Pro Edition, Version 9.1.0, Build Date 20101119, Build 19039",
  1.1355 -    "Copyright (C) 2010 Altran Praxis Limited, Bath, U.K.")  andalso
  1.1356 -  (date, time) = (("29", "NOV", "2010"), ("14", "30", "10", NONE)) andalso
  1.1357 -  (date2, time2) = (("29", "NOV", "2010"), ("14", "30", "11", NONE)) andalso
  1.1358 -  (string, string2) =
  1.1359 -    ("SPARK Simplifier Pro Edition, Version 9.1.0, Build Date 20101119, Build 19039",
  1.1360 -    "Copyright (C) 2010 Altran Praxis Limited, Bath, U.K.") andalso
  1.1361 -  (str, str2) = ("procedure", "Greatest_Common_Divisor.G_C_D")
  1.1362 -  then () else error "Fdl_Parser.parse_vcs header CHANGED";
  1.1363 -if length vcss = 11 (* verification conditions (VCs) from g_c_d_siv_content *) then
  1.1364 -  case vcss of
  1.1365 -    ("path(s) from start to run-time check associated with statement of line 8", 
  1.1366 -      [("procedure_g_c_d_1", ([], [("1", Fdl_Parser.Ident "true")]))]) ::
  1.1367 -    ("path(s) from start to run-time check associated with statement of line 8", 
  1.1368 -      [("procedure_g_c_d_2", ([], [("1", Fdl_Parser.Ident "true")]))]) ::
  1.1369 -    ("path(s) from start to assertion of line 10", 
  1.1370 -      [("procedure_g_c_d_3", ([], [("1", Fdl_Parser.Ident "true")]))]) :: 
  1.1371 -    ("path(s) from assertion of line 10 to assertion of line 10",
  1.1372 -      [("procedure_g_c_d_4",
  1.1373 -        (("1", Fdl_Parser.Funct (">=", [Fdl_Parser.Ident "c", Fdl_Parser.Number 0])) :: 
  1.1374 -         ("2", Fdl_Parser.Funct (">", [Fdl_Parser.Ident "d", Fdl_Parser.Number 0])) :: _,
  1.1375 -         _))]) :: _ => ()
  1.1376 -  | _ => error "Fdl_Parser.parse_vcs vcss CHANGED 1"
  1.1377 -else error "Fdl_Parser.parse_vcs vcss CHANGED 2";
  1.1378 -\<close> ML \<open>
  1.1379 -\<close> ML \<open>
  1.1380 -\<close> ML \<open> (* lower level 1: tokenize string to header + VCs *)
  1.1381 -val level1_header_toks =
  1.1382 -(* tokenize to header + VCs is NON-standard ..
   1.1383 -            vvvvvvvv--- parses vvvvvvvvvv     +     vvvvvvvvv, but leaves vcss as Token list*)
  1.1384 -  Fdl_Lexer.tokenize Fdl_Lexer.siv_header Fdl_Lexer.c_comment Position.start g_c_d_siv_content;
  1.1385 -\<close> ML \<open> (*   ^^^^^^^^ <<<<<-------------------------------------------------------------------*)
  1.1386 -val ((banner, (date, time), (date2, time2), (string, string2)), toks ) = level1_header_toks;
  1.1387 -
  1.1388 -if banner = ("Semantic Analysis of SPARK Text",
  1.1389 -    "Examiner Pro Edition, Version 9.1.0, Build Date 20101119, Build 19039",
  1.1390 -    "Copyright (C) 2010 Altran Praxis Limited, Bath, U.K.")  andalso
  1.1391 -  (date, time) = (("29", "NOV", "2010"), ("14", "30", "10", NONE)) andalso
  1.1392 -  (date2, time2) = (("29", "NOV", "2010"), ("14", "30", "11", NONE)) andalso
  1.1393 -  (string, string2) =
  1.1394 -    ("SPARK Simplifier Pro Edition, Version 9.1.0, Build Date 20101119, Build 19039",
  1.1395 -    "Copyright (C) 2010 Altran Praxis Limited, Bath, U.K.")
  1.1396 -  then () else error "Fdl_Lexer.tokenize header CHANGED";
  1.1397 -if length toks = 319 (* tokens for str :: str :: verification conditions from g_c_d_siv_content *) then
  1.1398 -  case toks of
  1.1399 -    Token (Keyword, "procedure", _(*Position.T*)) :: 
  1.1400 -    Token (Long_Ident, "Greatest_Common_Divisor.G_C_D", _(*Position.T*)) :: _  => ()
  1.1401 -  | _ => error "Fdl_Lexer.tokenize tokens CHANGED 1"
   1.1402 -else error "Fdl_Lexer.tokenize tokens CHANGED 2";
  1.1403 -\<close> ML \<open>
  1.1404 -\<close> ML \<open>
  1.1405 -\<close> ML \<open> (* lower level 2: tokenize the string *)
  1.1406 -val chars = Fdl_Lexer.explode_pos g_c_d_siv_content(*_content*) Position.start;
  1.1407 -if length chars = 2886 then () else error "Fdl_Lexer.explode_pos g_c_d_siv_content CHANGED";
  1.1408 -val level2_header_toks = (Scan.finite char_stopper (*..from tokenize..*)
  1.1409 -  (Scan.error (Fdl_Lexer.scan Fdl_Lexer.siv_header Fdl_Lexer.c_comment)) chars);
  1.1410 -(* ---------------------------------------------------------------------------\
  1.1411 -               Fdl_Lexer.scan Fdl_Lexer.siv_header Fdl_Lexer.c_comment chars
  1.1412 -exception MORE () raised (line 176 of "General/scan.ML") ---------------------/ *)
  1.1413 -
  1.1414 -case level2_header_toks of
  1.1415 -  (((("Semantic Analysis of SPARK Text", 
  1.1416 -       "Examiner Pro Edition, Version 9.1.0, Build Date 20101119, Build 19039", 
  1.1417 -       "Copyright (C) 2010 Altran Praxis Limited, Bath, U.K."), 
  1.1418 -       (("29", "NOV", "2010"), ("14", "30", "10", NONE)),
  1.1419 -       (("29", "NOV", "2010"), ("14", "30", "11", NONE)), 
  1.1420 -       ("SPARK Simplifier Pro Edition, Version 9.1.0, Build Date 20101119, Build 19039", 
  1.1421 -         "Copyright (C) 2010 Altran Praxis Limited, Bath, U.K.")),
  1.1422 -     Token (Keyword, "procedure", _) :: Token (Long_Ident, "Greatest_Common_Divisor.G_C_D", _) ::
  1.1423 -     Token (Traceability, 
  1.1424 -       "path(s) from start to run-time check associated with statement of line 8", _) ::
  1.1425 -     _ ),
  1.1426 -   []) => ()
  1.1427 -| _ => error "Fdl_Lexer.scan level2 CHANGED";
  1.1428 -\<close> ML \<open>
  1.1429 -\<close> ML \<open>
  1.1430 -\<close> ML \<open> (* lower level 2: parse the tokens *)
  1.1431 -val ((_, tokens), _) = level2_header_toks;
  1.1432 -\<close> ML \<open>
  1.1433 -Fdl_Parser.vcs: T list ->
  1.1434 -  ((string * string) * (string * (string * ((string * Fdl_Parser.expr) list * (string * Fdl_Parser.expr) list)) list) list)
  1.1435 -  * T list;
  1.1436 -(filter Fdl_Lexer.is_proper): T list -> T list;
  1.1437 -\<close> ML \<open>
  1.1438 -val (parsed', _) =
  1.1439 -  Scan.finite Fdl_Lexer.stopper (Scan.error (Fdl_Parser.!!! Fdl_Parser.vcs))
  1.1440 -    (filter Fdl_Lexer.is_proper (*|*)tokens(*|*))
  1.1441 -(* ---------------------------------------------------------------------------\
  1.1442 -                 Fdl_Parser.vcs (*|*)tokens(*|*)
  1.1443 -exception ABORT fn raised (line 109 of "General/scan.ML") --------------------/ *)
  1.1444 -\<close> ML \<open>
  1.1445 -if parsed' = ((str, str2), vcss) then () else error "Fdl_Parser.parse_vcs level 2 CHANGED 2";
  1.1446 -\<close> ML \<open>
  1.1447 -\<close>
  1.1448 -
  1.1449 -subsection \<open>test data for Isac's Formalise.T parser\<close>
  1.1450 -ML \<open>
  1.1451 -\<close> ML \<open>
  1.1452 -val form_single_str = (
  1.1453 -  "(\n" ^
  1.1454 -(* Formalise.model: *)
  1.1455 -  "  [\"Traegerlaenge L\", \"Streckenlast q_0\", \"Biegelinie y\",\n" ^
  1.1456 -	"    \"Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]\",\n" ^
  1.1457 -	"    \"FunktionsVariable x\", \"GleichungsVariablen [c, c_2, c_3, c_4]\",\n" ^
  1.1458 -  "    \"AbleitungBiegelinie dy\"],\n" ^
  1.1459 -(* Formalise.spec: *)
  1.1460 -  "  (\"Biegelinie\", [\"Biegelinien\"], [\"IntegrierenUndKonstanteBestimmen2\"])\n" ^
  1.1461 -  ")")
  1.1462 -\<close> ML \<open>
  1.1463 -val formalise_str = "[" ^ form_single_str ^ "]"; (*..we assume one element ONLY *)
  1.1464 -\<close> ML \<open>
  1.1465 -val form_spec_str =
  1.1466 -(* Formalise.spec: *)
  1.1467 -  "  (\"Biegelinie\", [\"Biegelinien\"], [\"IntegrierenUndKonstanteBestimmen2\"])";
  1.1468 -val form_spec_str' =
  1.1469 -  "(\"Biegelinie\",[\"Biegelinien\"],[\"IntegrierenUndKonstanteBestimmen2\"])";
  1.1470 -\<close> ML \<open>
  1.1471 -val form_model_str = (
  1.1472 -(* Formalise.model: *)
  1.1473 -  "  [\"Traegerlaenge L\", \"Streckenlast q_0\", \"Biegelinie y\",\n" ^
  1.1474 -	"    \"Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]\",\n" ^
  1.1475 -	"    \"FunktionsVariable x\", \"GleichungsVariablen [c, c_2, c_3, c_4]\",\n" ^
  1.1476 -  "    \"AbleitungBiegelinie dy\"],\n")
  1.1477 -val form_model_str' = ( (* NO whitespace *)
  1.1478 -  "[\"Traegerlaenge L\",\"Streckenlast q_0\",\"Biegelinie y\",\"Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]\",\"FunktionsVariable x\", \"GleichungsVariablen [c, c_2, c_3, c_4]\",\"AbleitungBiegelinie dy\"]")
  1.1479 -\<close> ML \<open>
   1.1480 -val form_model_str'' = ( (* NO whitespace, NO leading [ *)
  1.1481 -  "\"Traegerlaenge L\",\"Streckenlast q_0\",\"Biegelinie y\",\"Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]\",\"FunktionsVariable x\", \"GleichungsVariablen [c, c_2, c_3, c_4]\",\"AbleitungBiegelinie dy\"]")
  1.1482 -\<close> ML \<open>
   1.1483 -val form_model_str''' = ( (* NO whitespace, NO "," between strings *)
  1.1484 -  "[\"Traegerlaenge L\"\"Streckenlast q_0\"\"Biegelinie y\"\"Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]\"\"FunktionsVariable x\"\"GleichungsVariablen [c, c_2, c_3, c_4]\"\"AbleitungBiegelinie dy\"]")
  1.1485 -\<close> ML \<open>
  1.1486 -\<close>
  1.1487 -
  1.1488 -subsection \<open>Adapt SPARK's code for parsing Formalise.T\<close>
  1.1489 -text \<open>
  1.1490 -\<close> ML \<open> (* equip the string -----vvvvvvvvvvvvvvv with Positions..*)
  1.1491 -val chars = Fdl_Lexer.explode_pos form_single_str Position.start: Fdl_Lexer.chars;
   1.1492 -if length chars = 306 then () else error "Fdl_Lexer.explode_pos form_single_str CHANGED";
  1.1493 -\<close> 
  1.1494 -
  1.1495 -subsubsection \<open>identify various code that might serve as model\<close>
  1.1496 -ML \<open>
  1.1497 -\<close> ML \<open> (* gen_comment adapted for a single string (*1*) on simplified form_model_str'' *)
  1.1498 -val tokenize_string = $$$ "\"" |-- !!! "unclosed string" (*2*)
  1.1499 -  (Scan.repeat (Scan.unless ($$$ "\"") any) --| $$$ "\"") >> make_token Comment(*!!!*);
  1.1500 -tokenize_string: chars -> T * chars
  1.1501 -\<close> ML \<open>
  1.1502 -val chars = Fdl_Lexer.explode_pos form_model_str'' Position.start: Fdl_Lexer.chars;
  1.1503 -tokenize_string chars; (*3*) (* = WE ONLY GET THE FIRST ELEMENT tokenized, WHY?..
  1.1504 -   (Token (Comment, "Traegerlaenge L", {line=1, offset=2}),
  1.1505 -    [(",", {line=1, offset=18}), ("\"", {line=1, offset=19}), ("S", {line=1, offset=20}), ("t", {line=1, offset=21}), ("r", {line=1, offset=22}), ("e", {line=1, offset=23}), ("c", {line=1, offset=24}),
  1.1506 -     ("k", {line=1, offset=25}), ("...", {line=1, offset=26}), ...])*)
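(* Editorial explanation, from the definition above: tokenize_string consumes exactly one
   quoted string and stops after its closing quote; the rest begins with "," which this
   single-string scanner does not accept, so repetition and separators need extra scanners. *)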
  1.1507 -\<close> ML \<open>
  1.1508 -\<close> ML \<open> (* gen_comment adapted for several strings (*4*) on simplified form_model_str''' *)
  1.1509 -val tokenize_strings =
  1.1510 -  Fdl_Lexer.keyword "[" |-- !!! "unclosed list"
  1.1511 -    (Scan.repeat (Scan.unless (Fdl_Lexer.keyword "]") tokenize_string) --|
  1.1512 -  Fdl_Lexer.keyword "]");
  1.1513 -tokenize_strings: chars -> T(*Token of kind * string * Position.T*) list * chars
  1.1514 -\<close> ML \<open>
  1.1515 -val chars = Fdl_Lexer.explode_pos form_model_str''' Position.start;
  1.1516 -tokenize_strings chars;(* =       THIS LOOKS PROMISING..
  1.1517 -   ([Token (Comment, "Traegerlaenge L", {line=1, offset=3}), Token (Comment, "Streckenlast q_0", {line=1, offset=20}), Token (Comment, "Biegelinie y", {line=1, offset=38}),
  1.1518 -     Token (Comment, "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", {line=1, offset=52}), Token (Comment, "FunktionsVariable x", {line=1, offset=118}),
  1.1519 -     Token (Comment, "GleichungsVariablen [c, c_2, c_3, c_4]", {line=1, offset=139}), Token (Comment, "AbleitungBiegelinie dy", {line=1, offset=179})],
  1.1520 -    [])*)
  1.1521 -\<close> ML \<open>
  1.1522 -\<close> ML \<open> (*.. BUT THAT DOESN'T WORK FOR MORE REALISTIC DATA..*)
  1.1523 -val chars = Fdl_Lexer.explode_pos form_model_str' Position.start;
  1.1524 -(* (*6*)
  1.1525 -tokenize_strings chars;
  1.1526 -exception ABORT fn raised (line 109 of "General/scan.ML")*)
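(* Editorial explanation, hedged: the repeat stops at the first "," after a string, the
   expected "]" is not found there, and the surrounding !!! "unclosed list" escalates
   that failure to ABORT instead of ordinary backtracking. *)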
  1.1527 -\<close> ML \<open>
  1.1528 -\<close> ML \<open>
   1.1529 -\<close> ML \<open> (*A NEW IDEA: postpone list-structure to Fdl_Parser, now just make reasonable tokens *)
  1.1530 -val chars = Fdl_Lexer.explode_pos "\"GleichungsVariablen [c, c_2, c_3, c_4]\"" Position.start;
  1.1531 -tokenize_string chars; (* = (Token (Comment, "GleichungsVariablen [c, c_2, c_3, c_4]"*)
  1.1532 -\<close> ML \<open>
  1.1533 -\<close> ML \<open> (*..what about delimiters of lists?..*)
  1.1534 -\<close> ML \<open> (*let's go bottom up..*)
  1.1535 -val xxx =
  1.1536 -  whitespace --
  1.1537 -    keyword "(" --
  1.1538 -      keyword "[";
  1.1539 -xxx: chars -> ((chars * chars) * chars) * chars;
  1.1540 -\<close> ML \<open>
  1.1541 -val xxx =
  1.1542 -                                      whitespace --
  1.1543 -    keyword "(" --
  1.1544 -      keyword "[" -- tokenize_string;
  1.1545 -xxx: chars -> (((chars * chars) * chars) * T) * chars
  1.1546 -\<close> ML \<open> (*                                 ^^^ list ?!*)
  1.1547 -\<close> ML \<open>
  1.1548 -val xxx =
  1.1549 -                                      whitespace --
  1.1550 -    keyword "(" --                    whitespace --
  1.1551 -      keyword "[" --                  whitespace --
  1.1552 -        tokenize_string;
  1.1553 -xxx: chars -> (((((chars * chars) * chars) * chars) * chars) * T) * chars;
  1.1554 -\<close> ML \<open> (* how can we drop  ^^^^^    ^^^^^    ^^ and make here ^^^ a list ?!
  1.1555 -          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^*)
  1.1556 -val chars = Fdl_Lexer.explode_pos "(\n  [\"Traegerlaenge L\""  Position.start;
  1.1557 -\<close> ML \<open>
   1.1558 -xxx chars; (* =  WE GET AN UNPLEASANT MIXTURE OF Fdl_Lexer.T  AND Fdl_Lexer.chars ..
  1.1559 -   (((((([], [("(", {line=1, offset=1})]), [("\n", {line=1, offset=2}), (" ", {line=2, offset=3}), (" ", {line=2, offset=4})]), [("[", {line=2, offset=5})]), []),
  1.1560 -     Token (Comment, "Traegerlaenge L", {line=2, offset=7})),
  1.1561 -    [])*)
  1.1562 -\<close> ML \<open>
  1.1563 -tokenize_string: chars -> T * chars;
  1.1564 -whitespace: chars -> chars * chars;
  1.1565 -tokenize: (chars -> 'a * chars) -> (chars -> T * chars) -> Position.T -> string -> 'a * T list;
  1.1566 -Scan.repeat: ('a -> 'b * 'a) -> 'a -> 'b list * 'a
  1.1567 -(*                  ^^                ^^^^^^^                                           ^^^^^^*)
  1.1568 -\<close> ML \<open>
  1.1569 -\<close>  
  1.1570 -
   1.1571 -subsubsection \<open>have a closer look at SPARK's main tokenizer\<close>
  1.1572 -ML \<open>
  1.1573 -\<close> ML \<open> Fdl_Lexer.scan; (*7*) (*.. <ctrl><mouse> leads to the source *)
  1.1574 -\<close> ML \<open>
  1.1575 -val aaa = (*remove code specific for SPARK..*)
  1.1576 -  Scan.repeat (Scan.unless (Scan.one is_char_eof)
  1.1577 -    ( (*comment *)
  1.1578 -        (keyword "For" -- whitespace1) |--
  1.1579 -           Scan.repeat1 (Scan.unless (keyword ":") any) --|
  1.1580 -           keyword ":" >> make_token Traceability             
  1.1581 -     || Scan.max leq_token
  1.1582 -          (Scan.literal lexicon >> make_token Keyword)
  1.1583 -          (   long_identifier >> make_token Long_Ident
  1.1584 -           || identifier >> make_token Ident
  1.1585 -          )
  1.1586 -     || number >> make_token Number
  1.1587 -    )
  1.1588 -  );
  1.1589 -\<close> ML \<open> (* we investigate the alternatives || and confirm observations from tracing:
  1.1590 -          EACH ROUND OF LEXING CREATES one TOKEN ----------------------vvv *)
  1.1591 -(*fun scan header comment = ...                                                      (*8*)
  1.1592 -   (    comment *)
  1.1593 -  (*||*)(keyword "For" -- whitespace1) |--
  1.1594 -           Scan.repeat1 (Scan.unless (keyword ":") any) --|
  1.1595 -           keyword ":" >> make_token Traceability            : chars -> T * chars;
  1.1596 -  (*||*)Scan.max leq_token
  1.1597 -          (Scan.literal lexicon >> make_token Keyword)
  1.1598 -          (   long_identifier >> make_token Long_Ident
  1.1599 -           || identifier >> make_token Ident
  1.1600 -          )                                                  : chars -> T * chars;
  1.1601 -  (*||*)number >> make_token Number                          : chars -> T * chars;
  1.1602 -(*                                               ----------------------^^^ *)
  1.1603 -\<close> ML \<open>
  1.1604 -val aaa = 
  1.1605 -  Scan.repeat (Scan.unless (Scan.one is_char_eof) (*8b*)
  1.1606 -    ((**)   (tokenize_string (**)-- whitespace(**))      (* reads a first string including , ]  *)
  1.1607 -     (**)|| (keyword "," >> tokenize_string)             (* further element of string list ?" " *)
  1.1608 -     (**)|| ((Scan.max leq_token                         (* *)
  1.1609 -     (**)    (Scan.literal lexicon >> make_token Keyword)(**)
  1.1610 -     (**)    (   long_identifier >> make_token Long_Ident(**)
  1.1611 -     (**)     || identifier >> make_token Ident)         (**)
  1.1612 -     (**)    )  -- whitespace)                           (**)
  1.1613 -    )
  1.1614 -  );
  1.1615 -\<close> ML \<open>
  1.1616 -val chars = Fdl_Lexer.explode_pos "\"aa\",\"bb\"] ##" Position.start;
  1.1617 -\<close> ML \<open>
  1.1618 -aaa chars; (* --------vvvv here we get both as Tokens, list elements AND list delimiters
  1.1619 -   ([(Token (Comment, "aa", {line=1, offset=2}), []),                            (*6a*)
  1.1620 -     (Token (Keyword, ",", {line=1, offset=5}), []), 
  1.1621 -     (Token (Comment, "bb", {line=1, offset=7}), []), 
  1.1622 -     (Token (Keyword, "]", {line=1, offset=10}), 
  1.1623 -     [(" ", {line=1, offset=11})])],
  1.1624 -    [("#", {line=1, offset=12}), ("#", {line=1, offset=13})])*)
  1.1625 -\<close> ML \<open>
  1.1626 -\<close> ML \<open>
  1.1627 -\<close> ML \<open> (* what is this part of scan doing ..*)                    (*8*)
  1.1628 -val yyy = Scan.max leq_token
  1.1629 -             (Scan.literal lexicon >> make_token Keyword)
  1.1630 -             (   long_identifier >> make_token Long_Ident
  1.1631 -              || identifier >> make_token Ident);
  1.1632 -yyy: chars -> T * chars;
  1.1633 -\<close> ML \<open>
  1.1634 -val chars = Fdl_Lexer.explode_pos "For " Position.start;
  1.1635 -(**)yyy chars;(* = (Token (Keyword, "For", {line=1, offset=1}), [(" ", ..*)
  1.1636 -\<close> ML \<open>
  1.1637 -val chars = Fdl_Lexer.explode_pos ", " Position.start;
  1.1638 -(**)yyy chars;(* = (Token (Keyword, ",", {line=1, offset=1}), [(" ", ..*)
  1.1639 -\<close> ML \<open>
  1.1640 -val chars = Fdl_Lexer.explode_pos " , " Position.start;
  1.1641 -(** )yyy chars;( * exception FAIL NONE raised (line 204 of "General/scan.ML") *)
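(* Editorial explanation, hedged: both alternatives of yyy expect a non-blank first
   character (lexicon keyword resp. (long_)identifier), so leading whitespace makes
   Scan.max fail; Fdl_Lexer.scan copes by stripping whitespace after each token. *)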
  1.1642 -\<close> ML \<open>
  1.1643 -val chars = Fdl_Lexer.explode_pos "id " Position.start;
  1.1644 -(**)yyy chars;(* = (Token (Ident, "id", {line=1, offset=1}), [(" ", ..*)
  1.1645 -\<close> ML \<open>
  1.1646 -val chars = Fdl_Lexer.explode_pos "spec.id " Position.start;
  1.1647 -(**)yyy chars;(* = (Token (Long_Ident, "spec.id", {line=1, offset=1}), [(" ", ..*)
  1.1648 -\<close> ML \<open> (*..we see: this part of scan handles whitespace perfectly! *)
  1.1649 -\<close> ML \<open>
  1.1650 -\<close>  
  1.1651 -
  1.1652 -subsubsection \<open>play with function types and construct them systematically\<close>
  1.1653 -ML \<open>
  1.1654 -\<close> ML \<open> (* string list is a string || keyword "," || whitespace*)            (*10*)
  1.1655 -tokenize_string                  : chars -> T * chars;
  1.1656 -keyword "," >> make_token Keyword: chars -> T * chars;
   1.1657 -whitespace                       : chars -> chars * chars; (*? ? ? how to make this of equal type -- resolved by comma_spaced below *)
  1.1658 -\<close> ML \<open>
  1.1659 -val chars = Fdl_Lexer.explode_pos ",  " Position.start;                                     (*11*)
  1.1660 -whitespace chars;
  1.1661 -(* = ([], [(",", {line=1, offset=1}), (" ", {line=1, offset=2}), (" ", {line=1, offset=3})])*)
  1.1662 -\<close> ML \<open>
  1.1663 -val chars = Fdl_Lexer.explode_pos "  ," Position.start;                                     (*11*)
  1.1664 -whitespace chars;
  1.1665 -(* = ([(" ", {line=1, offset=1}), (" ", {line=1, offset=2})], [(",", {line=1, offset=3})])*)
  1.1666 -\<close> ML \<open>
  1.1667 -val chars = Fdl_Lexer.explode_pos " , " Position.start;                                     (*11*)
  1.1668 -whitespace chars;
  1.1669 -(* = ([(" ", {line=1, offset=1})], [(",", {line=1, offset=2}), (" ", {line=1, offset=3})])*)
  1.1670 -\<close> ML \<open>
  1.1671 -\<close> ML \<open>
  1.1672 -\<close> ML \<open> (* investigate (whitespace -- keyword "," -- whitespace) >> make_token Keyword*)
  1.1673 -\<close> ML \<open>
  1.1674 -val xxx = (* this nicely separates whitespace from "," *)                                   (*12*)
  1.1675 -  (whitespace -- keyword "," -- whitespace): chars -> ((chars * chars) * chars) * chars;
  1.1676 -val chars = Fdl_Lexer.explode_pos "   ,   ." Position.start;
  1.1677 -xxx chars;
  1.1678 -(* = (((
  1.1679 -    [(" ", {line=1, offset=1}), (" ", {line=1, offset=2}), (" ", {line=1, offset=3})], 
  1.1680 -\<longrightarrow> [(",", {line=1, offset=4})]), 
  1.1681 -    [(" ", {line=1, offset=5}), (" ", {line=1, offset=6}), (" ", {line=1, offset=7})]),
  1.1682 -    [(".", {line=1, offset=8})])
  1.1683 -^^^^^^^^^^^^^^^^^^^^^^ this nicely separates whitespace from "," ^^^^^^^^^^^^^^^^^^^^^*)
  1.1684 -\<close> ML \<open>
  1.1685 -tokenize_string                                                            : chars -> T * chars;
  1.1686 -(*whitespace*)keyword ","(*whitespace*) >> make_token Keyword              : chars -> T * chars;
  1.1687 -(* we want this type uniformly for ||                                      ^^^^^^^^^^^^^^^^^^^^^ *)
  1.1688 -\<close> ML \<open>
  1.1689 -val eee = (fn (((_, chs1), _), chs2) => (chs1, chs2)) : (('a * 'b) * 'c) * 'd -> 'b * 'd;  (*14*)
   1.1690 -\<close> text \<open> HOW TO CORRECT THIS...
  1.1691 -(whitespace -- keyword "," -- whitespace) >> (fn (((_, chs1), _), chs2) => (chs1, chs2));
  1.1692 -ERROR Can't unify (string * Position.T) list to 'a * 'b
  1.1693 -\<close> ML \<open>
  1.1694 -val fff = (fn (((_, chars), _), _) => chars)                :       (('a * 'b) * 'c) * 'd -> 'b;(*14*)
  1.1695 -val ggg = (fn _ => (fn (((_, chars), _), _) => chars))      : 'a -> (('b * 'c) * 'd) * 'e -> 'c;
  1.1696 -val hhh = (fn xxx => (fn (((_, chars), _), _) => chars) xxx):       (('a * 'b) * 'c) * 'd -> 'b;
  1.1697 -val iii = (fn ((_, chars), _) => make_token Keyword chars)
  1.1698 -                                                            :      ('a * chars) * 'b -> T
  1.1699 -\<close> ML \<open>
  1.1700 -\<close> ML \<open> (*after above trials we use this pattern for building comma_spaced *)                   (*11a*)
  1.1701 -keyword ","                        : chars -> chars                          * chars;
  1.1702 -make_token Keyword                 :          chars                     -> T        ; 
  1.1703 -keyword "," >> make_token Keyword  : chars ->                              T * chars;
  1.1704 -                                                                        
  1.1705 -tokenize_string                    : chars ->                              T * chars;
  1.1706 -whitespace                         : chars -> chars                          * chars;
  1.1707 -\<close> ML \<open>
  1.1708 -\<close> ML \<open>
  1.1709 -val space_comma_space = xxx;
  1.1710 -val make_token_comma = iii;
  1.1711 -val comma_spaced = space_comma_space >> make_token_comma
  1.1712 -\<close> ML \<open>
  1.1713 -xxx  : chars -> ((chars * chars) * chars) * chars;
  1.1714 -(**)
  1.1715 -space_comma_space                  : chars -> ((chars * chars) * chars)      * chars;
  1.1716 -make_token_comma                   :          ((chars * chars) * chars) -> T        ;
  1.1717 -comma_spaced                       : chars ->                              T * chars; (**)
  1.1718 -\<close> ML \<open>
  1.1719 -val chars = Fdl_Lexer.explode_pos "   ,   ." Position.start;
  1.1720 -comma_spaced chars; (*(Token (Keyword, ",", {line=1, offset=4}), [(".", {line=1, offset=8})])*)
  1.1721 -(*\--------- found right type for arguments of Scan.repeat ---------------------------------/*)
  1.1722 -\<close> ML \<open>
  1.1723 -\<close> ML \<open>
  1.1724 -\<close> ML \<open> (*we use that for a list tokenizer..*) (*13*)
  1.1725 -val string_list = 
  1.1726 -  Scan.repeat (tokenize_string || comma_spaced);
  1.1727 -string_list: chars -> T list * chars
  1.1728 -\<close> ML \<open>
  1.1729 -val chars = Fdl_Lexer.explode_pos form_model_str'' Position.start;
   1.1730 -\<close> ML \<open> (* string_list works..*)
  1.1731 -string_list chars; (* =
  1.1732 -   ([Token (Comment, "Traegerlaenge L", {line=1, offset=2})  , Token (Keyword, ",", {line=1, offset=18}), 
  1.1733 -     Token (Comment, "Streckenlast q_0", {line=1, offset=20}), Token (Keyword, ",", {line=1, offset=37}),
  1.1734 -     Token (Comment, "Biegelinie y", {line=1, offset=39})    , Token (Keyword, ",", {line=1, offset=52}), 
  1.1735 -     Token (Comment, "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", {line=1, offset=54}), 
  1.1736 -                                                               Token (Keyword, ",", {line=1, offset=119}), 
  1.1737 -     Token (Comment, "FunktionsVariable x", {line=1, offset=121}), 
  1.1738 -                                                               Token (Keyword, ",", {line=1, offset=141}),
  1.1739 -     Token (Comment, "GleichungsVariablen [c, c_2, c_3, c_4]", {line=1, offset=144}), 
  1.1740 -                                                               Token (Keyword, ",", {line=1, offset=183}), 
  1.1741 -     Token (Comment, "AbleitungBiegelinie dy", {line=1, offset=185})],
  1.1742 -    [("]", {line=1, offset=208})])*)
  1.1743 -\<close> ML \<open>
  1.1744 -\<close> ML \<open>
   1.1745 -\<close> ML \<open> (*but how to connect this with tokenizing the other parts?..*)
  1.1746 -string_list :                          chars ->  T list * chars;
  1.1747 -Scan.repeat1: ('a    -> 'b * 'a   ) -> 'a    -> 'b list * 'a;
  1.1748 -Scan.repeat1: (chars -> 'b * chars) -> chars -> 'b list * chars;
  1.1749 -\<close> ML \<open>
  1.1750 -op @@@ : ('a -> 'b list * 'c) * ('c -> 'b list * 'd) -> 'a -> 'b list * 'd;
  1.1751 -op ::: : ('a -> 'b * 'c) * ('c -> 'b list * 'd) -> 'a -> 'b list * 'd;
  1.1752 -op --  : ('a -> 'b * 'c) * ('c -> 'd * 'e) -> 'a -> ('b * 'd) * 'e
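(* Editorial sketch, an assumption built only from the combinators typed above: a
   non-empty string list can also be scanned as one string followed by repeated
   (comma |-- string) pairs; unlike string_list this drops the "," tokens: *)
val string_list1 = tokenize_string ::: Scan.repeat (comma_spaced |-- tokenize_string);
string_list1: chars -> T list * chars;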
  1.1753 -\<close> ML \<open>
  1.1754 -\<close> 
  1.1755 -
  1.1756 -subsubsection \<open>adapt Fdl_Lexer.scan to a tokenizer\<close>
  1.1757 -ML \<open>
  1.1758 -\<close> ML \<open> Fdl_Lexer.scan; (*<ctrl><mouse-click> jumps to the code*) (*14*) 
  1.1759 -\<close> ML \<open> (*the first top down trial fails..*)
  1.1760 -val xxx =
  1.1761 -(*fun scan (*header comment*) =
  1.1762 -( *!!! "bad header" header --| whitespace --*)
  1.1763 -  Scan.repeat (Scan.unless (Scan.one is_char_eof)
  1.1764 -    (!!! "bad input"
  1.1765 -       ( (*comment
  1.1766 -        ||*)(keyword "For" -- whitespace1) |--
  1.1767 -              Scan.repeat1 (Scan.unless (keyword ":") any) --|
  1.1768 -              keyword ":" >> make_token Traceability             
  1.1769 -        || Scan.max leq_token
  1.1770 -             (Scan.literal lexicon >> make_token Keyword)
  1.1771 -             (   long_identifier >> make_token Long_Ident
  1.1772 -              || identifier >> make_token Ident)
  1.1773 -        (*|| number >> make_token Number*)) --|
  1.1774 -     whitespace) );
  1.1775 -xxx: chars -> T list * chars;
   1.1776 -\<close> ML \<open> (*.. but we see that the scanner delivers one token per call..*)
  1.1777 -\<close> ML \<open>
  1.1778 -\<close> ML \<open> (*we want a string like "GleichungsVariablen [c, c_2, c_3, c_4]" in ONE token*)
  1.1779 -val chars = Fdl_Lexer.explode_pos (form_single_str ^ ",") Position.start;
  1.1780 -(** )
  1.1781 -xxx chars;
  1.1782 -( ** )ERROR exception ABORT fn raised (line 109 of "General/scan.ML")( **)
  1.1783 -\<close> ML \<open>
  1.1784 -\<close> ML \<open>
  1.1785 -\<close> ML \<open> (*we have to tokenize [ and ] and , explicitly..*)
  1.1786 -val xxx =
  1.1787 -  keyword "[" -- whitespace --
  1.1788 -  Scan.repeat (tokenize_string || comma_spaced) --
  1.1789 -  keyword "]"
  1.1790 -\<close> ML \<open>
  1.1791 -val chars = Fdl_Lexer.explode_pos (form_model_str' ^ ".") Position.start;
  1.1792 -xxx chars; (* =    AGAIN WE GET AN UNPLEASANT MIXTURE..
  1.1793 -   (((([("[", {line=1, offset=1})], []),
  1.1794 -      [Token (Comment, "Traegerlaenge L", {line=1, offset=3}), Token (Keyword, ",", {line=1, offset=19}), Token (Comment, "Streckenlast q_0", {line=1, offset=21}), Token (Keyword, ",", {line=1, offset=38}),
  1.1795 -       Token (Comment, "Biegelinie y", {line=1, offset=40}), Token (Keyword, ",", {line=1, offset=53}), Token (Comment, "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", {line=1, offset=55}),
  1.1796 -       Token (Keyword, ",", {line=1, offset=120}), Token (Comment, "FunktionsVariable x", {line=1, offset=122}), Token (Keyword, ",", {line=1, offset=142}),
  1.1797 -       Token (Comment, "GleichungsVariablen [c, c_2, c_3, c_4]", {line=1, offset=145}), Token (Keyword, ",", {line=1, offset=184}), Token (Comment, "AbleitungBiegelinie dy", {line=1, offset=186})]),
  1.1798 -     [("]", {line=1, offset=209})]),
  1.1799 -    [(".", {line=1, offset=210})])*)
  1.1800 -\<close> ML \<open>
  1.1801 -\<close> ML \<open>
  1.1802 -\<close> ML \<open> (* IDEA COMBINING (*8*) and (*13*) for (*15*):
  1.1803 -          we do NOT tokenize the whole (string) list, but tokenize_string FIRST *)
  1.1804 -val scan =
  1.1805 -(*fun scan (*header comment*) =
  1.1806 -( *!!! "bad header" header --| whitespace --*)
  1.1807 -  Scan.repeat (Scan.unless (Scan.one is_char_eof)
  1.1808 -    (!!! "bad input"
  1.1809 -       (   tokenize_string                              (*.. THIS MUST BE FIRST *)
  1.1810 -           (*comment
  1.1811 -        ||(keyword "For" -- whitespace1) |--
  1.1812 -              Scan.repeat1 (Scan.unless (keyword ":") any) --|
  1.1813 -              keyword ":" >> make_token Traceability*)
  1.1814 -        ||Scan.max leq_token
  1.1815 -             (Scan.literal lexicon >> make_token Keyword)
  1.1816 -             (   long_identifier >> make_token Long_Ident
  1.1817 -              || identifier >> make_token Ident)
  1.1818 -      (*|| number >> make_token Number*) ) --|
  1.1819 -     whitespace) );
  1.1820 -scan: chars -> T list * chars;
  1.1821 -\<close> ML \<open>
  1.1822 -val chars = Fdl_Lexer.explode_pos (form_single_str ^ ",") Position.start;
  1.1823 -(** )
  1.1824 -xxx chars;
  1.1825 -( ** )ERROR exception ABORT fn raised (line 109 of "General/scan.ML")( **)
  1.1826 -\<close> ML \<open> (*BUT WITH THE WRAPPER IN tokenize THAT WORKS..*)
  1.1827 -(*fun tokenize_formalise pos str =   KEEP IDENTIFIERS CLOSE TO SPARK..*)
  1.1828 -fun scan' pos str =
  1.1829 -(Scan.finite char_stopper
  1.1830 -    (Scan.error scan) (explode_pos str pos));
  1.1831 -\<close> ML \<open>
  1.1832 -val (toks, _) = scan' Position.start formalise_str
  1.1833 -\<close> ML \<open>
   1.1834 -(*it =       ----------------------------------------------------------------- IS PERFECT ! ! ! !
  1.1835 -   ([Token (Keyword, "[", {line=1, offset=1}), 
  1.1836 -       Token (Keyword, "(", {line=1, offset=2}), 
  1.1837 -         Token (Keyword, "[", {line=2, offset=6}), 
  1.1838 -           Token (Comment, "Traegerlaenge L", {line=2, offset=8}), Token (Keyword, ",", {line=2, offset=24}), Token (Comment, "Streckenlast q_0", {line=2, offset=27}), Token (Keyword, ",", {line=2, offset=44}), Token (Comment, "Biegelinie y", {line=2, offset=47}), Token (Keyword, ",", {line=2, offset=60}), Token (Comment, "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", {line=3, offset=67}), Token (Keyword, ",", {line=3, offset=132}), Token (Comment, "FunktionsVariable x", {line=4, offset=139}), Token (Keyword, ",", {line=4, offset=159}), Token (Comment, "GleichungsVariablen [c, c_2, c_3, c_4]", {line=4, offset=162}), Token (Keyword, ",", {line=4, offset=201}), Token (Comment, "AbleitungBiegelinie dy", {line=5, offset=208}), 
  1.1839 -         Token (Keyword, "]", {line=5, offset=231}), 
  1.1840 -       Token (Keyword, ",", {line=5, offset=232}),
  1.1841 -         Token (Keyword, "(", {line=6, offset=236}), 
  1.1842 -           Token (Comment, "Biegelinie", {line=6, offset=238}), 
  1.1843 -         Token (Keyword, ",", {line=6, offset=249}), 
  1.1844 -           Token (Keyword, "[", {line=6, offset=251}),
  1.1845 -             Token (Comment, "Biegelinien", {line=6, offset=253}), 
  1.1846 -           Token (Keyword, "]", {line=6, offset=265}), 
  1.1847 -         Token (Keyword, ",", {line=6, offset=266}), 
  1.1848 -           Token (Keyword, "[", {line=6, offset=268}),
  1.1849 -             Token (Comment, "IntegrierenUndKonstanteBestimmen2", {line=6, offset=270}), 
  1.1850 -           Token (Keyword, "]", {line=6, offset=304}), 
  1.1851 -         Token (Keyword, ")", {line=6, offset=305}), 
  1.1852 -       Token (Keyword, ")", {line=7, offset=307}),
  1.1853 -     Token (Keyword, "]", {line=7, offset=308})],
  1.1854 -    [])
   1.1855 -\-- ^^ chars EXHAUSTED, PERFECT *)
  1.1856 -\<close> ML \<open>
  1.1857 -\<close> ML \<open>
  1.1858 -\<close>
  1.1859 -
  1.1860 -subsubsection \<open>Implement the parser\<close>
  1.1861 -ML \<open>
  1.1862 -\<close> ML \<open> (*the only difference between Parse and Fdl_Parser is Token.T versus Fdl_Lexer.T *)
  1.1863 -Parse.enum1: 
  1.1864 -  string ->                      'a parser                                  -> 'a list parser;
  1.1865 -Parse.enum1: 
  1.1866 -  string -> (Token.T     list -> 'a * Token.T     list) -> Token.T list     -> 'a list * Token.T     list;
  1.1867 -Fdl_Parser.enum1: 
  1.1868 -  string -> (Fdl_Lexer.T list -> 'a * Fdl_Lexer.T list) -> Fdl_Lexer.T list -> 'a list * Fdl_Lexer.T list;
  1.1869 -\<close> ML \<open>
  1.1870 -\<close> ML \<open> (*we start with the initial parentheses..*)
  1.1871 -val xxx =
  1.1872 -  Fdl_Parser.$$$ "[" --
  1.1873 -    Fdl_Parser.$$$ "(" --
  1.1874 -      Fdl_Parser.$$$ "[" |-- 
  1.1875 -        Fdl_Parser.!!! (Fdl_Parser.list1 Fdl_Parser.identifier --| 
  1.1876 -      Fdl_Parser.$$$ "]")
  1.1877 -\<close> ML \<open> (*need string_list_parser instead ^^^^^^^^^^^^^^^^^^^^^*)
  1.1878 -\<close> ML \<open> (*..vvvvvvvvvv is our model..*)
  1.1879 -Fdl_Parser.identifier: T list -> string * T list;
  1.1880 -\<close> ML \<open> (*we have..*)
  1.1881 -tokenize_string      : chars -> T * chars; (*we need:
  1.1882 -parse_string         : T list -> string * T list;    *)
  1.1883 -\<close> ML \<open>
  1.1884 -\<close> ML \<open> (*in analogy to Fdl_Parser ..*)
  1.1885 -val identifier = Fdl_Parser.group "identifier"
  1.1886 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Ident) >>
  1.1887 -     Fdl_Lexer.content_of);
  1.1888 -\<close> ML \<open> (*.. we implement..*)
  1.1889 -val parse_string = Fdl_Parser.group "string"
  1.1890 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Comment) >>
  1.1891 -     Fdl_Lexer.content_of);
  1.1892 -parse_string         : T list -> string * T list;
   1.1893 -\<close> ML \<open> (*we copy the structure from the trial above *)
  1.1894 -val xxx =
  1.1895 -  Fdl_Parser.$$$ "[" --
  1.1896 -    Fdl_Parser.$$$ "(" --
  1.1897 -      (Fdl_Parser.$$$ "[" |-- Fdl_Parser.!!! (
  1.1898 -        Fdl_Parser.list1 parse_string --| 
  1.1899 -      Fdl_Parser.$$$ "]")) --
  1.1900 -    Fdl_Parser.$$$ "," --
  1.1901 -      Fdl_Parser.$$$ "(" --
  1.1902 -        parse_string --                     (* "Biegelinie" *)
  1.1903 -      Fdl_Parser.$$$ "," --
  1.1904 -        (Fdl_Parser.$$$ "[" |-- Fdl_Parser.!!! (
  1.1905 -          Fdl_Parser.list1 parse_string --| (* ["Biegelinien"] *)
  1.1906 -        Fdl_Parser.$$$ "]")) --
  1.1907 -      Fdl_Parser.$$$ "," --
  1.1908 -        (Fdl_Parser.$$$ "[" |-- Fdl_Parser.!!! (
  1.1909 -          Fdl_Parser.list1 parse_string --| (* ["IntegrierenUndKonstanteBestimmen2"] *)
  1.1910 -        Fdl_Parser.$$$ "]")) --
  1.1911 -      Fdl_Parser.$$$ ")" --
  1.1912 -    Fdl_Parser.$$$ ")" --
  1.1913 -  Fdl_Parser.$$$ "]"
  1.1914 -\<close> ML \<open> (*.. this compiles correctly, but we modularise into parse_form_model..*)
  1.1915 -val parse_form_model =
  1.1916 -  (Fdl_Parser.$$$ "[" |-- Fdl_Parser.!!! (
  1.1917 -    Fdl_Parser.list1 parse_string --| 
  1.1918 -  Fdl_Parser.$$$ "]"));
  1.1919 -parse_form_model: T list -> string list * T list;
  1.1920 -\<close> ML \<open>
  1.1921 -type form_model =           string list;
  1.1922 -parse_form_model: T list -> form_model   * T list
  1.1923 -\<close> ML \<open>
  1.1924 -\<close> ML \<open> (*.. and into parse_form_refs..*)
  1.1925 -val parse_form_refs = 
  1.1926 -  Fdl_Parser.$$$ "(" --
  1.1927 -    parse_string --                     (* "Biegelinie" *)
  1.1928 -  Fdl_Parser.$$$ "," --
  1.1929 -    (Fdl_Parser.$$$ "[" |-- Fdl_Parser.!!! (
  1.1930 -      Fdl_Parser.list1 parse_string --| (* ["Biegelinien"] *)
  1.1931 -    Fdl_Parser.$$$ "]")) --
  1.1932 -  Fdl_Parser.$$$ "," --
  1.1933 -    (Fdl_Parser.$$$ "[" |-- Fdl_Parser.!!! (
  1.1934 -      Fdl_Parser.list1 parse_string --| (* ["IntegrierenUndKonstanteBestimmen2"] *)
  1.1935 -    Fdl_Parser.$$$ "]")) --
  1.1936 -  Fdl_Parser.$$$ ")";
  1.1937 -parse_form_refs: T list -> ((((((string * string) * string) * string list) * string) * string list) * string) * T list;
  1.1938 -\<close> ML \<open>
  1.1939 -type form_refs = (((((string * string) * string) * string list) * string) * string list) * string;
  1.1940 -parse_form_refs: T list -> form_refs * T list; (*..how can we use this type --------------vvvvv*)
  1.1941 -\<close> ML \<open>
   1.1942 -\<close> ML \<open> (*we combine parse_form_model and parse_form_refs..*)
  1.1943 -(*val parse_formalise =   KEEP IDENTIFIERS CLOSE TO SPARK..*)
  1.1944 -val vcs =
  1.1945 -  Fdl_Parser.!!! (
  1.1946 -    Fdl_Parser.$$$ "[" --
  1.1947 -      Fdl_Parser.$$$ "(" --
  1.1948 -        parse_form_model --
  1.1949 -      Fdl_Parser.$$$ "," --
  1.1950 -        parse_form_refs --
  1.1951 -      Fdl_Parser.$$$ ")" --
  1.1952 -    Fdl_Parser.$$$ "]");
  1.1953 -(*                                      vvvvvvvvvv*)
  1.1954 -vcs: T list -> ((((((string * string) * form_model) * string) * (*------------refs..*)
  1.1955 -  ((((((string * string) * string) * string list) * string) * string list) * string)) * string) * string) * T list;
  1.1956 -\<close> ML \<open>
  1.1957 -\<close> ML \<open> (*we use code from the parser..*)
  1.1958 -val (parsed', []) = (*TODO *##* *)
  1.1959 -Scan.finite Fdl_Lexer.stopper (Scan.error (Fdl_Parser.!!! vcs)) toks;
  1.1960 -(*see SPARK 99, but ..^^^^^^^ we don't need that ---------vvv   vvvv*)
  1.1961 -\<close> ML \<open>
  1.1962 -val (parsed', []) =                                       vcs   toks;
  1.1963 -\<close> ML \<open> (*..however, we adopt   vvvvvvvvvvvvvvvvvvvvvvvvvv error handling*)
  1.1964 -val (parsed', []) =           (Scan.error (Fdl_Parser.!!! vcs)) toks;
  1.1965 -\<close> ML \<open>
  1.1966 -case parsed' of
  1.1967 - ((((( (
  1.1968 -  "[", 
  1.1969 -    "("),
  1.1970 -      [ "Traegerlaenge L", "Streckenlast q_0", "Biegelinie y", 
  1.1971 -        "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", "FunktionsVariable x", 
  1.1972 -        "GleichungsVariablen [c, c_2, c_3, c_4]", "AbleitungBiegelinie dy"
  1.1973 -      ]),
  1.1974 -    ","), ((((((
  1.1975 -      "(", 
  1.1976 -        "Biegelinie"), 
  1.1977 -      ","), 
  1.1978 -        ["Biegelinien"]), 
  1.1979 -      ","), 
  1.1980 -        ["IntegrierenUndKonstanteBestimmen2"]), 
  1.1981 -      ")")),
  1.1982 -    ")"),
  1.1983 -  "]") => ()
  1.1984 -| _ => error "parse_formalise CHANGED";
  1.1985 -\<close> ML \<open>
   1.1986 -\<close> ML \<open> (*we read out Formalise.T from the parser's result..*)
  1.1987 -fun read_out_formalise ((((( (
  1.1988 -  "[", 
  1.1989 -    "("),
  1.1990 -      form_model: TermC.as_string list),
  1.1991 -    ","), ((((((
  1.1992 -      "(", 
  1.1993 -        thy_id: ThyC.id), 
  1.1994 -      ","), 
  1.1995 -        probl_id: Problem.id), 
  1.1996 -      ","), 
  1.1997 -        meth_id: Method.id), 
  1.1998 -      ")")),
  1.1999 -    ")"),
  1.2000 -  "]") = [(form_model, (thy_id, probl_id, meth_id))]
  1.2001 -| read_out_formalise _ =
  1.2002 -  raise ERROR "read_out_formalise: WRONGLY parsed";
  1.2003 -read_out_formalise:
  1.2004 -  (((((string * string) * TermC.as_string list) * string) * 
  1.2005 -      ((((((string * ThyC.id) * string) * Problem.id) * string) * Method.id) * string)) * string) * string ->
  1.2006 -    (TermC.as_string list * (ThyC.id * Problem.id * Method.id)) list;
  1.2007 -\<close> ML \<open>
  1.2008 -type formalise = (((((string * string) * TermC.as_string list) * string) *
  1.2009 -  ((((((string * ThyC.id) * string) * Problem.id) * string) * Method.id) * string)) * string) * string;
  1.2010 -read_out_formalise: formalise -> Formalise.T list
  1.2011 -\<close> ML \<open>
  1.2012 -if (read_out_formalise parsed' : Formalise.T list ) =
  1.2013 -  [(["Traegerlaenge L", "Streckenlast q_0", "Biegelinie y",
  1.2014 -     "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", "FunktionsVariable x", 
  1.2015 -     "GleichungsVariablen [c, c_2, c_3, c_4]", "AbleitungBiegelinie dy"],
  1.2016 -   ("Biegelinie", ["Biegelinien"], ["IntegrierenUndKonstanteBestimmen2"]))]
  1.2017 -then () else error "read_out_formalise CHANGED";
  1.2018 -\<close> ML \<open>
  1.2019 -\<close>
  1.2020 +section \<open>Adapt Sledgehammer to write Problem ("Biegelinie", ["Biegelinien"]) to thy\<close>
  1.2021  
  1.2022  end