src/Tools/isac/BridgeJEdit/Calculation.thy
author Walther Neuper <walther.neuper@jku.at>
Wed, 04 Nov 2020 09:59:30 +0100
changeset 60097 0aa54181d7c9
parent 60096 c161c3af5b8d
child 60099 2b9b4b38ab96
permissions -rw-r--r--
separate code for Example from spark_open, resolve name clash

notes
(1) imports HOL-SPARK.SPARK requires attention when switching Build_Isac .. Test_Isac,
see Calculation.thy
(2) Isabelle's Specification was covered by SpecificationC
     1 (*  Title:      src/Tools/isac/BridgeJEdit/Calculation.thy
     2     Author:     Walther Neuper, JKU Linz
     3     (c) due to copyright terms
     4 
     5 Outer syntax for Isabelle/Isac. Compare src/Pure/Pure.thy
     6 *)
     7 
     8 theory Calculation
     9   imports "~~/src/Tools/isac/MathEngine/MathEngine" (** )"HOL-SPARK.SPARK"( **HIDE for Test_Isac*)
    10 keywords
    11   "Example" :: thy_load ("str") and (* "spark_vc" :: thy_goal *)
    12   "Problem" and (* root-problem + recursively in Solution *)
    13   "Specification" "Model" "References" "Solution" and (* structure only *)
    14   "Given" "Find" "Relate" "Where" and (* await input of term *)
    15   "RTheory" (* await input of string; "R" distinguishes "Problem".."RProblem" *)
    16   "RProblem" "RMethod" and (* await input of string list *)
    17   "Tactic" (* optional for each step 0..end of calculation *)
    18 begin
    19 
    20 ML_file parseC.sml
    21 
    22 section \<open>derive Outer_Syntax.command..Example from ..spark_open\<close>
(* NOTE(review): everything in this section is exploratory scaffolding; the "text" blocks
   below are inactive snippets kept for documentation, and the "(*exception FAIL ...*)"
   comments record observed failures of the corresponding trial. *)
    23 subsection \<open>trials 1\<close>
    24 ML \<open>
    25 val form_single_str = (
    26   "(                                                                             " ^
    27   "  [\"Traegerlaenge L\", \"Streckenlast q_0\", \"Biegelinie y\",               " ^
    28 	"    \"Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]\",     " ^
    29 	"    \"FunktionsVariable x\", \"GleichungsVariablen [c, c_2, c_3, c_4]\",      " ^
    30   "    \"AbleitungBiegelinie dy\"],                                              " ^
    31   "  (\"Biegelinie\", [\"Biegelinien\"], [\"IntegrierenUndKonstanteBestimmen2\"])" ^
    32   ")")
    33 \<close> ML \<open>
    34 \<close> text \<open> (*/------------------------ def. Fdl_Lexer.$$$ ----------------------------------\*)
    35 val form_single =
    36   Fdl_Lexer.$$$ "(" --
    37     Fdl_Lexer.$$$ "[" (* |-- Parse.list1 Parse.string --| Parse.$$$ "]") (* Formalise.model *) --*)
    38 \<close> text \<open>
    39 form_single: (string * Position.T) list ->
    40   ((string * Position.T) list * (string * Position.T) list) * (string * Position.T) list
    41 \<close> text \<open> (*\------------------------ def. Fdl_Lexer.$$$ ----------------------------------*)
    42 \<close> text \<open>
    43 form_single [("(", Position.none), ("[", Position.none)](* = (([("(", {})], [("[", {})]), [])*)
    44 (*          ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^   ...........................OK: ^^ *)
    45 \<close> text \<open>
    46                       (* |-- Parse.list1 Parse.string --| Parse.$$$ "]") (* Formalise.model *) --*)
    47   ..would need to understand Fdl_Lexer + Fdl_Parser ?!?
    48 \<close> ML \<open>
    49 \<close> ML \<open> (*/------------------------ Resources.parse_files -------------------------------\*)
    50 Resources.provide_parse_files "spark_open"
    51 \<close> ML \<open>
    52 Resources.parse_files "spark_open":                 (theory -> Token.file list) parser;
    53 Resources.parse_files "spark_open": Token.T list -> (theory -> Token.file list) * Token.T list;
    54 \<close> ML \<open>
    55 val toks = ParseC.tokenize form_single_str;
    56 \<close> text \<open>
    57              (Resources.parse_files "Example") toks;
    58 ParseC.parse (Resources.parse_files "Example") toks;
    59 (*exception FAIL (SOME fn) raised (line 152 of "General/scan.ML")*)
    60 \<close> ML \<open> (*\------------------------ Resources.parse_files -------------------------------/*)
    61 \<close> ML \<open>
    62 \<close> ML \<open>
    63 \<close> ML \<open> (*/------------------------ Parse.path ------------------------------------------\*)
    64 Parse.path: string parser;
    65 \<close> ML \<open>
    66 val toks = ParseC.tokenize
    67   "/usr/local/isabisac/src/HOL/SPARK/Examples/Gcd/greatest_common_divisor/g_c_d.siv"
    68 \<close> text \<open>
    69 (Scan.ahead Parse.not_eof -- Parse.path) toks
    70 (*exception FAIL (SOME fn) raised (line 152 of "General/scan.ML")*)
    71 \<close> ML \<open> (*\------------------------ Parse.path ------------------------------------------/*)
    72 \<close> ML \<open>
    73 \<close> 
    74 
    75 subsection \<open>code for spark_open\<close>
    76 text \<open>
    77   We change the code for Example only minimally, until everything works.
    78 
    79   The code is separated from Calculation.thy into files cp_spark_vcs.ML and cp_spark_commands.ML, 
    80   because text\<open>...\<close> and (*...*) for const_name>\<open>...\<close> etc and @ {thms ...} cause errors.
    81 
    82   Thus for Test_Isac (**)ML_file "cp_spark_vcs.ML", ""(*cp_spark_commands.ML*) must be commented out.
    83 \<close>
    84 (*HIDE for Test_Isac** )
    85 ML_file "cp_spark_vcs.ML"
    86 
    87 ML_file "cp_spark_commands.ML"
    88 ( **HIDE for Test_Isac*)
    89 
    90 subsubsection \<open>cp. HOL/SPARK/Tools/spark_vcs.ML\<close>
    91  ML \<open>
    92 \<close> ML \<open>
    93 (*/--------------------------------- cp_spark_vcs.ML -----------------------------------------\*)
(* NOTE(review): empty placeholder — apparently the copied content of spark_vcs.ML is meant
   to go between these two markers; confirm against cp_spark_vcs.ML referenced above. *)
    94 (*\--------------------------------- cp_spark_vcs.ML -----------------------------------------/*)
    95 \<close> ML \<open>
    96 \<close>
    97 
    98 subsubsection \<open>cp. HOL/SPARK/Tools/spark_commands.ML\<close>
    99 ML \<open>
   100 \<close> ML \<open>
   101 (*/--------------------------------- cp_spark_commands.ML ------------------------------------\*)
(* NOTE(review): empty placeholder — apparently the copied content of spark_commands.ML is
   meant to go between these two markers; confirm against cp_spark_commands.ML referenced above. *)
   102 (*\--------------------------------- cp_spark_commands.ML ------------------------------------/*)
   103 \<close> ML \<open>
   104 \<close>
   105 
   106 section \<open>setup command_keyword \<close>
   107 subsection \<open>hide for development, use for Test_Isac\<close>
   108 ML \<open>
(* NOTE(review): local stand-ins for Fdl_Lexer/SPARK_Commands names, so the theory can be
   processed without the HOL-SPARK import (see the "HIDE for Test_Isac" toggle in the
   theory header) — TODO confirm intended build configuration. *)
   109 type chars = (string * Position.T) list;
   110 val e_chars = []: (string * Position.T) list;
   111 
(* stub: matches the argument shape of the original spark_open, but ignores all three
   arguments and simply returns the static background theory. *)
   112 fun spark_open (_: ((*Fdl_Lexer.*)chars -> 'a * (*Fdl_Lexer.*)chars)) 
   113     (_: ('b -> Token.file list * theory) * string) (_: 'b) = @{theory}
   114 
(* dummy values standing in for the banner/date/time components of a *.siv header *)
   115 type banner = string * string * string;               val e_banner = ("b1", "b2", "b3")
   116 type date = string * string * string;                 val e_date = ("d1", "d2", "d3")
   117 type time = string * string * string * string option; val e_time = ("t1", "t2", "t3", SOME "t4")
   118 
(* stub: ignores its input and returns the fixed dummy header tuple together with an
   empty remainder — presumably mimicking the result shape of Fdl_Lexer.siv_header. *)
   119 fun siv_header (_: chars) = 
   120   ((e_banner, (e_date, e_time), (e_date, e_time), ("", "")), e_chars);
   121 \<close> ML \<open>
   122 \<close> 
   123 subsection \<open>def.for Build_Isac; for Test_Isac outcomment SPARK_Commands, Fdl_Lexer below\<close>
   124 ML \<open>
   125 \<close> ML \<open>
(* Registers the outer-syntax command "Example".  Of the two ">>" lines below, the one
   bracketed with "(**)...(**)"  is active and uses the local stubs spark_open/siv_header
   defined earlier in this theory; the one bracketed with "(** )...( **)" is disabled and
   would use the original Fdl_Lexer.siv_header from HOL-SPARK. *)
   126 val _ =                                                  
   127   Outer_Syntax.command \<^command_keyword>\<open>Example\<close> "start Isac Calculation from a Formalise-file"
   128     (*preliminary from spark_open*)
   129     (Resources.provide_parse_files "spark_open" -- Parse.parname
   130 (** )  >> (Toplevel.theory o ((*SPARK_Commands.*)spark_open Fdl_Lexer.siv_header)));( **)
   131 (**)  >> (Toplevel.theory o ((*SPARK_Commands.*)spark_open (*Fdl_Lexer.*)siv_header)));(**)
   132 \<close> ML \<open>
   133 \<close>
   134 
   135 section \<open>Test: compare Example .. spark_open\<close>
(* NOTE(review): both test invocations below are kept inside comment brackets; the
   "Output + ERROR" comments record the behavior observed when they were last run. *)
   136 (** )
   137 Example \<open>/usr/local/isabisac/src/Tools/isac/Examples/exp_Statics_Biegel_Timischl_7-70\<close> ( *.str*)
   138 (*Output + ERROR:
   139 {a = "//--- ##1# spark_open", files = fn, header = fn, prfx = ""} (line 14 of "/usr/local/isabisac/src/HOL/SPARK/Tools/spark_commands.ML") 
   140 {a = "##1# spark_open: before call <files thy>, files = siv_header !"} (line 18 of "/usr/local/isabisac/src/HOL/SPARK/Tools/spark_commands.ML") 
   141 {a = "### provide_parse_files", fs =
   142  [{digest = "b34ac56c90f483e9cdd0f14dc45d5f703e40aa26", lines =
   143    ["[", "  (", "    [\"Traegerlaenge L\", \"Streckenlast q_0\", \"Biegelinie y\",", "\t    \"Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]\",",
   144     "\t    \"FunktionsVariable x\", \"GleichungsVariablen [c, c_2, c_3, c_4]\",", "      \"AbleitungBiegelinie dy\"],", "    (\"Biegelinie\", [\"Biegelinien\"], [\"IntegrierenUndKonstanteBestimmen2\"])", "  )",
   145     "]"],
   146    pos = {line=1, offset=1, file=~~/src/Tools/isac/Examples/exp_Statics_Biegel_Timischl_7-70.str, id=-1818}, src_path =
   147    "/usr/local/isabisac/src/Tools/isac/Examples/exp_Statics_Biegel_Timischl_7-70.str"}]} (line 241 of "PIDE/resources.ML") 
   148 exception Bind raised (line 15 of "/usr/local/isabisac/src/HOL/SPARK/Tools/spark_commands.ML")*)
   149 
   150 
   151 (*set theory imports "HOL-SPARK.SPARK"..* )
   152 spark_open \<open>/usr/local/isabisac/src/HOL/SPARK/Examples/Gcd/greatest_common_divisor/g_c_d\<close>
   153 ( *Output + ERROR:
   154 {a = "//--- ##1# spark_open", files = fn, header = fn, prfx = ""} (line 14 of "/usr/local/isabisac/src/HOL/SPARK/Tools/spark_commands.ML") 
   155 {a = "##1# spark_open: before call \"files thy\", files = siv_header"} (line 18 of "/usr/local/isabisac/src/HOL/SPARK/Tools/spark_commands.ML") 
   156 No such file: "/usr/local/isabisac/src/HOL/SPARK/Examples/Gcd/greatest_common_divisor/g_c_d.str"\<^here>
   157 *)
   158 end