1.1 --- a/src/HOL/SPARK/Examples/Gcd/Greatest_Common_Divisor.thy Sat Oct 24 12:31:22 2020 +0200
1.2 +++ b/src/HOL/SPARK/Examples/Gcd/Greatest_Common_Divisor.thy Mon Oct 26 13:52:26 2020 +0100
1.3 @@ -11,7 +11,7 @@
1.4 gcd = "gcd :: int \<Rightarrow> int \<Rightarrow> int"
1.5
1.6 spark_open \<open>greatest_common_divisor/g_c_d\<close> (*..from 3 files
1.7 - ./greatest_common_divisor/g_c_d.siv, *.fdl, *.rls
1.8 + ./greatest_common_divisor/g_c_d.siv, *.fdl, *.rls; open *.siv and
1.9 find the following 2 open verification conditions (VC): *)
1.10
1.11 spark_vc procedure_g_c_d_4 (*..select 1st VC for proof: *)
2.1 --- a/src/HOL/SPARK/Tools/spark_commands.ML Sat Oct 24 12:31:22 2020 +0200
2.2 +++ b/src/HOL/SPARK/Tools/spark_commands.ML Mon Oct 26 13:52:26 2020 +0100
2.3 @@ -5,16 +5,22 @@
2.4 Isar commands for handling SPARK/Ada verification conditions.
2.5 *)
2.6
2.7 -structure SPARK_Commands: sig end =
2.8 +structure SPARK_Commands(** ): sig end( *make funs visible*) =
2.9 struct
2.10
2.11 fun spark_open header (files, prfx) thy =
2.12 let
2.13 +(**)val _ = writeln "/--- spark_open";(**)
2.14 +(**)val _ = @{print} {files = files, header = header, prfx = prfx};(**)
2.15 val ([{src_path, lines = vc_lines, pos = vc_pos, ...}: Token.file,
2.16 {lines = fdl_lines, pos = fdl_pos, ...},
2.17 {lines = rls_lines, pos = rls_pos, ...}], thy') = files thy;
2.18 val path = fst (Path.split_ext src_path);
2.19 in
2.20 +(**)@{print} {lin1 = (snd (Fdl_Parser.parse_declarations fdl_pos (cat_lines fdl_lines))),
2.21 + lin2 = (Fdl_Parser.parse_rules rls_pos (cat_lines rls_lines)),
2.22 + lin3 = (snd (Fdl_Parser.parse_vcs header vc_pos (cat_lines vc_lines)))};(**)
2.23 +(**)writeln "\\--- spark_open";(**)
2.24 SPARK_VCs.set_vcs
2.25 (snd (Fdl_Parser.parse_declarations fdl_pos (cat_lines fdl_lines)))
2.26 (Fdl_Parser.parse_rules rls_pos (cat_lines rls_lines))
2.27 @@ -100,8 +106,11 @@
2.28 val _ =
2.29 Outer_Syntax.command \<^command_keyword>\<open>spark_open\<close>
2.30 "open a new SPARK environment and load a SPARK-generated .siv file"
2.31 - (Resources.provide_parse_files "spark_open" -- Parse.parname
2.32 - >> (Toplevel.theory o spark_open Fdl_Lexer.siv_header));
2.33 + ((writeln "### start provide_parse_files -- parname";
2.34 + Resources.provide_parse_files "spark_open" -- Parse.parname)
2.35 + >> (writeln "### after >>";
2.36 + Toplevel.theory o (writeln "### start spark_open";
2.37 + spark_open Fdl_Lexer.siv_header)));
2.38
2.39 val pfun_type = Scan.option
2.40 (Args.parens (Parse.list1 Parse.name) --| Args.colon -- Parse.name);
3.1 --- a/src/HOL/SPARK/Tools/spark_vcs.ML Sat Oct 24 12:31:22 2020 +0200
3.2 +++ b/src/HOL/SPARK/Tools/spark_vcs.ML Mon Oct 26 13:52:26 2020 +0100
3.3 @@ -1064,6 +1064,48 @@
3.4 (remove (op =) d defs) (d :: sdefs)
3.5 | NONE => error ("Bad definitions: " ^ rulenames defs));
3.6
3.7 +(* output from ..
3.8 +/--- spark_open
3.9 +{files = fn, header = fn, prfx = ""} (line 14 of "/usr/local/isabisac/src/HOL/SPARK/Tools/spark_commands.ML")
3.10 +{lin1 = ===== g_c_d.fdl =====
3.11 + {consts =
3.12 + ({("integer__last", "integer"), ("integer__size", "integer"), ("natural__last", "integer"), ("natural__size", "integer"), ("integer__first", "integer"), ("natural__first", "integer"),
3.13 + ("integer__base__last", "integer"), ("natural__base__last", "integer"), ("integer__base__first", "integer"), ("natural__base__first", "integer")},
3.14 + [("integer__size", "integer"), ("integer__last", "integer"), ("integer__first", "integer"), ("natural__size", "integer"), ("natural__last", "integer"), ("natural__first", "integer"),
3.15 + ("integer__base__last", "integer"), ("integer__base__first", "integer"), ("natural__base__last", "integer"), ("natural__base__first", "integer")]),
3.16 + funs = ({("gcd", (["integer", "integer"], "integer")), ("round__", (["real"], "integer"))}, [("gcd", (["integer", "integer"], "integer")), ("round__", (["real"], "integer"))]), types = ({}, []), vars =
3.17 + ({("c", "integer"), ("d", "integer"), ("m", "integer"), ("n", "integer")}, [("d", "integer"), ("c", "integer"), ("n", "integer"), ("m", "integer")])},
3.18 + lin2 = ===== g_c_d.rls =====
3.19 + ([(("g_c_d_rules", 1), Inference_Rule ([], Funct (">=", [Ident "integer__size", Number 0]))), (("g_c_d_rules", 2), Substitution_Rule ([], Ident "integer__first", Funct ("-", [Number 2147483648]))),
3.20 + (("g_c_d_rules", 3), Substitution_Rule ([], Ident "integer__last", Number 2147483647)), (("g_c_d_rules", 4), Substitution_Rule ([], Ident "integer__base__first", Funct ("-", [Number 2147483648]))),
3.21 + (("g_c_d_rules", 5), Substitution_Rule ([], Ident "integer__base__last", Number 2147483647)), (("g_c_d_rules", 6), Inference_Rule ([], Funct (">=", [Ident "natural__size", Number 0]))),
3.22 + (("g_c_d_rules", 7), Substitution_Rule ([], Ident "natural__first", Number 0)), (("g_c_d_rules", 8), Substitution_Rule ([], Ident "natural__last", Number 2147483647)),
3.23 + (("g_c_d_rules", 9), Substitution_Rule ([], Ident "natural__base__first", Funct ("-", [Number 2147483648]))), (("g_c_d_rules", 10), Substitution_Rule ([], Ident "natural__base__last", Number 2147483647))],
3.24 + [("g_c_d_rules", [(Ident "X", [("X", "any")]), (Funct ("<=", [Ident "X", Ident "Y"]), [("X", "ire"), ("Y", "ire")]), (Funct (">=", [Ident "X", Ident "Y"]), [("X", "ire"), ("Y", "ire")])])]),
3.25 + lin3 = ===== g_c_d.siv =====
3.26 + (("procedure", "Greatest_Common_Divisor.G_C_D"),
3.27 + [("path(s) from start to run-time check associated with statement of line 8", [("procedure_g_c_d_1", ([], [("1", Ident "true")]))]),
3.28 + ("path(s) from start to run-time check associated with statement of line 8", [("procedure_g_c_d_2", ([], [("1", Ident "true")]))]),
3.29 + ("path(s) from start to assertion of line 10", [("procedure_g_c_d_3", ([], [("1", Ident "true")]))]),
3.30 + ("path(s) from assertion of line 10 to assertion of line 10",
3.31 + [("procedure_g_c_d_4",
3.32 + ([("1", Funct (">=", [Ident "c", Number 0])), ("2", Funct (">", [Ident "d", Number 0])), ("3", Funct ("=", [Funct ("gcd", [Ident "c", Ident "d"]), Funct ("gcd", [Ident "m", Ident "n"])])),
3.33 + ("4", Funct (">=", [Ident "m", Number 0])), ("5", Funct ("<=", [Ident "m", Number 2147483647])), ("6", Funct ("<=", [Ident "n", Number 2147483647])), ("7", Funct (">", [Ident "n", Number 0])),
3.34 + ("8", Funct ("<=", [Ident "c", Number 2147483647])), ("9", Funct ("<=", [Ident "...", ...])), ("10", Funct ("...", [...])), ("11", Funct ("...", [...])), ("...", ...), ...],
3.35 + [("1", Funct (">", [Funct ("-", [Ident "c", Funct ("*", [Funct ("div", [Ident "c", Ident "..."]), Ident "d"])]), Number 0])),
3.36 + ("2", Funct ("=", [Funct ("gcd", [Ident "d", Funct ("-", [Ident "c", Funct ("...", [...])])]), Funct ("gcd", [Ident "m", Ident "n"])]))]))]),
3.37 + ("path(s) from assertion of line 10 to run-time check associated with \n statement of line 11", [("procedure_g_c_d_5", ([], [("1", Ident "true")]))]),
3.38 + ("path(s) from assertion of line 10 to run-time check associated with \n statement of line 12", [("procedure_g_c_d_6", ([], [("1", Ident "true")]))]),
3.39 + ("path(s) from assertion of line 10 to run-time check associated with \n statement of line 12", [("procedure_g_c_d_7", ([], [("1", Ident "true")]))]),
3.40 + ("path(s) from start to run-time check associated with statement of line 14", [("procedure_g_c_d_8", ([], [("1", Ident "true")]))]),
3.41 + ("path(s) from assertion of line 10 to run-time check associated with \n statement of line 14", [("procedure_g_c_d_9", ([], [("1", Ident "true")]))]),
3.42 + ("path(s) from start to finish", [("procedure_g_c_d_10", ([], [("1", Ident "true")]))]),
3.43 + ("path(s) from assertion of line 10 to finish",
3.44 + [("procedure_g_c_d_11",
3.45 + ([("1", Funct (">=", [Ident "c", Number 0])), ("2", Funct (">", [Ident "...", ...])), ("3", Funct ("...", [...])), ("4", Funct ("...", [...])), ("...", ...), ...],
3.46 + [("1", Funct ("=", [Ident "d", Funct ("...", [...])]))]))])])} (line 20 of "/usr/local/isabisac/src/HOL/SPARK/Tools/spark_commands.ML")
3.47 +\--- spark_open
3.48 +*)
3.49 fun set_vcs ({types, vars, consts, funs} : decls)
3.50 (rules, _) ((_, ident), vcs) path opt_prfx thy =
3.51 let
4.1 --- a/src/Tools/isac/BridgeJEdit/BridgeJEdit.thy Sat Oct 24 12:31:22 2020 +0200
4.2 +++ b/src/Tools/isac/BridgeJEdit/BridgeJEdit.thy Mon Oct 26 13:52:26 2020 +0100
4.3 @@ -6,7 +6,7 @@
4.4 *)
4.5
4.6 theory BridgeJEdit
4.7 - imports Isac
4.8 + imports Calculation
4.9 begin
4.10
4.11 end
5.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
5.2 +++ b/src/Tools/isac/BridgeJEdit/Calculation.thy Mon Oct 26 13:52:26 2020 +0100
5.3 @@ -0,0 +1,79 @@
5.4 +(* Title:      src/Tools/isac/BridgeJEdit/Calculation.thy
5.5 + Author: Walther Neuper, JKU Linz
5.6 + (c) due to copyright terms
5.7 +
5.8 +Outer syntax for Isabelle/Isac. Compare src/Pure/Pure.thy
5.9 +*)
5.10 +
5.11 +theory Calculation
5.12 + imports "~~/src/Tools/isac/MathEngine/MathEngine" (** )"HOL-SPARK.SPARK"( *prelim.for devel.*)
5.13 +keywords
5.14 + "Example" :: thy_load ("str") and (* "spark_vc" :: thy_goal *)
5.15 + "Problem" and (* root-problem + recursively in Solution *)
5.16 + "Specification" "Model" "References" "Solution" and (* structure only *)
5.17 + "Given" "Find" "Relate" "Where" and (* await input of term *)
5.18 +  "RTheory" (* await input of string; "R" distinguishes "Problem".."RProblem" *)
5.19 + "RProblem" "RMethod" and (* await input of string list *)
5.20 + "Tactic" (* optional for each step 0..end of calculation *)
5.21 +begin
5.22 +
5.23 +ML_file parseC.sml
5.24 +
5.25 +section \<open>\<close>
5.26 +ML \<open>
5.27 +\<close> ML \<open>
5.28 +ParseC.problem (* not yet used *)
5.29 +\<close> ML \<open>
5.30 +
5.31 +(*** get data from a file and initialise a calculation ***)
5.32 +
5.33 +(** data from file (~~ spark_open) **)
5.34 +
5.35 +\<close> ML \<open>
5.36 +\<close> ML \<open> (*spark_commands.ML: Outer_Syntax.command \<^command_keyword>\<open>spark_open_vcg\<close>*)
5.37 +\<close> ML \<open> (*1st arg.of >>*)
5.38 +Resources.provide_parse_files "Example" -- Parse.parname:
5.39 + Token.T list -> ((theory -> Token.file list * theory) * string) * Token.T list;
5.40 +Resources.provide_parse_files "Example" -- Parse.parname:
5.41 + ((theory -> Token.file list * theory) * string) parser
5.42 +\<close> ML \<open>
5.43 +\<close> ML \<open> (*2nd arg.of >>*)
5.44 +type chars = (string * Position.T) list;
5.45 +val e_chars = []: (string * Position.T) list;
5.46 +
5.47 +type banner = string * string * string; val e_banner = ("b1", "b2", "b3")
5.48 +type date = string * string * string; val e_date = ("d1", "d2", "d3")
5.49 +type time = string * string * string * string option; val e_time = ("t1", "t2", "t3", SOME "t4")
5.50 +
5.51 +fun vcg_header (_: chars) = ((e_banner, SOME (e_date, e_time)), e_chars);
5.52 +(*^^^^^^^^^^^^*)
5.53 +vcg_header: chars -> (banner * (date * time) option) * chars
5.54 +\<close> ML \<open>
5.55 +fun spark_open (_: (*Fdl_Lexer.*)chars -> 'a * (*Fdl_Lexer.*)chars)
5.56 +(*^^^^^^^^^^^^*)
5.57 + (_: ('b -> Token.file list * theory) * string) (_: 'b) = @{theory};
5.58 +spark_open: (chars -> 'a * chars) -> ('b -> Token.file list * theory) * string -> 'b -> theory
5.59 +\<close> ML \<open>
5.60 +val exp_file = "~~/src/Tools/isac/Examples/exp_Statics_Biegel_Timischl_7-70.str"
5.61 +\<close> ML \<open>
5.62 +\<close> ML \<open>
5.63 +\<close> ML \<open>
5.64 +\<close> ML \<open>
5.65 +\<close> ML \<open>
5.66 +\<close>
5.67 +
5.68 +section \<open>setup command_keyword \<close>
5.69 +(** ISAC **)
5.70 +ML \<open>
5.71 +\<close> ML \<open>
5.72 +val _ =
5.73 + Outer_Syntax.command \<^command_keyword>\<open>Example\<close> "start example from file"
5.74 + (*preliminary from spark_open*)
5.75 + (Resources.provide_parse_files "spark_open" -- Parse.parname (*1st arg of >>*)
5.76 + >> (Toplevel.theory o spark_open (*Fdl_Lexer.*)vcg_header)); (*2nd arg of >>*)
5.77 +\<close> ML \<open>
5.78 +\<close>
5.79 +
5.80 +section \<open>tests, before -> test/*\<close>
5.81 +
5.82 +end
5.83 \ No newline at end of file
6.1 --- a/src/Tools/isac/BridgeJEdit/Isac.thy Sat Oct 24 12:31:22 2020 +0200
6.2 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
6.3 @@ -1,39 +0,0 @@
6.4 -(* Title: src/Tools/isac/BridgeJEdit/parseC.sml
6.5 - Author: Walther Neuper, JKU Linz
6.6 - (c) due to copyright terms
6.7 -
6.8 -Outer syntax for Isabelle/Isac. Compare src/Pure/Pure.thy
6.9 -*)
6.10 -
6.11 -theory Isac
6.12 - imports "~~/src/Tools/isac/MathEngine/MathEngine"
6.13 - keywords
6.14 - (* this has a type and thus is a "major keyword" *)
6.15 - "ISAC" :: diag
6.16 - (* the following are without type: "minor keywords" *)
6.17 - and "Problem" (* root-problem + recursively in Solution *)
6.18 - and "Specification" "Model" "References" "Solution" (* structure only *)
6.19 - and "Given" "Find" "Relate" "Where" (* await input of term *)
6.20 - and "RTheory" (* await input of string; "R" distingues "Problem".."RProblem" *)
6.21 - "RProblem" "RMethod" (* await input of string list *)
6.22 - and "Tactic" (* optionally repeated with each step 0..end of calculation *)
6.23 -begin
6.24 - ML_file parseC.sml
6.25 -
6.26 -ML \<open>
6.27 -\<close> ML \<open>
6.28 -ParseC.problem (* not yet used *)
6.29 -\<close> ML \<open>
6.30 -\<close>
6.31 -
6.32 -(** setup command_keyword ISAC **)
6.33 -ML \<open>
6.34 -\<close> ML \<open> (*try to adapt original from Makarius to problem parser*)
6.35 -val _ =
6.36 - Outer_Syntax.command @{command_keyword ISAC} "embedded ISAC language"
6.37 - (Parse.input ((**)Parse.cartouche (** )--(**) ParseC.problem( **)) >> (fn input =>
6.38 - Toplevel.keep (fn _ => ignore (ML_Lex.read_source input))))
6.39 -\<close> ML \<open>
6.40 -\<close>
6.41 -
6.42 -end
6.43 \ No newline at end of file
7.1 --- a/src/Tools/isac/Examples/README Sat Oct 24 12:31:22 2020 +0200
7.2 +++ b/src/Tools/isac/Examples/README Mon Oct 26 13:52:26 2020 +0100
7.3 @@ -5,7 +5,11 @@
7.4 *.html copied from ~/proto4/kbase/exp
7.5
7.6 exp_Statics_Biegel_Timischl_7-70.html generated from *.xml in Isac-Java
7.7 -! exp_Statics_Biegel_Timischl_7-70.sml Formalise.T list to start calculation,
7.8 +! exp_Statics_Biegel_Timischl_7-70.str Formalise.T list to start calculation,
7.9 see ~~/test/Tools/isac/Knowledge/biegelinie-4.sml
7.10 exp_Statics_Biegel_Timischl_7-70.xml generates *.html for browsing
7.11 +                                         CONTAINS DATA FOR *.html AND *.str
7.12 README this
7.13 +
7.14 +for cp:
7.15 +"~~/src/Tools/isac/Examples/exp_Statics_Biegel_Timischl_7-70.str"
7.16 \ No newline at end of file
8.1 --- a/src/Tools/isac/Examples/exp_Statics_Biegel_Timischl_7-70.sml Sat Oct 24 12:31:22 2020 +0200
8.2 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
8.3 @@ -1,9 +0,0 @@
8.4 -[
8.5 - (
8.6 - ["Traegerlaenge L", "Streckenlast q_0", "Biegelinie y",
8.7 - "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]",
8.8 - "FunktionsVariable x", "GleichungsVariablen [c, c_2, c_3, c_4]",
8.9 - "AbleitungBiegelinie dy"],
8.10 - ("Biegelinie", ["Biegelinien"], ["IntegrierenUndKonstanteBestimmen2"])
8.11 - )
8.12 -]
8.13 \ No newline at end of file
9.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
9.2 +++ b/src/Tools/isac/Examples/exp_Statics_Biegel_Timischl_7-70.str Mon Oct 26 13:52:26 2020 +0100
9.3 @@ -0,0 +1,9 @@
9.4 +[
9.5 + (
9.6 + ["Traegerlaenge L", "Streckenlast q_0", "Biegelinie y",
9.7 + "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]",
9.8 + "FunktionsVariable x", "GleichungsVariablen [c, c_2, c_3, c_4]",
9.9 + "AbleitungBiegelinie dy"],
9.10 + ("Biegelinie", ["Biegelinien"], ["IntegrierenUndKonstanteBestimmen2"])
9.11 + )
9.12 +]
9.13 \ No newline at end of file
10.1 --- a/test/Pure/Isar/Check_Outer_Syntax.thy Sat Oct 24 12:31:22 2020 +0200
10.2 +++ b/test/Pure/Isar/Check_Outer_Syntax.thy Mon Oct 26 13:52:26 2020 +0100
10.3 @@ -1,12 +1,13 @@
10.4 theory Check_Outer_Syntax
10.5 - imports Test_Parse_Isac
10.6 + imports HOL.HOL
10.7 keywords
10.8 (** )"ISAC" :: diag and ( *imported by Test_Parse_Isac*)
10.9 "forthel" :: thy_decl and (*Naproche*)
10.10 "spark_status" :: diag and (*HOL/SPARK*)
10.11 "spark_vc" :: thy_goal and (*HOL/SPARK*)
10.12 - "spark_open_vcg" :: thy_load (*HOL/SPARK*)
10.13 + "spark_open" :: thy_load (*HOL/SPARK*)
10.14 begin
10.15 +ML_file "~~/src/Tools/isac/BridgeJEdit/parseC.sml"
10.16
10.17 section \<open>Survey on checked examples\<close>
10.18 text \<open>
10.19 @@ -26,10 +27,10 @@
10.20 uses Isabelle's standard proof machinery,
10.21 so goes directly into ???
10.22 * ?
10.23 - * \<open>Outer_Syntax.command..spark_open_vcg :: thy_load\<close>
10.24 + * \<open>Outer_Syntax.command..spark_open :: thy_load\<close>
10.25 opens a "SPARK environment",
10.26 which requires verification conditions,
10.27 - * uses vcg_header parser
10.28 + * uses siv_header parser
10.29
10.30 Legend:
10.31 * headline of subsection in this file for respective example
10.32 @@ -44,7 +45,7 @@
10.33 Outer_Syntax.command @ {command_keyword ISAC} "embedded ISAC language"
10.34 /============================================================ 3rd arg of Outer_Syntax.command*)
10.35 (
10.36 - (Parse.input ((**)Parse.cartouche (**) -- (** ) ParseC.( **)problem (**)))
10.37 + (Parse.input ((**)Parse.cartouche (**) -- (**) ParseC.problem (**)))
10.38 >> (* try variants ^^^^^^^^^^^^^^^ ^^ ^^^^^^^*)
10.39 (fn input => Toplevel.keep (fn _ => ignore (ML_Lex.read_source input)))
10.40 )
10.41 @@ -64,14 +65,14 @@
10.42 : string parser;
10.43 ((**) Parse.cartouche (** )-- (** ) ParseC.( **)problem ( **))
10.44 : Token.T list -> string * Token.T list;
10.45 -((** )Parse.cartouche (**) -- (**) ParseC.( **)problem (**))
10.46 - : Token.T list -> problem * Token.T list;
10.47 -((**) Parse.cartouche (**) -- (** ) ParseC.( **)problem (**))
10.48 - : Token.T list -> cartouche_problem * Token.T list;
10.49 +\<close> ML \<open>
10.50 +((** )Parse.cartouche (**) -- ( **) ParseC.problem (**))
10.51 + : Token.T list -> ParseC.problem * Token.T list;
10.52 +\<close> ML \<open>
10.53
10.54 \<close> ML \<open> (*================= 1st arg of >> instantiates types*)
10.55 val xxx =
10.56 -(Parse.input ((**)Parse.cartouche (**) -- (** ) ParseC.( **)problem (**)))
10.57 +(Parse.input ((**)Parse.cartouche (**) -- (**) ParseC.problem (**)))
10.58 : Input.source parser;
10.59 xxx: Token.T list -> Input.source * Token.T list;
10.60 \<close> ML \<open>
10.61 @@ -326,30 +327,36 @@
10.62 \<close> ML \<open>
10.63 \<close>
10.64
10.65 -subsection \<open>Outer_Syntax.command..spark_open_vcg :: thy_load\<close>
10.66 +subsection \<open>Outer_Syntax.command..spark_open :: thy_load\<close>
10.67 ML \<open>
10.68 \<close> ML \<open> (*===== dummy cp.from SPARK/Tools/spark_commands.ML for Outer_Syntax.command..spark_open_vcg*)
10.69 type chars = (string * Position.T) list;
10.70 +val e_chars = []: (string * Position.T) list;
10.71 +
10.72 fun spark_open (_: ((*Fdl_Lexer.*)chars -> 'a * (*Fdl_Lexer.*)chars))
10.73 (_: ('b -> Token.file list * theory) * string) (_: 'b) = @{theory}
10.74 \<close> ML \<open>
10.75 spark_open: ((*Fdl_Lexer.*)chars -> 'a * (*Fdl_Lexer.*)chars) -> ('b -> Token.file list * theory) * string -> 'b -> theory
10.76 \<close> ML \<open>
10.77 \<close> ML \<open> (*===== dummy cp.from SPARK/Tools/fdl_lexer.ML for Outer_Syntax.command..spark_open_vcg*)
10.78 -fun vcg_header (_: (string * Position.T) list) =
10.79 - ((("", "", ""), NONE: ((string * string * string) * (string * string * string * string option)) option), []: (string * Position.T) list)
10.80 +type banner = string * string * string; val e_banner = ("b1", "b2", "b3")
10.81 +type date = string * string * string; val e_date = ("d1", "d2", "d3")
10.82 +type time = string * string * string * string option; val e_time = ("t1", "t2", "t3", SOME "t4")
10.83 +
10.84 +fun siv_header (_: chars) =
10.85 + ((e_banner, (e_date, e_time), (e_date, e_time), ("", "")), e_chars);
10.86 \<close> ML \<open>
10.87 -vcg_header: (string * Position.T) list ->
10.88 - ((string * string * string) * ((string * string * string) * (string * string * string * string option)) option) * (string * Position.T) list
10.89 +siv_header: chars ->
10.90 + (banner * (date * time) * (date * time) * (string * string)) * chars
10.91 \<close> ML \<open>
10.92 val _ =
10.93 - Outer_Syntax.command \<^command_keyword>\<open>spark_open_vcg\<close>
10.94 + Outer_Syntax.command \<^command_keyword>\<open>spark_open\<close>
10.95 "open a new SPARK environment and load a SPARK-generated .vcg file"
10.96 (* /============================================================ 3rd arg of Outer_Syntax.command*)
10.97 (
10.98 - Resources.provide_parse_files "spark_open_vcg" -- Parse.parname
10.99 + Resources.provide_parse_files "spark_open" -- Parse.parname
10.100 >>
10.101 - (Toplevel.theory o spark_open (*Fdl_Lexer.*)vcg_header)
10.102 + (Toplevel.theory o spark_open (*Fdl_Lexer.*)siv_header)
10.103 );
10.104 \<close> ML \<open> (*decompose 3rd arg of Outer_Syntax.command with respect to >>*)
10.105 (* 1st arg 2nd arg result
10.106 @@ -357,12 +364,12 @@
10.107 op >> : ('a -> 'b * 'c) * ('b -> 'd) -> 'a -> 'd * 'c;
10.108
10.109 \<close> ML \<open> (*================= 1st arg of >> instantiates types..*)
10.110 -val xxx = Resources.provide_parse_files "spark_open_vcg" -- Parse.parname
10.111 +val xxx = Resources.provide_parse_files "spark_open" -- Parse.parname
10.112 : ((theory -> Token.file list * theory) * string) parser;
10.113 xxx: Token.T list -> ((theory -> Token.file list * theory) * string) * Token.T list;
10.114 (* ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ *)
10.115 \<close> ML \<open>
10.116 -Resources.provide_parse_files "spark_open_vcg":
10.117 +Resources.provide_parse_files "spark_open":
10.118 (theory -> Token.file list * theory) parser;
10.119 (* ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ *)
10.120 Parse.parname: ( string) parser;
10.121 @@ -380,7 +387,7 @@
10.122 'd * Token.T list (*result*);
10.123
10.124 \<close> ML \<open> (*================= 2nd arg of >> instantiates types..*)
10.125 -(Toplevel.theory o spark_open (*Fdl_Lexer.*)vcg_header)
10.126 +(Toplevel.theory o spark_open (*Fdl_Lexer.*)siv_header)
10.127 : (theory -> Token.file list * theory) * string -> (Toplevel.transition -> Toplevel.transition)
10.128 (*^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *)
10.129 \<close> ML \<open>
10.130 @@ -397,6 +404,41 @@
10.131 (Toplevel.transition -> Toplevel.transition) parser (*result*);
10.132 (* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *)
10.133 \<close> ML \<open>
10.134 +\<close> ML \<open> (*compose 2nd arg*)
10.135 +spark_open: (chars -> 'a * chars) ->
10.136 + ('b -> Token.file list * theory) * string -> 'b -> theory
10.137 +\<close> ML \<open>
10.138 +siv_header: chars -> (banner * (date * time) * (date * time) * (string * string)) * chars
10.139 +(* ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^*)
10.140 +\<close> ML \<open>
10.141 +spark_open: (chars -> (*'a siv_header \<Rightarrow>*)(banner * (date * time) * (date * time) * (string * string)) * chars) ->
10.142 + ('b -> Token.file list * theory) * string -> 'b -> theory
10.143 +\<close> ML \<open>
10.144 +Toplevel.theory: (theory -> theory) -> Toplevel.transition -> Toplevel.transition
10.145 +(* ~~~~~~*)
10.146 +\<close> ML \<open>
10.147 +spark_open: (chars -> (*'a siv_header \<Rightarrow>*)(banner * (date * time) * (date * time) * (string * string)) * chars) ->
10.148 + ((*'b Toplevel.theory \<Rightarrow>*)theory -> Token.file list * theory) * string -> (*'b*)theory -> theory
10.149 +\<close> ML \<open>
10.150 +spark_open siv_header(*: (_a -> Token.file list * theory) * string -> _a -> theory*);
10.151 +spark_open siv_header: (theory -> Token.file list * theory) * string -> theory -> theory
10.152 +(* ~~~~~~~ *)
10.153 +\<close> ML \<open>
10.154 +\<close> ML \<open> (*appl. in 2nd arg*)
10.155 +(Toplevel.theory o spark_open (*Fdl_Lexer.*)siv_header):
10.156 + (theory -> Token.file list * theory) * string -> Toplevel.transition -> Toplevel.transition
10.157 +(* ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ ..appl. spark_open siv_header
10.158 + -> theory -> theory*)
10.159 +\<close> ML \<open>
10.160 +\<close> ML \<open>
10.161 +\<close> ML \<open>
10.162 +\<close> ML \<open>
10.163 +\<close> ML \<open>
10.164 +\<close> ML \<open>
10.165 +\<close> ML \<open>
10.166 +\<close> ML \<open>
10.167 +\<close> ML \<open> (*how the parser's arg "spark_open" strolls through the signatures..*)
10.168 +\<close> ML \<open>
10.169 \<close>
10.170
10.171 section \<open>Experiments towards Outer_Syntax.command ISAC :: diag\<close>
10.172 @@ -449,43 +491,6 @@
10.173 \<close> ML \<open>
10.174 \<close>
10.175
10.176 -subsection \<open>How are "\<open>" "\<close>" handled in Naproche .. Parse.embedded_position INSIGHT AT END\<close>
10.177 -text \<open>
10.178 - see also
10.179 - subsubsection \<open>decompose Outer_Syntax.local_theory..Naproche\<close>
10.180 - see /usr/local/Isabelle_Naproche-20190611/contrib/naproche-20190418/Isabelle/Test_WN.thy
10.181 - conclusion:
10.182 -********************************************************************************
10.183 -* check_forthel just sends to server, so tokens DO NOT COME FROM some ML code *
10.184 -********************************************************************************
10.185 -\<close> ML \<open>
10.186 -\<close> ML \<open> (* compare Parse.cartouche .. Parse.embedded (latter found in Isabelle_Naproche)*)
10.187 -Parse.cartouche (tokenize ("\<open>" ^ problem_headline_str ^ "\<close>"))
10.188 -(* = ("Problem ( \"Biegelinie\" , [\"Biegelinien\"] )", [])*)
10.189 -\<close> ML \<open>
10.190 -Parse.embedded (tokenize ("\<open>" ^ problem_headline_str ^ "\<close>"))
10.191 -(* = ("Problem ( \"Biegelinie\" , [\"Biegelinien\"] )", [])*)
10.192 -\<close> ML \<open>
10.193 -parse problem_headline (tokenize problem_headline_str );
10.194 -\<close> text \<open> (* but all these create
10.195 -exception MORE () raised (line 159 of "General/scan.ML")*)..
10.196 -
10.197 -parse problem_headline (tokenize ("\<open>" ^ problem_headline_str ^ "\<close>"));
10.198 -parse (Parse.embedded -- problem) (tokenize ("\<open>" ^ problem_headline_str ^ "\<close>"));
10.199 -
10.200 -\<close> text \<open> (*as well a Parse.cartouche by Makarius*)..
10.201 -parse (Parse.cartouche -- problem) (tokenize ("\<open>" ^ problem_headline_str ^ "\<close>"));
10.202 -\<close> ML \<open>
10.203 -\<close> ML \<open> (*this drops "\<open>" "\<close>", found in Naproche/naproche.ML*)
10.204 -Parse.embedded_input: Input.source parser
10.205 -\<close> ML \<open> (*BUT Outer_Syntax.command REQUIRES ..*)
10.206 -Parse.cartouche: string parser
10.207 -\<close> ML \<open> (* ..AND THERE IS NO OPORTUNITY FOR "problem parser"*)
10.208 -(*requires: "overwrite above "fun problem" for subsequent checks"..*)
10.209 -problem(**): problem parser
10.210 -\<close> ML \<open>
10.211 -\<close>
10.212 -
10.213 subsection \<open>transfer handling "\<open>" "\<close>" from Naproche to ISAC ?\<close>
10.214 text \<open>
10.215 this combines
10.216 @@ -520,27 +525,6 @@
10.217 ML_file ""
10.218 ( **)
10.219
10.220 -subsection \<open>transfer parse from command..spark_open_vcg to ISAC ?\<close>
10.221 -text \<open>
10.222 - we want:
10.223 - (
10.224 - parse problem
10.225 - >> (*from command ISAC + spark_status*)
10.226 - (fn input => Toplevel.keep (fn _ => ignore (/--ML_Lex.read_source--/ input)))
10.227 - )
10.228 -\<close> text \<open>
10.229 -op >>
10.230 -: ( problem parser) (*1st arg*)*
10.231 -(* ^^^^^^^ *)
10.232 - (problem -> (Toplevel.transition -> Toplevel.transition)) (*2nd arg*)->
10.233 -(* ^^^^^^^ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*)
10.234 - (Toplevel.transition -> Toplevel.transition) parser (*result*);
10.235 -(* ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ *)
10.236 -\<close> ML \<open>
10.237 -\<close> ML \<open>
10.238 -\<close>
10.239 -
10.240 -
10.241 section \<open>xxx\<close>
10.242 subsection \<open>xxx\<close>
10.243 ML \<open>
11.1 --- a/test/Pure/Isar/README Sat Oct 24 12:31:22 2020 +0200
11.2 +++ b/test/Pure/Isar/README Mon Oct 26 13:52:26 2020 +0100
11.3 @@ -11,7 +11,7 @@
11.4 Test_Parse_Isac.thy definitions for keywords
11.5 ^^^^^^^^^^^^^^^^^^^| Tools and Goals
11.6 | Stepwise extending parser: Problem headline .. Problem
11.7 - +----> part.cp --> ~~/src/Tools/isac/BridgeJEdit/parseC.sml
11.8 + +----> cp --> ~~/src/Tools/isac/BridgeJEdit/parseC.sml
11.9 Test_Parsers.thy scanner combinators from src/Pure/General/scan.ML
11.10 parse compound suffixes: Lists, terms, -- Scan.optional, ...
11.11 Test_Parse_Term.thy parse following Stefan Berghofer at Isabelle Developer Workshop 2010
12.1 --- a/test/Tools/isac/Test_Isac.thy Sat Oct 24 12:31:22 2020 +0200
12.2 +++ b/test/Tools/isac/Test_Isac.thy Mon Oct 26 13:52:26 2020 +0100
12.3 @@ -77,10 +77,10 @@
12.4 "~~/test/Pure/Isar/Keyword_ISAC.thy" (* Malformed theory import, "keywords" ?!? *)
12.5 "~~/test/Pure/Isar/Test_Parse_Isac.thy" (* Malformed theory import, "keywords" ?!? *)
12.6 "~~/test/Pure/Isar/Test_Parsers_Cookbook.thy" (* Malformed theory import ?!? *)
12.7 - "~~/test/Pure/Isar/Theory_Commands" (*Duplicate outer syntax command "ISAC" *)
12.8 + "~~/test/Pure/Isar/Theory_Commands" (* Duplicate outer syntax command "ISAC" *)
12.9 + "~~/test/Pure/Isar/Downto_Synchronized" (* re-defines / breaks structures !!! *)
12.10 \--- .. these work independently, but create problems here *)
12.11 (**)"~~/test/Pure/Isar/Check_Outer_Syntax"
12.12 -(** )"~~/test/Pure/Isar/Downto_Synchorinzed"( *TODO: errors to resolve*)
12.13 (**)"~~/test/Pure/Isar/Test_Parsers"
12.14 (**)"~~/test/Pure/Isar/Test_Parse_Term"
12.15 (*/---------------------- do Minisubpbl before ADDTESTS/All_Ctxt ------------------------------\*)
13.1 --- a/test/Tools/isac/Test_Isac_Short.thy Sat Oct 24 12:31:22 2020 +0200
13.2 +++ b/test/Tools/isac/Test_Isac_Short.thy Mon Oct 26 13:52:26 2020 +0100
13.3 @@ -77,10 +77,10 @@
13.4 "~~/test/Pure/Isar/Keyword_ISAC.thy" (* Malformed theory import, "keywords" ?!? *)
13.5 "~~/test/Pure/Isar/Test_Parse_Isac.thy" (* Malformed theory import, "keywords" ?!? *)
13.6 "~~/test/Pure/Isar/Test_Parsers_Cookbook.thy" (* Malformed theory import ?!? *)
13.7 - "~~/test/Pure/Isar/Theory_Commands" (*Duplicate outer syntax command "ISAC" *)
13.8 + "~~/test/Pure/Isar/Theory_Commands" (* Duplicate outer syntax command "ISAC" *)
13.9 + "~~/test/Pure/Isar/Downto_Synchronized" (* re-defines / breaks structures !!! *)
13.10 \--- .. these work independently, but create problems here *)
13.11 (**)"~~/test/Pure/Isar/Check_Outer_Syntax"
13.12 -(** )"~~/test/Pure/Isar/Downto_Synchorinzed"( *TODO: errors to resolve*)
13.13 (**)"~~/test/Pure/Isar/Test_Parsers"
13.14 (**)"~~/test/Pure/Isar/Test_Parse_Term"
13.15 (*/---------------------- do Minisubpbl before ADDTESTS/All_Ctxt ------------------------------\*)