simplified Symbol based on lazy Symbol.Interpretation -- reduced odd "functorial style";
tuned implicit build/init messages;
/* Title: Pure/Isar/outer_syntax.scala
Isabelle/Isar outer syntax.
10 import scala.util.parsing.input.{Reader, CharSequenceReader}
11 import scala.collection.mutable
// Keyword table: maps each registered keyword/command name to its Keyword kind
// string; the ";" separator is pre-registered with kind Keyword.DIAG.
16 protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG))
// Scanner lexicon used for tokenizing; extended when keywords are added via `+`.
17 protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
// Completion table seeded with symbol completions; lazy so that subclasses
// created by `+` can override it before first use.
18 lazy val completion: Completion = (new Completion).add_symbols // FIXME odd initialization
20 def keyword_kind(name: String): Option[String] = keywords.get(name)
22 def + (name: String, kind: String, replace: String): Outer_Syntax =
// Build updated copies of all three syntax tables: record the keyword kind,
// extend the scanner lexicon, and add a completion entry (name -> replace).
24 val new_keywords = keywords + (name -> kind)
25 val new_lexicon = lexicon + name
26 val new_completion = completion + (name, replace)
// NOTE(review): the instantiation line (presumably `new Outer_Syntax { ... }`)
// is missing from this excerpt; the overrides below appear to belong to an
// anonymous subclass returned by this method -- confirm against the full file.
28 override val lexicon = new_lexicon
29 override val keywords = new_keywords
30 override lazy val completion = new_completion
34 def + (name: String, kind: String): Outer_Syntax = this + (name, kind, name)
36 def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)
/** Whether `name` denotes a proper command: it must be a registered keyword
    whose kind is anything other than Keyword.MINOR.  Names that are not
    registered at all are not commands. */
def is_command(name: String): Boolean =
  keyword_kind(name) match {
    case Some(kind) => kind != Keyword.MINOR
    // The visible excerpt matched only Some(_): an Option match without a
    // None branch throws MatchError for unknown names -- unknown is "false".
    case None => false
  }
44 def heading_level(name: String): Option[Int] =
// Hard-wired document heading levels for the standard markup commands,
// ordered from outermost (header = 1) to innermost (subsubsection = 5).
46 // FIXME avoid hard-wired info!?
47 case "header" => Some(1)
48 case "chapter" => Some(2)
49 case "section" | "sect" => Some(3)
50 case "subsection" | "subsect" => Some(4)
51 case "subsubsection" | "subsubsect" => Some(5)
// NOTE(review): the `match` opener and the default case wiring are missing
// from this excerpt; the fallback below presumably handles all other names --
// confirm against the full file.
53 keyword_kind(name) match {
// Any other theory-level keyword ranks below the explicit markup commands.
54 case Some(kind) if Keyword.theory(kind) => Some(6)
/** Heading level of a command, determined solely by its name. */
def heading_level(command: Command): Option[Int] =
  heading_level(command.name)
65 def scan(input: Reader[Char]): List[Token] =
// Tokenize the whole input in one pass; the token grammar is expected to
// accept arbitrary text, so any parse failure is reported as a hard error.
// NOTE(review): `parseAll`/`rep`/`token` presumably come from the lexicon's
// parser mixin via lines missing from this excerpt -- confirm.
69 parseAll(rep(token(is_command)), input) match {
70 case Success(tokens, _) => tokens
71 case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
/** Tokenize a plain character sequence by wrapping it in a parser reader and
    delegating to the Reader-based variant. */
def scan(input: CharSequence): List[Token] =
  scan(new CharSequenceReader(input))
78 def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
// Mutable parsing cursor over the input, advanced past each recognized token.
82 var in: Reader[Char] = new CharSequenceReader(input)
// Collects tokens in order; the mutable buffer stays local to this method.
83 val toks = new mutable.ListBuffer[Token]
// Parse one token at a time, threading the scanner context through the loop
// (so e.g. delimited text can span chunk boundaries).  NOTE(review): the
// declaration of `ctxt`, the surrounding loop, and the result expression are
// missing from this excerpt -- confirm against the full file.
86 parse(token_context(is_command, ctxt), in) match {
87 case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
88 case NoSuccess(_, rest) =>
89 error("Unexpected failure of tokenizing input:\n" + rest.source.toString)