/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}
import scala.collection.mutable

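/* outer syntax: keyword table (name -> kind), scanner lexicon, and completion;
   extended incrementally via the "+" operations below */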
class Outer_Syntax(symbols: Symbol.Interpretation)
{
  protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG))
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = new Completion + symbols  // FIXME !?

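  // the kind of a known keyword or command, None for unknown names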
  def keyword_kind(name: String): Option[String] = keywords.get(name)

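  /* adding a keyword yields a fresh Outer_Syntax with extended keyword table,
     lexicon, and completion */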
  def + (name: String, kind: String, replace: String): Outer_Syntax =
  {
    val new_keywords = keywords + (name -> kind)
    val new_lexicon = lexicon + name
    val new_completion = completion + (name, replace)
    new Outer_Syntax(symbols) {
      override val lexicon = new_lexicon
      override val keywords = new_keywords
      override lazy val completion = new_completion
    }
  }

  def + (name: String, kind: String): Outer_Syntax = this + (name, kind, name)

  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)

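  // commands are exactly the known keywords of non-MINOR kind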
  def is_command(name: String): Boolean =
    keyword_kind(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }

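  // document heading level: sectioning commands map to levels 1..5,
  // any other theory-level command counts as level 6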
  def heading_level(name: String): Option[Int] =
    name match {
      // FIXME avoid hard-wired info!?
      case "header" => Some(1)
      case "chapter" => Some(2)
      case "section" | "sect" => Some(3)
      case "subsection" | "subsect" => Some(4)
      case "subsubsection" | "subsubsect" => Some(5)
      case _ =>
        keyword_kind(name) match {
          case Some(kind) if Keyword.theory(kind) => Some(6)
          case _ => None
        }
    }

  def heading_level(command: Command): Option[Int] =
    heading_level(command.name)


  /* tokenize */

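  // scan complete input into a token list; a scanner failure is a hard error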
  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))

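  // scan a chunk of input while threading the scanner context through, so
  // that delimited tokens (e.g. comments or quotations) may span chunks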
  def scan_context(input: CharSequence, context: Scan.Context): (List[Token], Scan.Context) =
  {
    import lexicon._

    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      parse(token_context(symbols, is_command, ctxt), in) match {
        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case NoSuccess(_, rest) =>
          error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
}
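
/* Illustrative usage sketch: extending the syntax and tokenizing some text.
   The keyword name and the symbols value are hypothetical, and Scan.Finished
   is assumed to be the initial (empty) scanner context.

     val syntax: Outer_Syntax =
       new Outer_Syntax(symbols) + ("my_diag_command", Keyword.DIAG) + "where"

     val tokens: List[Token] = syntax.scan("my_diag_command where")
     val (toks, ctxt) = syntax.scan_context("(* open comment", Scan.Finished)
*/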