author | wenzelm |
Tue, 05 Jan 2010 15:44:06 +0100 | |
changeset 34269 | b5025782a4ed |
parent 34173 | 446a33b874b3 |
child 36947 | 285b39022372 |
permissions | -rw-r--r-- |
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}
wenzelm@34173 | 13 |
/** Outer syntax of Isabelle/Isar: an immutable keyword table together with
    the derived scanner lexicon and completion state.

    Values are extended functionally via `+`, which returns a fresh
    Outer_Syntax rather than mutating this one. */
class Outer_Syntax(symbols: Symbol.Interpretation)
{
  /* keyword name -> keyword kind (see Outer_Keyword); ";" is predeclared */
  protected val keywords: Map[String, String] = Map(";" -> Outer_Keyword.DIAG)

  /* scanner lexicon of all declared keyword names */
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty

  /* completion over the symbol interpretation; lazy so that anonymous
     subclasses created in `+` can override it before first use */
  lazy val completion: Completion = new Completion + symbols  // FIXME !?

  /** Declare keyword `name` of the given `kind`, returning a new syntax
      value; this instance is left unchanged. */
  def + (name: String, kind: String): Outer_Syntax =
  {
    val new_keywords = keywords + (name -> kind)
    val new_lexicon = lexicon + name
    val new_completion = completion + name
    // anonymous subclass: cheap functional update of the three tables
    new Outer_Syntax(symbols) {
      override val lexicon = new_lexicon
      override val keywords = new_keywords
      override lazy val completion = new_completion
    }
  }

  /** Declare `name` as a minor keyword. */
  def + (name: String): Outer_Syntax = this + (name, Outer_Keyword.MINOR)

  /** A name counts as a command iff it is declared with non-minor kind. */
  def is_command(name: String): Boolean =
    keywords.get(name) match {
      case Some(kind) => kind != Outer_Keyword.MINOR
      case None => false
    }


  /* tokenize */

  /** Scan `input` into outer tokens, using the current lexicon and
      command table.  Scanning is total over well-formed input; an
      unexpected parse failure is reported as an error. */
  def scan(input: Reader[Char]): List[Outer_Lex.Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  /** Scan a plain character sequence into outer tokens. */
  def scan(input: CharSequence): List[Outer_Lex.Token] =
    scan(new CharSequenceReader(input))
}