src/Pure/Isar/outer_syntax.scala

author:      wenzelm
date:        Tue, 17 Aug 2010 22:57:11 +0200
changeset:   38789 0924654b8163
parent:      36966 21be4832c362
child:       40711 2516ea25a54b
permissions: -rw-r--r--

report command token name instead of kind, which can be retrieved later via Outer_Syntax.keyword_kind;
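The commit message refers to markup reports that now carry the bare command token name; the keyword kind is recovered on demand via Outer_Syntax.keyword_kind. A minimal sketch of that lookup follows (it assumes compilation inside the isabelle package alongside this file; the "print_state" registration, the demo function, and its name are illustrative and not part of this changeset):

// hypothetical usage sketch, assuming the isabelle package classes are in scope
def keyword_kind_demo(symbols: Symbol.Interpretation): Unit =
{
  // register a diagnostic command keyword, yielding an extended (immutable) syntax
  val base_syntax = new Outer_Syntax(symbols)
  val syntax = base_syntax + ("print_state", Keyword.DIAG)

  // later, a reported token name can be mapped back to its keyword kind
  syntax.keyword_kind("print_state") match {
    case Some(kind) => println("print_state has keyword kind " + kind)
    case None => println("print_state is not a known keyword")
  }
}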
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}


class Outer_Syntax(symbols: Symbol.Interpretation)
{
  protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG))
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = new Completion + symbols  // FIXME !?

  def keyword_kind(name: String): Option[String] = keywords.get(name)

  def + (name: String, kind: String): Outer_Syntax =
  {
    val new_keywords = keywords + (name -> kind)
    val new_lexicon = lexicon + name
    val new_completion = completion + name
    new Outer_Syntax(symbols) {
      override val lexicon = new_lexicon
      override val keywords = new_keywords
      override lazy val completion = new_completion
    }
  }

  def + (name: String): Outer_Syntax = this + (name, Keyword.MINOR)

  def is_command(name: String): Boolean =
    keywords.get(name) match {
      case Some(kind) => kind != Keyword.MINOR
      case None => false
    }


  /* tokenize */

  def scan(input: Reader[Char]): List[Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Token] =
    scan(new CharSequenceReader(input))
}
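As a rough usage sketch of the tokenizer (again assuming the isabelle package context; the keyword names, the input line, and the demo function are illustrative assumptions, not part of this file):

// hypothetical sketch: build a small syntax and tokenize one source line
def scan_demo(symbols: Symbol.Interpretation): Unit =
{
  // "ML_command" becomes a command keyword, "and" a minor keyword
  val base_syntax = new Outer_Syntax(symbols)
  val syntax = base_syntax + ("ML_command", Keyword.DIAG) + "and"

  // scan accepts any CharSequence and yields the token list, whitespace tokens included
  val tokens = syntax.scan("ML_command foo and bar")
  for (tok <- tokens) println(tok)
}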