src/Pure/Isar/outer_syntax.scala

author:      wenzelm
date:        Tue, 05 Jan 2010 15:44:06 +0100
changeset:   34269 b5025782a4ed
parent:      34173 446a33b874b3
child:       36947 285b39022372
permissions: -rw-r--r--

log message: tuned message;
/*  Title:      Pure/Isar/outer_syntax.scala
    Author:     Makarius

Isabelle/Isar outer syntax.
*/

package isabelle


import scala.util.parsing.input.{Reader, CharSequenceReader}


class Outer_Syntax(symbols: Symbol.Interpretation)
{
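  // Keyword table (name -> kind), scanner lexicon and completion; all three
  // are extended incrementally via the + operators below.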
  protected val keywords: Map[String, String] = Map((";" -> Outer_Keyword.DIAG))
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  lazy val completion: Completion = new Completion + symbols  // FIXME !?

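  // Extending the syntax with a keyword yields a fresh Outer_Syntax value:
  // the anonymous subclass below overrides the keyword table, the scanner
  // lexicon and the completion with their extended versions, leaving this
  // instance unchanged.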
  def + (name: String, kind: String): Outer_Syntax =
  {
    val new_keywords = keywords + (name -> kind)
    val new_lexicon = lexicon + name
    val new_completion = completion + name
    new Outer_Syntax(symbols) {
      override val lexicon = new_lexicon
      override val keywords = new_keywords
      override lazy val completion = new_completion
    }
  }

  def + (name: String): Outer_Syntax = this + (name, Outer_Keyword.MINOR)

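  // A name counts as a command iff it is a known keyword of non-minor kind.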
  def is_command(name: String): Boolean =
    keywords.get(name) match {
      case Some(kind) => kind != Outer_Keyword.MINOR
      case None => false
    }


  /* tokenize */

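  // Scan the complete input with the lexicon's token parser; the symbol
  // interpretation and the command predicate determine how tokens are
  // classified.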
  def scan(input: Reader[Char]): List[Outer_Lex.Token] =
  {
    import lexicon._

    parseAll(rep(token(symbols, is_command)), input) match {
      case Success(tokens, _) => tokens
      case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
    }
  }

  def scan(input: CharSequence): List[Outer_Lex.Token] =
    scan(new CharSequenceReader(input))
}
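
/* Hypothetical usage sketch (not part of the original file).  It assumes a
   Symbol.Interpretation value is at hand; the keyword names and the
   zero-argument Symbol.Interpretation constructor are illustrative
   assumptions, not taken from this file.

     val symbols = new Symbol.Interpretation()      // assumed constructor
     val syntax =
       new Outer_Syntax(symbols) + ("hello", Outer_Keyword.DIAG) + "world"
     syntax.is_command("hello")   // true: DIAG is not a minor keyword
     syntax.is_command("world")   // false: added via the single-argument +,
                                  // i.e. as Outer_Keyword.MINOR
     syntax.scan("hello world")   // tokenize via the CharSequence overload
*/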