src/Pure/Isar/outer_syntax.scala
changeset 44569 5130dfe1b7be
parent 44330 4b4b93672f15
child 44648 6dfdb70496fe
--- a/src/Pure/Isar/outer_syntax.scala	Wed Jul 06 23:11:59 2011 +0200
+++ b/src/Pure/Isar/outer_syntax.scala	Thu Jul 07 13:48:30 2011 +0200
@@ -11,11 +11,11 @@
 import scala.collection.mutable
 
 
-class Outer_Syntax(symbols: Symbol.Interpretation)
+class Outer_Syntax
 {
   protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG))
   protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
-  lazy val completion: Completion = new Completion + symbols // FIXME odd initialization
+  lazy val completion: Completion = (new Completion).add_symbols // FIXME odd initialization
 
   def keyword_kind(name: String): Option[String] = keywords.get(name)
 
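
The first hunk drops the Symbol.Interpretation constructor argument: the completion now pulls symbol abbreviations in by itself via add_symbols. A minimal sketch of that shape, using a toy immutable Completion and a hypothetical global symbol table (illustrative names only, not Isabelle's actual Completion API):

// Toy model of an immutable completion that folds in a global symbol table,
// so callers no longer have to pass a Symbol.Interpretation around.
object Symbols {
  // hypothetical global table; in Isabelle the symbol interpretation is a static resource
  val table: Map[String, String] = Map("\\<alpha>" -> "α", "\\<Rightarrow>" -> "⇒")
}

class Completion private(entries: Map[String, String]) {
  def this() = this(Map.empty)
  // add one explicit completion entry (keyword -> replacement text)
  def + (name: String, replace: String): Completion = new Completion(entries + (name -> replace))
  // fold in the symbol abbreviations from the global table
  def add_symbols: Completion = new Completion(entries ++ Symbols.table)
  def complete(prefix: String): List[String] =
    entries.toList.collect({ case (k, v) if k.startsWith(prefix) => v })
}

// Usage, mirroring the changed line above:
//   lazy val completion: Completion = (new Completion).add_symbols
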
@@ -24,7 +24,7 @@
     val new_keywords = keywords + (name -> kind)
     val new_lexicon = lexicon + name
     val new_completion = completion + (name, replace)
-    new Outer_Syntax(symbols) {
+    new Outer_Syntax {
       override val lexicon = new_lexicon
       override val keywords = new_keywords
       override lazy val completion = new_completion
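
The + method above extends the syntax without mutation: it builds new tables and returns an anonymous subclass whose overridden vals shadow the old ones. A self-contained sketch of that idiom with simplified toy types (not the actual Outer_Syntax/Scan classes):

// Sketch of the "extend by anonymous subclass" idiom, with a toy keyword table only.
class Syntax {
  protected val keywords: Map[String, String] = Map(";" -> "diag")

  def keyword_kind(name: String): Option[String] = keywords.get(name)

  def + (name: String, kind: String): Syntax = {
    val new_keywords = keywords + (name -> kind)
    new Syntax {
      override val keywords = new_keywords  // the new value sees the extended table
    }
  }
}

// Usage: every + yields a fresh, immutable Syntax value.
//   val syn = new Syntax + ("theorem", "thy_goal") + ("by", "prf_asm")
//   syn.keyword_kind("theorem")  // Some("thy_goal")
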
@@ -66,7 +66,7 @@
   {
     import lexicon._
 
-    parseAll(rep(token(symbols, is_command)), input) match {
+    parseAll(rep(token(is_command)), input) match {
       case Success(tokens, _) => tokens
       case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString)
     }
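
The scan function above hands the entire input to parseAll and treats any parse failure as a hard error, since the token parsers are meant to cover every possible character. A standalone sketch of that pattern with scala.util.parsing.combinator (toy token parsers, not the Isabelle lexicon's token):

import scala.util.parsing.combinator.RegexParsers

// Toy tokenizer in the same style: parseAll over rep(token), erroring out if
// anything is left untokenized (which cannot happen here, because the three
// alternatives together cover all characters).
object ToyScan extends RegexParsers {
  override def skipWhitespace = false

  case class Token(kind: String, source: String)

  val word: Parser[Token] = """[A-Za-z_]+""".r ^^ (s => Token("word", s))
  val space: Parser[Token] = """\s+""".r ^^ (s => Token("space", s))
  val other: Parser[Token] = """[^A-Za-z_\s]+""".r ^^ (s => Token("other", s))
  val token: Parser[Token] = word | space | other

  def scan(input: CharSequence): List[Token] =
    parseAll(rep(token), input) match {
      case Success(tokens, _) => tokens
      case bad => sys.error("Unexpected failure of tokenizing input:\n" + bad.toString)
    }
}

// ToyScan.scan("lemma foo: bar") splits the whole string into word/space/other chunks.
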
@@ -83,7 +83,7 @@
     val toks = new mutable.ListBuffer[Token]
     var ctxt = context
     while (!in.atEnd) {
-      parse(token_context(symbols, is_command, ctxt), in) match {
+      parse(token_context(is_command, ctxt), in) match {
         case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
         case NoSuccess(_, rest) =>
           error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
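
The second scanner tokenizes incrementally: parse consumes one token from the remaining Reader, and the resulting context (for example, being inside a quotation) is threaded into the next step so tokenization can be resumed chunk by chunk. A hedged sketch of that loop shape with a toy context-sensitive token parser; token_context is re-implemented here for illustration and is not the Isabelle one:

import scala.collection.mutable
import scala.util.parsing.combinator.RegexParsers
import scala.util.parsing.input.{CharSequenceReader, Reader}

// Toy incremental tokenizer: the context records whether we are inside a
// double-quoted string, and is passed from one parse step to the next.
object ToyScanContext extends RegexParsers {
  override def skipWhitespace = false

  case class Token(kind: String, source: String)
  type Context = Boolean  // true = inside "..."

  def token_context(ctxt: Context): Parser[(Token, Context)] =
    if (ctxt)
      ("[^\"]*\"".r ^^ (s => (Token("string", s), false))) |  // up to and including the closing quote
      ("[^\"]+".r ^^ (s => (Token("string", s), true)))       // no closing quote in this chunk
    else
      ("\"" ^^ (s => (Token("quote", s), true))) |
      ("[^\"]+".r ^^ (s => (Token("text", s), false)))

  def scan_context(input: CharSequence, context: Context): (List[Token], Context) = {
    var in: Reader[Char] = new CharSequenceReader(input)
    val toks = new mutable.ListBuffer[Token]
    var ctxt = context
    while (!in.atEnd) {
      parse(token_context(ctxt), in) match {
        case Success((x, c), rest) => { toks += x; ctxt = c; in = rest }
        case NoSuccess(_, rest) =>
          sys.error("Unexpected failure of tokenizing input:\n" + rest.source.toString)
      }
    }
    (toks.toList, ctxt)
  }
}

// Usage: feed the input in pieces and carry the context across the boundary.
//   val (toks1, c1) = ToyScanContext.scan_context("lemma \"a =", false)
//   val (toks2, c2) = ToyScanContext.scan_context(" b\" by simp", c1)
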