src/Pure/General/scan.scala
changeset 44569 5130dfe1b7be
parent 44294 a26e514c92b2
child 44570 58bb7ca5c7a2
--- a/src/Pure/General/scan.scala	Wed Jul 06 23:11:59 2011 +0200
+++ b/src/Pure/General/scan.scala	Thu Jul 07 13:48:30 2011 +0200
@@ -335,11 +335,11 @@
       string | (alt_string | (verb | cmt))
     }
 
-    private def other_token(symbols: Symbol.Interpretation, is_command: String => Boolean)
+    private def other_token(is_command: String => Boolean)
       : Parser[Token] =
     {
-      val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y }
-      val nat = many1(symbols.is_digit)
+      val id = one(Symbol.is_letter) ~ many(Symbol.is_letdig) ^^ { case x ~ y => x + y }
+      val nat = many1(Symbol.is_digit)
       val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z }
       val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
 
@@ -355,14 +355,14 @@
         ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x))
 
       val sym_ident =
-        (many1(symbols.is_symbolic_char) | one(sym => symbols.is_symbolic(sym))) ^^
+        (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
         (x => Token(Token.Kind.SYM_IDENT, x))
 
-      val space = many1(symbols.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
+      val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
 
       // FIXME check
-      val junk = many(sym => !(symbols.is_blank(sym)))
-      val junk1 = many1(sym => !(symbols.is_blank(sym)))
+      val junk = many(sym => !(Symbol.is_blank(sym)))
+      val junk1 = many1(sym => !(Symbol.is_blank(sym)))
 
       val bad_delimiter =
         ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token.Kind.UNPARSED, x + y) }
@@ -376,11 +376,10 @@
           command_keyword) | bad))
     }
 
-    def token(symbols: Symbol.Interpretation, is_command: String => Boolean): Parser[Token] =
-      delimited_token | other_token(symbols, is_command)
+    def token(is_command: String => Boolean): Parser[Token] =
+      delimited_token | other_token(is_command)
 
-    def token_context(symbols: Symbol.Interpretation, is_command: String => Boolean, ctxt: Context)
-      : Parser[(Token, Context)] =
+    def token_context(is_command: String => Boolean, ctxt: Context): Parser[(Token, Context)] =
     {
       val string =
         quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
@@ -388,7 +387,7 @@
         quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
       val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
       val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
-      val other = other_token(symbols, is_command) ^^ { case x => (x, Finished) }
+      val other = other_token(is_command) ^^ { case x => (x, Finished) }
 
       string | (alt_string | (verb | (cmt | other)))
     }
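
For illustration only, a minimal, self-contained Scala sketch of the pattern applied above: the character-class predicates live on a static Symbol object, so the token parsers (and their callers) no longer thread a Symbol.Interpretation value through every call. The names Demo, one, many, many1 and the simplified Char-based predicates are assumptions for the sketch, not the actual Isabelle definitions; it relies only on the scala-parser-combinators library that scan.scala itself builds on.

import scala.util.parsing.combinator.RegexParsers

// Illustrative stand-in for the static Symbol object used directly by the
// parsers after this change; the real Symbol.is_letter etc. operate on
// Isabelle symbols, not plain characters.
object Symbol {
  def is_letter(c: Char): Boolean = c.isLetter
  def is_letdig(c: Char): Boolean = c.isLetterOrDigit || c == '_' || c == '\''
  def is_digit(c: Char): Boolean = c.isDigit
}

object Demo extends RegexParsers {
  override def skipWhitespace = false

  // simplified versions of the one/many combinators used in scan.scala
  def one(pred: Char => Boolean): Parser[String] = elem("sym", pred) ^^ (_.toString)
  def many(pred: Char => Boolean): Parser[String] = rep(elem("sym", pred)) ^^ (_.mkString)
  def many1(pred: Char => Boolean): Parser[String] = rep1(elem("sym", pred)) ^^ (_.mkString)

  // after the refactoring: predicates come from the static Symbol object,
  // so no interpretation value has to be passed down to the parsers
  val id: Parser[String] =
    one(Symbol.is_letter) ~ many(Symbol.is_letdig) ^^ { case x ~ y => x + y }
  val nat: Parser[String] = many1(Symbol.is_digit)

  def main(args: Array[String]): Unit = {
    println(parseAll(id, "foo42"))   // parsed: foo42
    println(parseAll(nat, "123"))    // parsed: 123
  }
}

At the call sites of token and token_context the effect is simply one fewer argument, as the last two hunks show.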