src/Pure/General/scan.scala
author wenzelm
Sat, 13 Nov 2010 19:21:53 +0100
changeset 40769 1050315f6ee2
parent 40536 47f572aff50a
child 43312 781c622af16a
permissions -rw-r--r--
simplified/robustified treatment of malformed symbols, which are now fully internalized (total Symbol.explode etc.);
allow malformed symbols inside quoted material, comments etc. -- for improved user experience with incremental re-parsing;
refined treatment of malformed surrogates (Scala);
     1 /*  Title:      Pure/General/scan.scala
     2     Author:     Makarius
     3 
     4 Efficient scanning of keywords and tokens.
     5 */
     6 
     7 package isabelle
     8 
     9 
    10 import scala.collection.generic.Addable
    11 import scala.collection.IndexedSeq
    12 import scala.collection.immutable.PagedSeq
    13 import scala.util.parsing.input.{OffsetPosition, Position => InputPosition, Reader}
    14 import scala.util.parsing.combinator.RegexParsers
    15 
    16 import java.io.{File, InputStream, BufferedInputStream, FileInputStream}
    17 
    18 
    19 object Scan
    20 {
    21   /** Lexicon -- position tree **/
    22 
    23   object Lexicon
    24   {
    25     private case class Tree(val branches: Map[Char, (String, Tree)])
    26     private val empty_tree = Tree(Map())
    27 
    28     val empty: Lexicon = new Lexicon
    29     def apply(elems: String*): Lexicon = empty ++ elems
    30   }
    31 
    32   class Lexicon extends Addable[String, Lexicon] with RegexParsers
    33   {
    34     /* representation */
    35 
    36     import Lexicon.Tree
    37     protected val main_tree: Tree = Lexicon.empty_tree
    38 
    39 
    40     /* auxiliary operations */
    41 
    /* Collect all keywords stored below `tree`, prepending them to `result`
       (entries with empty string are mere prefixes, not keywords). */
    private def content(tree: Tree, result: List[String]): List[String] =
      tree.branches.toList.foldLeft(result) {
        case (acc, (_, (s, subtree))) =>
          if (s.isEmpty) content(subtree, acc) else content(subtree, s :: acc)
      }
    46 
    /* Follow `str` through the tree: returns Some(is_keyword, subtree) when
       the whole string is a path in the tree, None otherwise. */
    private def lookup(str: CharSequence): Option[(Boolean, Tree)] =
    {
      val n = str.length
      def descend(tree: Tree, tip: Boolean, i: Int): Option[(Boolean, Tree)] =
        if (i >= n) Some((tip, tree))
        else
          tree.branches.get(str.charAt(i)) match {
            case Some((s, subtree)) => descend(subtree, !s.isEmpty, i + 1)
            case None => None
          }
      descend(main_tree, false, 0)
    }
    61 
    /* All keywords extending `str`; includes `str` itself when it is a keyword. */
    def completions(str: CharSequence): List[String] =
      lookup(str) match {
        case None => Nil
        case Some((tip, tree)) =>
          content(tree, if (tip) List(str.toString) else Nil)
      }
    68 
    69 
    70     /* pseudo Set methods */
    71 
    /* Keywords in ascending order. */
    def iterator: Iterator[String] = content(main_tree, Nil).sorted.iterator

    override def toString: String = iterator.mkString("Lexicon(", ", ", ")")

    def empty: Lexicon = Lexicon.empty
    def isEmpty: Boolean = main_tree.branches.isEmpty

    /* Membership: `elem` must be a complete path whose tip marks a keyword. */
    def contains(elem: String): Boolean =
      lookup(elem).exists(_._1)
    84 
    85 
    86     /* Addable methods */
    87 
    def repr = this

    /* Add one keyword, rebuilding the spine of tree nodes along its path;
       the result is a fresh Lexicon sharing all untouched subtrees. */
    def + (elem: String): Lexicon =
      if (contains(elem)) this
      else {
        val n = elem.length
        def insert(tree: Tree, i: Int): Tree =
          if (i >= n) tree
          else {
            val c = elem.charAt(i)
            val is_last = (i + 1 == n)
            // keep an existing marker unless this position completes `elem`
            val (marker, subtree) =
              tree.branches.get(c) match {
                case Some((s, tr)) => (if (is_last) elem else s, tr)
                case None => (if (is_last) elem else "", Lexicon.empty_tree)
              }
            Tree(tree.branches + (c -> (marker, insert(subtree, i + 1))))
          }
        val old = this
        new Lexicon { override val main_tree = insert(old.main_tree, 0) }
      }
   111 
   112 
   113 
   114     /** RegexParsers methods **/
   115 
    // disable RegexParsers' implicit whitespace skipping: blanks are tokens here
    override val whiteSpace = "".r
   117 
   118 
   119     /* keywords from lexicon */
   120 
    /* Parse the longest keyword of this lexicon at the current input position. */
    def keyword: Parser[String] = new Parser[String]
    {
      def apply(in: Input) =
      {
        val source = in.source
        val offset = in.offset
        val len = source.length - offset

        // Greedy longest-match walk along the lexicon tree: `result` is the
        // longest complete keyword seen so far (branch entries with empty
        // string are proper prefixes, not keywords themselves).
        def scan(tree: Tree, result: String, i: Int): String =
        {
          if (i < len) {
            tree.branches.get(source.charAt(offset + i)) match {
              case Some((s, tr)) => scan(tr, if (s.isEmpty) result else s, i + 1)
              case None => result
            }
          }
          else result
        }
        val result = scan(main_tree, "", 0)
        if (result.isEmpty) Failure("keyword expected", in)
        else Success(result, in.drop(result.length))
      }
    }.named("keyword")
   144 
   145 
   146     /* symbol range */
   147 
    /* Parse between min_count and max_count consecutive symbols satisfying
       `pred`.  Symbols are delimited by Symbol.Matcher, which may span
       several chars per symbol — presumably it returns the char length of
       the symbol starting at the given offset (project type; confirm). */
    def symbol_range(pred: String => Boolean, min_count: Int, max_count: Int): Parser[String] =
      new Parser[String]
      {
        def apply(in: Input) =
        {
          val start = in.offset
          val end = in.source.length
          val matcher = new Symbol.Matcher(in.source)

          var i = start
          var count = 0
          var finished = false
          while (!finished) {
            if (i < end && count < max_count) {
              val n = matcher(i, end)  // char length of next symbol
              val sym = in.source.subSequence(i, i + n).toString
              if (pred(sym)) { i += n; count += 1 }
              else finished = true
            }
            else finished = true
          }
          if (count < min_count) Failure("bad input", in)
          else Success(in.source.subSequence(start, i).toString, in.drop(i - start))
        }
      }.named("symbol_range")
   173 
    // exactly one symbol / zero or more / one or more satisfying pred
    def one(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, 1)
    def many(pred: String => Boolean): Parser[String] = symbol_range(pred, 0, Integer.MAX_VALUE)
    def many1(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, Integer.MAX_VALUE)
   177 
   178 
   179     /* quoted strings */
   180 
    /* Quoted string delimited by `quote` (e.g. "\"" or "`"), keeping the
       delimiters and escapes in the result.  Inside: plain symbols, escaped
       quote, escaped backslash, or a 3-digit decimal escape <= 255. */
    private def quoted(quote: String): Parser[String] =
    {
      quote ~
        rep(many1(sym => sym != quote && sym != "\\") | "\\" + quote | "\\\\" |
          (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ~
      quote ^^ { case x ~ ys ~ z => x + ys.mkString + z }
    }.named("quoted")
   188 
   189     def quoted_content(quote: String, source: String): String =
   190     {
   191       require(parseAll(quoted(quote), source).successful)
   192       val body = source.substring(1, source.length - 1)
   193       if (body.exists(_ == '\\')) {
   194         val content =
   195           rep(many1(sym => sym != quote && sym != "\\") |
   196               "\\" ~> (quote | "\\" | """\d\d\d""".r ^^ { case x => x.toInt.toChar.toString }))
   197         parseAll(content ^^ (_.mkString), body).get
   198       }
   199       else body
   200     }
   201 
   202 
   203     /* verbatim text */
   204 
    /* Verbatim text "{* ... *}", kept with delimiters; a '*' is allowed
       inside unless immediately followed by '}'. */
    private def verbatim: Parser[String] =
    {
      "{*" ~ rep(many1(sym => sym != "*") | """\*(?!\})""".r) ~ "*}" ^^
        { case x ~ ys ~ z => x + ys.mkString + z }
    }.named("verbatim")
   210 
   211     def verbatim_content(source: String): String =
   212     {
   213       require(parseAll(verbatim, source).successful)
   214       source.substring(2, source.length - 2)
   215     }
   216 
   217 
   218     /* nested comments */
   219 
    /* Properly nested ML-style comment "(* ... *)".  Implemented as a manual
       loop over open/close parsers so nesting depth can be tracked. */
    def comment: Parser[String] = new Parser[String]
    {
      // text between delimiters: '*' not before ')' and '(' not before '*'
      val comment_text =
        rep(many1(sym => sym != "*" && sym != "(") | """\*(?!\))|\((?!\*)""".r)
      val comment_open = "(*" ~ comment_text ^^^ ()
      val comment_close = comment_text ~ "*)" ^^^ ()

      def apply(in: Input) =
      {
        var rest = in
        // try p at the current position, advancing `rest` on success
        def try_parse(p: Parser[Unit]): Boolean =
        {
          parse(p, rest) match {
            case Success(_, next) => { rest = next; true }
            case _ => false
          }
        }
        // consume opens and matching closes until neither applies
        var depth = 0
        var finished = false
        while (!finished) {
          if (try_parse(comment_open)) depth += 1
          else if (depth > 0 && try_parse(comment_close)) depth -= 1
          else finished = true
        }
        // succeed only if something was consumed and all opens were closed
        if (in.offset < rest.offset && depth == 0)
          Success(in.source.subSequence(in.offset, rest.offset).toString, rest)
        else Failure("comment expected", in)
      }
    }.named("comment")
   249 
   250     def comment_content(source: String): String =
   251     {
   252       require(parseAll(comment, source).successful)
   253       source.substring(2, source.length - 2)
   254     }
   255 
   256 
   257     /* outer syntax tokens */
   258 
    /* One outer-syntax token: identifiers, variables, literals, delimited
       material, keywords/commands, or UNPARSED junk as a catch-all, so the
       parser is total on any input. */
    def token(symbols: Symbol.Interpretation, is_command: String => Boolean): Parser[Token] =
    {
      /* basic building blocks over symbol classes */
      val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y }
      val nat = many1(symbols.is_digit)
      val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z }
      val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }

      // "a.b.c" becomes LONG_IDENT, a bare "a" stays IDENT
      val ident = id ~ rep("." ~> id) ^^
        { case x ~ Nil => Token(Token.Kind.IDENT, x)
          case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }

      val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
      val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
      val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
      val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
      val float =
        ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x))

      // runs of symbolic chars, or a single (possibly multi-char) symbol
      val sym_ident =
        (many1(symbols.is_symbolic_char) | one(sym => symbols.is_symbolic(sym))) ^^
        (x => Token(Token.Kind.SYM_IDENT, x))

      val space = many1(symbols.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))

      val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
      val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))

      // recovery: an opening delimiter followed by garbage up to the next
      // blank is swallowed as a single UNPARSED token
      val junk = many1(sym => !(symbols.is_blank(sym)))
      val bad_delimiter =
        ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token.Kind.UNPARSED, x + y) }
      val bad = junk ^^ (x => Token(Token.Kind.UNPARSED, x))


      /* tokens */

      // ||| takes the longest alternative of identifiers vs. keywords;
      // delimited material and recovery cases are tried first/last
      (space | (string | (alt_string | (verbatim ^^ (x => Token(Token.Kind.VERBATIM, x)) |
        comment ^^ (x => Token(Token.Kind.COMMENT, x)))))) |
      bad_delimiter |
      ((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
        keyword ^^ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))) |
      bad
    }
   301   }
   302 
   303 
   304 
   305   /** read file without decoding -- enables efficient length operation **/
   306 
   307   private class Restricted_Seq(seq: IndexedSeq[Char], start: Int, end: Int)
   308     extends CharSequence
   309   {
   310     def charAt(i: Int): Char =
   311       if (0 <= i && i < length) seq(start + i)
   312       else throw new IndexOutOfBoundsException
   313 
   314     def length: Int = end - start  // avoid potentially expensive seq.length
   315 
   316     def subSequence(i: Int, j: Int): CharSequence =
   317       if (0 <= i && i <= j && j <= length) new Restricted_Seq(seq, start + i, start + j)
   318       else throw new IndexOutOfBoundsException
   319 
   320     override def toString: String =
   321     {
   322       val buf = new StringBuilder(length)
   323       for (offset <- start until end) buf.append(seq(offset))
   324       buf.toString
   325     }
   326   }
   327 
  // Reader that owns an underlying stream: close releases the resource
  abstract class Byte_Reader extends Reader[Char] { def close: Unit }
   329 
  /* Lazy reader over the raw bytes of `file`: each byte is mapped 1:1 to a
     char (no decoding), so reader offsets equal byte offsets and the total
     length is file.length without reading the file. */
  def byte_reader(file: File): Byte_Reader =
  {
    // NOTE(review): the stream is released only via the returned reader's
    // close; if construction below throws, the stream leaks — confirm callers
    val stream = new BufferedInputStream(new FileInputStream(file))
    val seq = new PagedSeq(
      (buf: Array[Char], offset: Int, length: Int) =>
        {
          // fill function for PagedSeq: read up to `length` bytes into buf,
          // return the number read, or -1 at end of input
          var i = 0
          var c = 0
          var eof = false
          while (!eof && i < length) {
            c = stream.read
            if (c == -1) eof = true
            else { buf(offset + i) = c.toChar; i += 1 }
          }
          if (i > 0) i else -1
        })
    // NOTE(review): file.length.toInt truncates for files >= 2 GiB — confirm
    // inputs stay below that limit
    val restricted_seq = new Restricted_Seq(seq, 0, file.length.toInt)

    class Paged_Reader(override val offset: Int) extends Byte_Reader
    {
      override lazy val source: CharSequence = restricted_seq
      // '\032' (^Z) serves as pseudo end-of-input char beyond the last byte
      def first: Char = if (seq.isDefinedAt(offset)) seq(offset) else '\032'
      def rest: Paged_Reader = if (seq.isDefinedAt(offset)) new Paged_Reader(offset + 1) else this
      def pos: InputPosition = new OffsetPosition(source, offset)
      def atEnd: Boolean = !seq.isDefinedAt(offset)
      override def drop(n: Int): Paged_Reader = new Paged_Reader(offset + n)
      def close { stream.close }
    }
    new Paged_Reader(0)
  }
   360 }