src/Pure/General/scan.scala
author wenzelm
Sun, 10 Jan 2010 17:29:09 +0100
changeset 34306 78c10aea025d
parent 34272 92810c85262e
child 34319 f879b649ac4c
permissions -rw-r--r--
tuned;
/*  Title:      Pure/General/scan.scala
    Author:     Makarius

Efficient scanning of keywords.
*/

package isabelle


import scala.collection.immutable.PagedSeq
import scala.util.parsing.input.{OffsetPosition, Position => InputPosition, Reader}
import scala.util.parsing.combinator.RegexParsers

import java.io.{File, InputStream, BufferedInputStream, FileInputStream}


object Scan
{
  /** Lexicon -- position tree **/

  object Lexicon
  {
    private case class Tree(val branches: Map[Char, (String, Tree)])
    private val empty_tree = Tree(Map())

    val empty: Lexicon = new Lexicon
    def apply(elems: String*): Lexicon = empty ++ elems
  }

  class Lexicon extends scala.collection.Set[String] with RegexParsers
  {
    /* representation */

    import Lexicon.Tree
    protected val main_tree: Tree = Lexicon.empty_tree


    /* auxiliary operations */

    private def content(tree: Tree, result: List[String]): List[String] =
      (result /: tree.branches.toList) ((res, entry) =>
        entry match { case (_, (s, tr)) =>
          if (s.isEmpty) content(tr, res) else content(tr, s :: res) })

    private def lookup(str: CharSequence): Option[(Boolean, Tree)] =
    {
      val len = str.length
      def look(tree: Tree, tip: Boolean, i: Int): Option[(Boolean, Tree)] =
      {
        if (i < len) {
          tree.branches.get(str.charAt(i)) match {
            case Some((s, tr)) => look(tr, !s.isEmpty, i + 1)
            case None => None
          }
        } else Some(tip, tree)
      }
      look(main_tree, false, 0)
    }

    def completions(str: CharSequence): List[String] =
      lookup(str) match {
        case Some((true, tree)) => content(tree, List(str.toString))
        case Some((false, tree)) => content(tree, Nil)
        case None => Nil
      }

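    /*
      Illustrative sketch (keyword values assumed, not taken from this file):
      each branch carries the longest keyword that ends at that node (or ""),
      so the completions of a prefix are all stored keywords below its node.

        val lex = Lexicon("lemma", "lemmas", "theory")
        lex.completions("lem")     // contains "lemma" and "lemmas" (order unspecified)
        lex.completions("lemma")   // contains "lemma" and "lemmas"
        lex.completions("foo")     // Nil
    */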

    /* Set methods */

    override def stringPrefix = "Lexicon"

    override def isEmpty: Boolean = { main_tree.branches.isEmpty }

    def size: Int = content(main_tree, Nil).length
    def elements: Iterator[String] = content(main_tree, Nil).sort(_ <= _).elements

    def contains(elem: String): Boolean =
      lookup(elem) match {
        case Some((tip, _)) => tip
        case _ => false
      }

    def + (elem: String): Lexicon =
      if (contains(elem)) this
      else {
        val len = elem.length
        def extend(tree: Tree, i: Int): Tree =
          if (i < len) {
            val c = elem.charAt(i)
            val end = (i + 1 == len)
            tree.branches.get(c) match {
              case Some((s, tr)) =>
                Tree(tree.branches +
                  (c -> (if (end) elem else s, extend(tr, i + 1))))
              case None =>
                Tree(tree.branches +
                  (c -> (if (end) elem else "", extend(Lexicon.empty_tree, i + 1))))
            }
          }
          else tree
        val old = this
        new Lexicon { override val main_tree = extend(old.main_tree, 0) }
      }

    def + (elem1: String, elem2: String, elems: String*): Lexicon =
      this + elem1 + elem2 ++ elems
    def ++ (elems: Iterable[String]): Lexicon = (this /: elems) ((s, elem) => s + elem)
    def ++ (elems: Iterator[String]): Lexicon = (this /: elems) ((s, elem) => s + elem)

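    /*
      Construction sketch (element values assumed for illustration): "+" is
      purely functional -- it returns a new Lexicon and leaves the old one
      untouched, sharing unchanged subtrees.

        val lex1 = Lexicon.empty + "apply" + "apply_end"
        val lex2 = lex1 ++ List("by", "done")
        lex1.contains("by")           // false -- lex1 is unchanged
        lex2.contains("apply_end")    // true
        lex2.size                     // 4
    */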


    /** RegexParsers methods **/

    override val whiteSpace = "".r


    /* keywords from lexicon */

    def keyword: Parser[String] = new Parser[String]
    {
      def apply(in: Input) =
      {
        val source = in.source
        val offset = in.offset
        val len = source.length - offset

        def scan(tree: Tree, result: String, i: Int): String =
        {
          if (i < len) {
            tree.branches.get(source.charAt(offset + i)) match {
              case Some((s, tr)) => scan(tr, if (s.isEmpty) result else s, i + 1)
              case None => result
            }
          }
          else result
        }
        val result = scan(main_tree, "", 0)
        if (result.isEmpty) Failure("keyword expected", in)
        else Success(result, in.drop(result.length))
      }
    }.named("keyword")

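    /*
      Sketch (lexicon contents assumed for illustration): scan remembers the
      last complete keyword seen along the path, so the longest match wins and
      no backtracking is needed.

        val lex = Lexicon(":", "::")
        lex.parse(lex.keyword, ":: thm").get   // "::", not ":"
        lex.parse(lex.keyword, "x + y")        // Failure("keyword expected", ...)
    */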

    /* symbol range */

    def symbol_range(pred: String => Boolean, min_count: Int, max_count: Int): Parser[String] =
      new Parser[String]
      {
        def apply(in: Input) =
        {
          val start = in.offset
          val end = in.source.length
          val matcher = new Symbol.Matcher(in.source)

          var i = start
          var count = 0
          var finished = false
          while (!finished) {
            if (i < end && count < max_count) {
              val n = matcher(i, end)
              val sym = in.source.subSequence(i, i + n).toString
              if (pred(sym)) { i += n; count += 1 }
              else finished = true
            }
            else finished = true
          }
          if (count < min_count) Failure("bad input", in)
          else Success(in.source.subSequence(start, i).toString, in.drop(i - start))
        }
      }.named("symbol_range")

    def one(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, 1)
    def many(pred: String => Boolean): Parser[String] = symbol_range(pred, 0, Integer.MAX_VALUE)
    def many1(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, Integer.MAX_VALUE)

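    /*
      Sketch (input assumed for illustration): a "symbol" may span several
      characters, e.g. the Isabelle symbol \<alpha>, which Symbol.Matcher
      treats as a single unit.

        parse(many1(sym => sym != " "), """x\<alpha>y z""")
        // Success: consumes "x\<alpha>y", leaving " z"
    */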

    /* quoted strings */

    private def quoted(quote: String): Parser[String] =
    {
      quote ~
        rep(many1(sym => sym != quote && sym != "\\" && Symbol.is_closed(sym)) |
          "\\" + quote | "\\\\" |
          (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ~
      quote ^^ { case x ~ ys ~ z => x + ys.mkString + z }
    }.named("quoted")

    def quoted_content(quote: String, source: String): String =
    {
      require(parseAll(quoted(quote), source).successful)
      val body = source.substring(1, source.length - 1)
      if (body.exists(_ == '\\')) {
        val content =
          rep(many1(sym => sym != quote && sym != "\\" && Symbol.is_closed(sym)) |
              "\\" ~> (quote | "\\" | """\d\d\d""".r ^^ { case x => x.toInt.toChar.toString }))
        parseAll(content ^^ (_.mkString), body).get
      }
      else body
    }

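    /*
      Sketch (inputs assumed for illustration): quoted keeps delimiters and
      escapes verbatim, whereas quoted_content strips the delimiters and
      decodes escapes, e.g. \065 stands for the character with code 65 ('A').

        quoted_content("\"", "\"plain\"")     // "plain"
        quoted_content("\"", "\"a\\065b\"")   // "aAb"
    */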

    /* verbatim text */

    private def verbatim: Parser[String] =
    {
      "{*" ~ rep(many1(sym => sym != "*" && Symbol.is_closed(sym)) | """\*(?!\})""".r) ~ "*}" ^^
        { case x ~ ys ~ z => x + ys.mkString + z }
    }.named("verbatim")

    def verbatim_content(source: String): String =
    {
      require(parseAll(verbatim, source).successful)
      source.substring(2, source.length - 2)
    }

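    /*
      Sketch (input assumed for illustration): verbatim_content only strips the
      {* and *} delimiters; the body, including surrounding spaces, is kept.

        verbatim_content("{* foo *}")   // " foo "
    */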

    /* nested comments */

    def comment: Parser[String] = new Parser[String]
    {
      val comment_text =
        rep(many1(sym => sym != "*" && sym != "(" && Symbol.is_closed(sym)) |
          """\*(?!\))|\((?!\*)""".r)
      val comment_open = "(*" ~ comment_text ^^^ ()
      val comment_close = comment_text ~ "*)" ^^^ ()

      def apply(in: Input) =
      {
        var rest = in
        def try_parse(p: Parser[Unit]): Boolean =
        {
          parse(p, rest) match {
            case Success(_, next) => { rest = next; true }
            case _ => false
          }
        }
        var depth = 0
        var finished = false
        while (!finished) {
          if (try_parse(comment_open)) depth += 1
          else if (depth > 0 && try_parse(comment_close)) depth -= 1
          else finished = true
        }
        if (in.offset < rest.offset && depth == 0)
          Success(in.source.subSequence(in.offset, rest.offset).toString, rest)
        else Failure("comment expected", in)
      }
    }.named("comment")

    def comment_content(source: String): String =
    {
      require(parseAll(comment, source).successful)
      source.substring(2, source.length - 2)
    }

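    /*
      Sketch (input assumed for illustration): open and close delimiters are
      counted, so comments nest properly; comment_content strips only the
      outermost "(*" and "*)".

        comment_content("(* a (* b *) c *)")   // " a (* b *) c "
    */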

    /* outer syntax tokens */

    def token(symbols: Symbol.Interpretation, is_command: String => Boolean):
      Parser[Outer_Lex.Token] =
    {
      import Outer_Lex.Token_Kind, Outer_Lex.Token

      val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y }
      val nat = many1(symbols.is_digit)
      val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }

      val ident = id ~ rep("." ~> id) ^^
        { case x ~ Nil => Token(Token_Kind.IDENT, x)
          case x ~ ys => Token(Token_Kind.LONG_IDENT, (x :: ys).mkString(".")) }

      val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.VAR, x + y) }
      val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token_Kind.TYPE_IDENT, x + y) }
      val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.TYPE_VAR, x + y) }
      val nat_ = nat ^^ (x => Token(Token_Kind.NAT, x))

      val sym_ident =
        (many1(symbols.is_symbolic_char) |
          one(sym => symbols.is_symbolic(sym) & Symbol.is_closed(sym))) ^^
        (x => Token(Token_Kind.SYM_IDENT, x))

      val space = many1(symbols.is_blank) ^^ (x => Token(Token_Kind.SPACE, x))

      val string = quoted("\"") ^^ (x => Token(Token_Kind.STRING, x))
      val alt_string = quoted("`") ^^ (x => Token(Token_Kind.ALT_STRING, x))

      val junk = many1(sym => !(symbols.is_blank(sym)))
      val bad_delimiter =
        ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token_Kind.BAD_INPUT, x + y) }
      val bad = junk ^^ (x => Token(Token_Kind.BAD_INPUT, x))


      /* tokens */

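      /*
        Note on the combination below (keyword "in" assumed for illustration):
        delimited tokens (strings, verbatim, comments) come first; "|||" keeps
        the longer of the identifier and keyword matches, so the input "intro"
        yields one IDENT token rather than the keyword "in" plus "tro".
      */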
      (space | (string | (alt_string | (verbatim ^^ (x => Token(Token_Kind.VERBATIM, x)) |
        comment ^^ (x => Token(Token_Kind.COMMENT, x)))))) |
      bad_delimiter |
      ((ident | (var_ | (type_ident | (type_var | (nat_ | sym_ident))))) |||
        keyword ^^ (x => Token(if (is_command(x)) Token_Kind.COMMAND else Token_Kind.KEYWORD, x))) |
      bad
    }
  }



  /** read file without decoding -- enables efficient length operation **/

  private class Restricted_Seq(seq: RandomAccessSeq[Char], start: Int, end: Int)
    extends CharSequence
  {
    def charAt(i: Int): Char =
      if (0 <= i && i < length) seq(start + i)
      else throw new IndexOutOfBoundsException

    def length: Int = end - start  // avoid potentially expensive seq.length

    def subSequence(i: Int, j: Int): CharSequence =
      if (0 <= i && i <= j && j <= length) new Restricted_Seq(seq, start + i, start + j)
      else throw new IndexOutOfBoundsException

    override def toString: String =
    {
      val buf = new StringBuilder(length)
      for (offset <- start until end) buf.append(seq(offset))
      buf.toString
    }
  }

  abstract class Byte_Reader extends Reader[Char] { def close: Unit }

  def byte_reader(file: File): Byte_Reader =
  {
    val stream = new BufferedInputStream(new FileInputStream(file))
    val seq = new PagedSeq(
      (buf: Array[Char], offset: Int, length: Int) =>
        {
          var i = 0
          var c = 0
          var eof = false
          while (!eof && i < length) {
            c = stream.read
            if (c == -1) eof = true
            else { buf(offset + i) = c.toChar; i += 1 }
          }
          if (i > 0) i else -1
        })
    val restricted_seq = new Restricted_Seq(seq, 0, file.length.toInt)

    class Paged_Reader(override val offset: Int) extends Byte_Reader
    {
      override lazy val source: CharSequence = restricted_seq
      def first: Char = if (seq.isDefinedAt(offset)) seq(offset) else '\032'
      def rest: Paged_Reader = if (seq.isDefinedAt(offset)) new Paged_Reader(offset + 1) else this
      def pos: InputPosition = new OffsetPosition(source, offset)
      def atEnd: Boolean = !seq.isDefinedAt(offset)
      override def drop(n: Int): Paged_Reader = new Paged_Reader(offset + n)
      def close { stream.close }
    }
    new Paged_Reader(0)
  }
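  /*
    Usage sketch (file name and lexicon assumed for illustration): every byte
    is mapped to one Char without decoding, so source.length equals the file
    length; the caller is responsible for closing the underlying stream.

      val reader = byte_reader(new File("ROOT.ML"))
      try { lexicon.parse(lexicon.keyword, reader) } finally { reader.close }
  */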
}