1 /* Title: Pure/General/scan.scala
4 Efficient scanning of keywords.
10 import scala.collection.immutable.PagedSeq
11 import scala.util.parsing.input.{OffsetPosition, Position => InputPosition, Reader}
12 import scala.util.parsing.combinator.RegexParsers
14 import java.io.{File, InputStream, BufferedInputStream, FileInputStream}
19 /** Lexicon -- position tree **/
/* Representation: prefix tree of keyword characters.  Each branch maps the
   next character to the keyword completed at that point ("" if none) and
   the subtree of longer keywords.  (Case-class parameters are vals already;
   the redundant "val" modifier is dropped.) */
private case class Tree(branches: Map[Char, (String, Tree)])
private val empty_tree = Tree(Map())

val empty: Lexicon = new Lexicon
def apply(elems: String*): Lexicon = empty ++ elems
30 class Lexicon extends scala.collection.Set[String] with RegexParsers
// root of the keyword prefix tree; "+" produces a new Lexicon overriding this
protected val main_tree: Tree = Lexicon.empty_tree


/* auxiliary operations */
/* collect all keywords stored below tree, prepending them to result */
private def content(tree: Tree, result: List[String]): List[String] =
  tree.branches.toList.foldLeft(result) { (res, entry) =>
    val (_, (s, tr)) = entry
    if (s.isEmpty) content(tr, res) else content(tr, s :: res)
  }
/* descend along str: yields the reached subtree, with a flag telling
   whether str itself is a stored keyword */
private def lookup(str: CharSequence): Option[(Boolean, Tree)] =
{
  val len = str.length
  def look(tree: Tree, tip: Boolean, i: Int): Option[(Boolean, Tree)] =
    if (i < len) {
      tree.branches.get(str.charAt(i)) match {
        case Some((s, tr)) => look(tr, !s.isEmpty, i + 1)
        case None => None
      }
    }
    else Some(tip, tree)
  look(main_tree, false, 0)
}
/* all stored keywords that have str as a prefix (including str itself) */
def completions(str: CharSequence): List[String] =
  lookup(str) match {
    case Some((true, tree)) => content(tree, List(str.toString))
    case Some((false, tree)) => content(tree, Nil)
    case None => Nil
  }
/* Set[String] interface */

override def stringPrefix = "Lexicon"

override def isEmpty: Boolean = main_tree.branches.isEmpty

def size: Int = content(main_tree, Nil).length
def elements: Iterator[String] = content(main_tree, Nil).sort(_ <= _).elements

def contains(elem: String): Boolean =
  lookup(elem) match {
    case Some((tip, _)) => tip
    case None => false
  }
/* add a single keyword, extending the prefix tree character by character */
def + (elem: String): Lexicon =
  if (contains(elem)) this
  else {
    val len = elem.length
    def extend(tree: Tree, i: Int): Tree =
      if (i < len) {
        val c = elem.charAt(i)
        val end = (i + 1 == len)
        tree.branches.get(c) match {
          case Some((s, tr)) =>
            Tree(tree.branches +
              (c -> (if (end) elem else s, extend(tr, i + 1))))
          case None =>
            Tree(tree.branches +
              (c -> (if (end) elem else "", extend(Lexicon.empty_tree, i + 1))))
        }
      }
      else tree
    val old = this
    new Lexicon { override val main_tree = extend(old.main_tree, 0) }
  }
/* bulk insertion of keywords */

def + (elem1: String, elem2: String, elems: String*): Lexicon =
  this + elem1 + elem2 ++ elems

def ++ (elems: Iterable[String]): Lexicon =
  elems.foldLeft(this)((lex, elem) => lex + elem)

def ++ (elems: Iterator[String]): Lexicon =
  elems.foldLeft(this)((lex, elem) => lex + elem)
/** RegexParsers methods **/

// disable implicit whitespace skipping: blanks are scanned explicitly as tokens
override val whiteSpace = "".r
117 /* keywords from lexicon */
/* longest-match scan of a keyword from the lexicon */
def keyword: Parser[String] = new Parser[String]
{
  def apply(in: Input) =
  {
    val source = in.source
    val offset = in.offset
    val len = source.length - offset

    // follow the prefix tree, remembering the longest keyword seen so far
    def scan(tree: Tree, result: String, i: Int): String =
      if (i < len) {
        tree.branches.get(source.charAt(offset + i)) match {
          case Some((s, tr)) => scan(tr, if (s.isEmpty) result else s, i + 1)
          case None => result
        }
      }
      else result

    val result = scan(main_tree, "", 0)
    if (result.isEmpty) Failure("keyword expected", in)
    else Success(result, in.drop(result.length))
  }
}.named("keyword")
/* scan between min_count and max_count symbols satisfying pred */
def symbol_range(pred: String => Boolean, min_count: Int, max_count: Int): Parser[String] =
  new Parser[String]
  {
    def apply(in: Input) =
    {
      val start = in.offset
      val end = in.source.length
      val matcher = new Symbol.Matcher(in.source)

      var i = start
      var count = 0
      var matched = true
      while (matched && i < end && count < max_count) {
        val n = matcher(i, end)
        val sym = in.source.subSequence(i, i + n).toString
        if (pred(sym)) { i += n; count += 1 }
        else matched = false
      }

      if (count < min_count) Failure("bad input", in)
      else Success(in.source.subSequence(start, i).toString, in.drop(i - start))
    }
  }.named("symbol_range")
// symbol parsers over a symbol predicate: exactly one symbol,
// zero or more symbols, at least one symbol
def one(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, 1)
def many(pred: String => Boolean): Parser[String] = symbol_range(pred, 0, Integer.MAX_VALUE)
def many1(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, Integer.MAX_VALUE)
/* delimited strings: body symbols, escaped quote/backslash, or 3-digit escapes <= 255 */
private def quoted(quote: String): Parser[String] =
{
  quote ~
    rep(many1(sym => sym != quote && sym != "\\" && Symbol.is_closed(sym)) |
      "\\" + quote | "\\\\" |
      (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ~
    quote ^^ { case x ~ ys ~ z => x + ys.mkString + z }
}.named("quoted")
/* strip the delimiters of a quoted string and resolve escapes in its body */
def quoted_content(quote: String, source: String): String =
{
  require(parseAll(quoted(quote), source).successful)
  val body = source.substring(1, source.length - 1)
  if (body.exists(_ == '\\')) {
    val content =
      rep(many1(sym => sym != quote && sym != "\\" && Symbol.is_closed(sym)) |
        "\\" ~> (quote | "\\" | """\d\d\d""".r ^^ { case x => x.toInt.toChar.toString }))
    parseAll(content ^^ (_.mkString), body).get
  }
  else body
}
/* verbatim text, delimited by {* ... *} */
private def verbatim: Parser[String] =
{
  "{*" ~ rep(many1(sym => sym != "*" && Symbol.is_closed(sym)) | """\*(?!\})""".r) ~ "*}" ^^
    { case x ~ ys ~ z => x + ys.mkString + z }
}.named("verbatim")
/* strip the {* *} delimiters of well-formed verbatim text */
def verbatim_content(source: String): String =
{
  require(parseAll(verbatim, source).successful)
  source.substring(2, source.length - 2)
}
217 /* nested comments */
/* nested (* ... *) comments: succeed only on balanced nesting */
def comment: Parser[String] = new Parser[String]
{
  val comment_text =
    rep(many1(sym => sym != "*" && sym != "(" && Symbol.is_closed(sym)) |
      """\*(?!\))|\((?!\*)""".r)
  val comment_open = "(*" ~ comment_text ^^^ ()
  val comment_close = comment_text ~ "*)" ^^^ ()

  def apply(in: Input) =
  {
    var rest = in
    def try_parse(p: Parser[Unit]): Boolean =
    {
      parse(p, rest) match {
        case Success(_, next) => { rest = next; true }
        case _ => false
      }
    }

    // track nesting depth while consuming openings and closings
    var depth = 0
    var advancing = true
    while (advancing) {
      if (try_parse(comment_open)) depth += 1
      else if (depth > 0 && try_parse(comment_close)) depth -= 1
      else advancing = false
    }
    if (in.offset < rest.offset && depth == 0)
      Success(in.source.subSequence(in.offset, rest.offset).toString, rest)
    else Failure("comment expected", in)
  }
}.named("comment")
/* strip the (* *) delimiters of a well-formed comment */
def comment_content(source: String): String =
{
  require(parseAll(comment, source).successful)
  source.substring(2, source.length - 2)
}
257 /* outer syntax tokens */
/* scan a single outer-syntax token, with error recovery via BAD_INPUT */
def token(symbols: Symbol.Interpretation, is_command: String => Boolean):
    Parser[Outer_Lex.Token] =
{
  import Outer_Lex.Token_Kind, Outer_Lex.Token

  /* identifier syntax */
  val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y }
  val nat = many1(symbols.is_digit)
  val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }

  val ident = id ~ rep("." ~> id) ^^
    { case x ~ Nil => Token(Token_Kind.IDENT, x)
      case x ~ ys => Token(Token_Kind.LONG_IDENT, (x :: ys).mkString(".")) }

  val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.VAR, x + y) }
  val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token_Kind.TYPE_IDENT, x + y) }
  val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.TYPE_VAR, x + y) }
  val nat_ = nat ^^ (x => Token(Token_Kind.NAT, x))

  // was "&" (non-short-circuit bitwise operator on Booleans); "&&" is the
  // intended logical conjunction and short-circuits the closedness test
  val sym_ident =
    (many1(symbols.is_symbolic_char) |
      one(sym => symbols.is_symbolic(sym) && Symbol.is_closed(sym))) ^^
    (x => Token(Token_Kind.SYM_IDENT, x))

  val space = many1(symbols.is_blank) ^^ (x => Token(Token_Kind.SPACE, x))

  val string = quoted("\"") ^^ (x => Token(Token_Kind.STRING, x))
  val alt_string = quoted("`") ^^ (x => Token(Token_Kind.ALT_STRING, x))

  /* error recovery: an opening delimiter followed by junk, or plain junk */
  val junk = many1(sym => !(symbols.is_blank(sym)))
  val bad_delimiter =
    ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token_Kind.BAD_INPUT, x + y) }
  val bad = junk ^^ (x => Token(Token_Kind.BAD_INPUT, x))

  /* token alternatives, in decreasing priority */
  (space | (string | (alt_string | (verbatim ^^ (x => Token(Token_Kind.VERBATIM, x)) |
    comment ^^ (x => Token(Token_Kind.COMMENT, x)))))) |
  bad_delimiter |
  ((ident | (var_ | (type_ident | (type_var | (nat_ | sym_ident))))) |||
    keyword ^^ (x => Token(if (is_command(x)) Token_Kind.COMMAND else Token_Kind.KEYWORD, x))) |
  bad
}
306 /** read file without decoding -- enables efficient length operation **/
/* CharSequence view of a slice of seq, avoiding seq.length where possible */
private class Restricted_Seq(seq: RandomAccessSeq[Char], start: Int, end: Int)
  extends CharSequence
{
  def charAt(i: Int): Char =
    if (0 <= i && i < length) seq(start + i)
    else throw new IndexOutOfBoundsException

  // avoid potentially expensive seq.length
  def length: Int = end - start

  def subSequence(i: Int, j: Int): CharSequence =
    if (0 <= i && i <= j && j <= length) new Restricted_Seq(seq, start + i, start + j)
    else throw new IndexOutOfBoundsException

  override def toString: String =
  {
    val buf = new StringBuilder(length)
    var offset = start
    while (offset < end) { buf.append(seq(offset)); offset += 1 }
    buf.toString
  }
}
// character reader over a byte stream, with explicit close of the underlying resource
abstract class Byte_Reader extends Reader[Char] { def close: Unit }
331 def byte_reader(file: File): Byte_Reader =
333 val stream = new BufferedInputStream(new FileInputStream(file))
334 val seq = new PagedSeq(
335 (buf: Array[Char], offset: Int, length: Int) =>
340 while (!eof && i < length) {
342 if (c == -1) eof = true
343 else { buf(offset + i) = c.toChar; i += 1 }
347 val restricted_seq = new Restricted_Seq(seq, 0, file.length.toInt)
349 class Paged_Reader(override val offset: Int) extends Byte_Reader
351 override lazy val source: CharSequence = restricted_seq
352 def first: Char = if (seq.isDefinedAt(offset)) seq(offset) else '\032'
353 def rest: Paged_Reader = if (seq.isDefinedAt(offset)) new Paged_Reader(offset + 1) else this
354 def pos: InputPosition = new OffsetPosition(source, offset)
355 def atEnd: Boolean = !seq.isDefinedAt(offset)
356 override def drop(n: Int): Paged_Reader = new Paged_Reader(offset + n)
357 def close { stream.close }