simplified Symbol based on lazy Symbol.Interpretation -- reduced odd "functorial style";
tuned implicit build/init messages;
1 /* Title: Pure/General/scan.scala
4 Efficient scanning of keywords and tokens.
10 import scala.collection.generic.Addable
11 import scala.collection.IndexedSeq
12 import scala.collection.immutable.PagedSeq
13 import scala.util.parsing.input.{OffsetPosition, Position => InputPosition, Reader}
14 import scala.util.parsing.combinator.RegexParsers
16 import java.io.{File, InputStream, BufferedInputStream, FileInputStream}
/** context of partial scans **/

// Scan context: records where an incomplete (chunk-spanning) scan stopped,
// so tokenization of the next chunk can resume in the right mode.
sealed abstract class Context
// scan ended outside any delimited token
case object Finished extends Context
// inside a quoted literal, delimited by the given quote string (e.g. "\"" or "`")
case class Quoted(quote: String) extends Context
// inside {* ... *} verbatim text
case object Verbatim extends Context
// inside an (* ... *) comment, nested to the given depth
case class Comment(depth: Int) extends Context
/** Lexicon -- position tree **/

// Position tree over characters: each branch maps a character to
// (the completed keyword ending at that node, or "" if none) together
// with the subtree of longer keywords sharing this prefix.
// NOTE(review): the enclosing `object Lexicon` header is elided in this excerpt.
protected case class Tree(val branches: Map[Char, (String, Tree)])
private val empty_tree = Tree(Map())
// the empty lexicon; Lexicon(elems: _*) builds one via repeated `+`
val empty: Lexicon = new Lexicon
def apply(elems: String*): Lexicon = empty ++ elems
// Lexicon: efficient scanner for a fixed keyword set, doubling as a
// RegexParsers base for the token parsers below.
// NOTE(review): the class body continues past this excerpt; intervening
// lines (opening brace etc.) are elided.
class Lexicon extends Addable[String, Lexicon] with RegexParsers
  // root of the keyword position tree; `+` overrides this in a subclass copy
  protected val main_tree: Tree = Lexicon.empty_tree
50 /* auxiliary operations */
// Collect all completed keywords stored in the given subtree, prepending
// them (in branch-traversal order) onto `result`.
private def content(tree: Tree, result: List[String]): List[String] =
  tree.branches.toList.foldLeft(result) {
    case (acc, (_, (s, tr))) =>
      if (s.isEmpty) content(tr, acc) else content(tr, s :: acc)
  }
// Walk the position tree along `str`; yields Some(tip, subtree) where `tip`
// says whether the traversed prefix is itself a completed keyword.
// NOTE(review): excerpt is elided here -- the length guard around the match
// and its None branch are not visible.
private def lookup(str: CharSequence): Option[(Boolean, Tree)] =
  def look(tree: Tree, tip: Boolean, i: Int): Option[(Boolean, Tree)] =
      tree.branches.get(str.charAt(i)) match {
        // non-empty s marks the prefix seen so far as a completed keyword
        case Some((s, tr)) => look(tr, !s.isEmpty, i + 1)
    } else Some(tip, tree)
  look(main_tree, false, 0)
// All keywords extending the given prefix; includes the prefix itself when
// it is a complete keyword (the Some(true, _) case).
// NOTE(review): the `lookup(str) match {` header and the None case are
// elided in this excerpt.
def completions(str: CharSequence): List[String] =
    case Some((true, tree)) => content(tree, List(str.toString))
    case Some((false, tree)) => content(tree, Nil)
/* pseudo Set methods */

// all keywords in ascending lexicographic order
def iterator: Iterator[String] = content(main_tree, Nil).sortWith(_ <= _).iterator

override def toString: String = iterator.mkString("Lexicon(", ", ", ")")

def empty: Lexicon = Lexicon.empty
def isEmpty: Boolean = main_tree.branches.isEmpty
// membership test: `elem` is present iff lookup reaches a keyword-tip node
// NOTE(review): the `lookup(elem) match {` header and the false branch are
// elided in this excerpt.
def contains(elem: String): Boolean =
    case Some((tip, _)) => tip
// Add a keyword, returning an extended lexicon (this one is unchanged);
// adding an already-present element returns `this` unchanged.
// NOTE(review): several lines are elided in this excerpt -- the else branch
// header, a `val old = this` style binding (the `old` below), the branch
// update expressions around lines L53/L54, and closing braces.
def + (elem: String): Lexicon =
  if (contains(elem)) this
    val len = elem.length
    // extend the subtree for elem.substring(i); the final node stores elem itself
    def extend(tree: Tree, i: Int): Tree =
        val c = elem.charAt(i)
        val end = (i + 1 == len)
        tree.branches.get(c) match {
          case Some((s, tr)) =>
            // existing branch: keep its keyword unless this node ends elem
            (c -> (if (end) elem else s, extend(tr, i + 1))))
            // fresh branch: no keyword at intermediate nodes
            (c -> (if (end) elem else "", extend(Lexicon.empty_tree, i + 1))))
    // anonymous subclass carries the extended tree
    new Lexicon { override val main_tree = extend(old.main_tree, 0) }
/** RegexParsers methods **/

// disable implicit whitespace skipping: blanks are scanned as explicit tokens
override val whiteSpace = "".r
/* optional termination */

// Parse p when possible; otherwise succeed with None, but only exactly at
// the end of input (the """\z""" regex matches the end of the sequence).
def opt_term[T](p: => Parser[T]): Parser[Option[T]] =
  p ^^ (Some(_)) | """\z""".r ^^^ None
/* keywords from lexicon */

// Parser scanning the longest lexicon keyword at the current input position;
// fails with "keyword expected" when none matches.
// NOTE(review): elided lines include the parser body braces, the bounds
// check (i < len) inside scan, and the None branch of the match.
def keyword: Parser[String] = new Parser[String]
  def apply(in: Input) =
      val source = in.source
      val offset = in.offset
      val len = source.length - offset
      // follow the position tree, remembering the last completed keyword seen
      def scan(tree: Tree, result: String, i: Int): String =
          tree.branches.get(source.charAt(offset + i)) match {
            case Some((s, tr)) => scan(tr, if (s.isEmpty) result else s, i + 1)
      val result = scan(main_tree, "", 0)
      if (result.isEmpty) Failure("keyword expected", in)
      else Success(result, in.drop(result.length))
// Scan between min_count and max_count symbols (via Symbol.Matcher, which
// measures the char-length of each symbol) satisfying pred; fails with
// "bad input" when fewer than min_count are found.
// NOTE(review): elided lines include the parser construction, the mutable
// i/count declarations, the loop header, and the loop-exit branch.
def symbol_range(pred: String => Boolean, min_count: Int, max_count: Int): Parser[String] =
  def apply(in: Input) =
      val start = in.offset
      val end = in.source.length
      val matcher = new Symbol.Matcher(in.source)
        if (i < end && count < max_count) {
          val n = matcher(i, end)  // char-length of the next symbol
          val sym = in.source.subSequence(i, i + n).toString
          if (pred(sym)) { i += n; count += 1 }
      if (count < min_count) Failure("bad input", in)
      else Success(in.source.subSequence(start, i).toString, in.drop(i - start))
  }.named("symbol_range")
// exactly one / zero-or-more / one-or-more symbols satisfying pred
def one(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, 1)
def many(pred: String => Boolean): Parser[String] = symbol_range(pred, 0, Integer.MAX_VALUE)
def many1(pred: String => Boolean): Parser[String] = symbol_range(pred, 1, Integer.MAX_VALUE)
// Body of a quoted literal: runs of plain symbols, an escaped quote, an
// escaped backslash, or a 3-digit decimal escape whose value is <= 255.
private def quoted_body(quote: String): Parser[String] =
  rep(many1(sym => sym != quote && sym != "\\") | "\\" + quote | "\\\\" |
    (("""\\\d\d\d""".r) ^? { case x if x.substring(1, 4).toInt <= 255 => x })) ^^ (_.mkString)
// full quoted token: opening quote, body, closing quote (delimiters retained)
private def quoted(quote: String): Parser[String] =
  quote ~ quoted_body(quote) ~ quote ^^ { case x ~ y ~ z => x + y + z }
// Strip the delimiting quotes from a well-formed quoted token and decode
// the backslash escapes in its body.
// NOTE(review): elided lines include the `val content =` binding for the
// rep(...) parser below and the fast-path else branch returning the body
// unchanged when it has no backslash.
def quoted_content(quote: String, source: String): String =
  require(parseAll(quoted(quote), source).successful)
  val body = source.substring(1, source.length - 1)
  if (body.exists(_ == '\\')) {
      rep(many1(sym => sym != quote && sym != "\\") |
        "\\" ~> (quote | "\\" | """\d\d\d""".r ^^ { case x => x.toInt.toChar.toString }))
    parseAll(content ^^ (_.mkString), body).get
// Scan a (possibly unterminated) quoted token while threading the scan
// Context: from Finished start a new quotation; inside a matching Quoted
// context continue its body; anything else fails.
// NOTE(review): the `ctxt match {` header and the `case Finished =>` label
// are elided in this excerpt.
def quoted_context(quote: String, ctxt: Context): Parser[(String, Context)] =
      quote ~ quoted_body(quote) ~ opt_term(quote) ^^
      { case x ~ y ~ Some(z) => (x + y + z, Finished)  // closed within this chunk
        case x ~ y ~ None => (x + y, Quoted(quote)) }  // still open: remember the quote
    case Quoted(q) if q == quote =>
      quoted_body(quote) ~ opt_term(quote) ^^
      { case x ~ Some(y) => (x + y, Finished)
        case x ~ None => (x, ctxt) }
    case _ => failure("")
  }.named("quoted_context")
// Verbatim body: symbols other than "*", plus any "*" not followed by "}"
// (the negative lookahead keeps the closing delimiter out of the body).
private def verbatim_body: Parser[String] =
  rep(many1(_ != "*") | """\*(?!\})""".r) ^^ (_.mkString)
// full verbatim token including the {* and *} delimiters
private def verbatim: Parser[String] =
  "{*" ~ verbatim_body ~ "*}" ^^ { case x ~ y ~ z => x + y + z }
// strip the two-character delimiters from a well-formed verbatim token
def verbatim_content(source: String): String =
  require(parseAll(verbatim, source).successful)
  source.substring(2, source.length - 2)
// Scan (possibly unterminated) verbatim text while threading the scan
// Context, analogous to quoted_context.
// NOTE(review): the `ctxt match {` header and the Finished / Verbatim case
// labels are elided in this excerpt.
def verbatim_context(ctxt: Context): Parser[(String, Context)] =
      "{*" ~ verbatim_body ~ opt_term("*}") ^^
      { case x ~ y ~ Some(z) => (x + y + z, Finished)  // closed within this chunk
        case x ~ y ~ None => (x + y, Verbatim) }       // still open
      verbatim_body ~ opt_term("*}") ^^
      { case x ~ Some(y) => (x + y, Finished)
        case x ~ None => (x, Verbatim) }
    case _ => failure("")
  }.named("verbatim_context")
/* nested comments */

// Scan a chunk of (possibly nested, possibly unterminated) (* ... *) comment
// text starting at nesting depth `depth`; yields the consumed text together
// with the resulting depth.
// NOTE(review): heavily elided -- the parser braces, the binding of
// comment_text to the rep1(...) below, the mutable `rest`/`d`/`finished`
// state, the failure branch of try_parse, and the loop header are all
// missing from this excerpt.
private def comment_depth(depth: Int): Parser[(String, Int)] = new Parser[(String, Int)]
    // text atoms: runs without "*"/"(", plus "*" not before ")" and "(" not before "*"
    rep1(many1(sym => sym != "*" && sym != "(") | """\*(?!\))|\((?!\*)""".r)
  def apply(in: Input) =
      // speculative parse: advance `rest` on success, leave it on failure
      def try_parse[A](p: Parser[A]): Boolean =
        parse(p ^^^ (), rest) match {
          case Success(_, next) => { rest = next; true }
        if (try_parse("(*")) d += 1
        else if (d > 0 && try_parse("*)")) d -= 1
        else if (d == 0 || !try_parse(comment_text)) finished = true
      if (in.offset < rest.offset)
        Success((in.source.subSequence(in.offset, rest.offset).toString, d), rest)
      else Failure("comment expected", in)
  }.named("comment_depth")
// a complete comment: the scan must bring the nesting depth back to zero
def comment: Parser[String] =
  comment_depth(0) ^? { case (x, 0) => x }
// Scan comment text in context: resume at the depth recorded in a Comment
// context, mapping the final depth back into a Context.
// NOTE(review): the `ctxt match` header, case labels, and the binding of
// `depth` are elided; only the shared result mapping is visible.
def comment_context(ctxt: Context): Parser[(String, Context)] =
      comment_depth(depth) ^^
      { case (x, 0) => (x, Finished)
        case (x, d) => (x, Comment(d)) }
// strip the "(*" and "*)" delimiters from a well-formed comment
def comment_content(source: String): String =
  require(parseAll(comment, source).successful)
  source.substring(2, source.length - 2)
/* outer syntax tokens */

// tokens with explicit delimiters: strings, alt-strings, verbatim, comments
private def delimited_token: Parser[Token] =
  val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
  val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
  val verb = verbatim ^^ (x => Token(Token.Kind.VERBATIM, x))
  val cmt = comment ^^ (x => Token(Token.Kind.COMMENT, x))

  string | (alt_string | (verb | cmt))
// All remaining token forms: identifiers, variables, numerals, symbolic
// identifiers, keywords/commands, white space, and malformed input.
// NOTE(review): elided lines include the signature continuation / opening
// brace and the `val float =` / `val sym_ident =` / `val bad_delimiter =`
// headers for the expressions visible below.
private def other_token(is_command: String => Boolean)
  val id = one(Symbol.is_letter) ~ many(Symbol.is_letdig) ^^ { case x ~ y => x + y }
  val nat = many1(Symbol.is_digit)
  val natdot = nat ~ "." ~ nat ^^ { case x ~ y ~ z => x + y + z }
  val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }

  // dotted identifier chains become long identifiers
  val ident = id ~ rep("." ~> id) ^^
    { case x ~ Nil => Token(Token.Kind.IDENT, x)
      case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }

  val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
  val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
  val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
  val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
  // float literal: optional leading minus before nat.nat
  ("-" ~ natdot ^^ { case x ~ y => x + y } | natdot) ^^ (x => Token(Token.Kind.FLOAT, x))
  (many1(Symbol.is_symbolic_char) | one(sym => Symbol.is_symbolic(sym))) ^^
    (x => Token(Token.Kind.SYM_IDENT, x))

  val space = many1(Symbol.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))

  // error recovery: consume non-blank material as UNPARSED
  val junk = many(sym => !(Symbol.is_blank(sym)))
  val junk1 = many1(sym => !(Symbol.is_blank(sym)))

  // an opening delimiter with no proper token swallows the junk after it
  ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token.Kind.UNPARSED, x + y) }
  val bad = junk1 ^^ (x => Token(Token.Kind.UNPARSED, x))

  val command_keyword =
    keyword ^^ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))

  // ||| takes the longest match between plain token forms and keywords
  space | (bad_delimiter |
    (((ident | (var_ | (type_ident | (type_var | (float | (nat_ | sym_ident)))))) |||
      command_keyword) | bad))
// one complete outer-syntax token; delimited forms take precedence
def token(is_command: String => Boolean): Parser[Token] =
  delimited_token | other_token(is_command)
// Scan a single token while threading the scan Context across chunk
// boundaries (strings/verbatim/comments may span chunks).
// NOTE(review): the opening brace and the `val string =` / `val alt_string =`
// binding headers for the first two expressions are elided.
def token_context(is_command: String => Boolean, ctxt: Context): Parser[(Token, Context)] =
    quoted_context("\"", ctxt) ^^ { case (x, c) => (Token(Token.Kind.STRING, x), c) }
    quoted_context("`", ctxt) ^^ { case (x, c) => (Token(Token.Kind.ALT_STRING, x), c) }
  val verb = verbatim_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.VERBATIM, x), c) }
  val cmt = comment_context(ctxt) ^^ { case (x, c) => (Token(Token.Kind.COMMENT, x), c) }
  // non-delimited tokens never span chunks, so their context is Finished
  val other = other_token(is_command) ^^ { case x => (x, Finished) }

  string | (alt_string | (verb | (cmt | other)))
/** read file without decoding -- enables efficient length operation **/

// CharSequence view of the window [start, end) of an IndexedSeq[Char];
// length comes from the bounds, avoiding seq.length (which could force a
// lazily-filled sequence -- see the inline comment below).
// NOTE(review): the `extends CharSequence` clause / opening brace and the
// tail of toString (returning buf's contents) are elided in this excerpt.
private class Restricted_Seq(seq: IndexedSeq[Char], start: Int, end: Int)
  def charAt(i: Int): Char =
    if (0 <= i && i < length) seq(start + i)
    else throw new IndexOutOfBoundsException
  def length: Int = end - start // avoid potentially expensive seq.length
  def subSequence(i: Int, j: Int): CharSequence =
    if (0 <= i && i <= j && j <= length) new Restricted_Seq(seq, start + i, start + j)
    else throw new IndexOutOfBoundsException
  override def toString: String =
      val buf = new StringBuilder(length)
      for (offset <- start until end) buf.append(seq(offset))
// character reader over raw bytes that must be closed by the caller
abstract class Byte_Reader extends Reader[Char] { def close: Unit }
// Reader over the raw bytes of a file, each byte widened to a Char without
// decoding; backed by a lazily-filled PagedSeq over a buffered stream, then
// restricted to exactly file.length chars.
// NOTE(review): elided lines include the PagedSeq fill-function's local
// state (eof, i), the stream.read() binding for `c`, the fill result, and
// the trailing construction of the initial Paged_Reader.
def byte_reader(file: File): Byte_Reader =
  val stream = new BufferedInputStream(new FileInputStream(file))
  val seq = new PagedSeq(
    (buf: Array[Char], offset: Int, length: Int) =>
        while (!eof && i < length) {
          if (c == -1) eof = true
          else { buf(offset + i) = c.toChar; i += 1 }
  // window of exactly the file's byte count
  val restricted_seq = new Restricted_Seq(seq, 0, file.length.toInt)

  class Paged_Reader(override val offset: Int) extends Byte_Reader
    override lazy val source: CharSequence = restricted_seq
    // '\032' (SUB) serves as the end-of-input sentinel character
    def first: Char = if (seq.isDefinedAt(offset)) seq(offset) else '\032'
    def rest: Paged_Reader = if (seq.isDefinedAt(offset)) new Paged_Reader(offset + 1) else this
    def pos: InputPosition = new OffsetPosition(source, offset)
    def atEnd: Boolean = !seq.isDefinedAt(offset)
    override def drop(n: Int): Paged_Reader = new Paged_Reader(offset + n)
    def close { stream.close }