src/Pure/Isar/parse.scala
changeset 36948 d2cdad45fd14
parent 34314 f0a6f02ad705
child 36966 21be4832c362
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/src/Pure/Isar/parse.scala	Sat May 15 22:15:57 2010 +0200
     1.3 @@ -0,0 +1,77 @@
     1.4 +/*  Title:      Pure/Isar/parse.scala
     1.5 +    Author:     Makarius
     1.6 +
     1.7 +Generic parsers for Isabelle/Isar outer syntax.
     1.8 +*/
     1.9 +
    1.10 +package isabelle
    1.11 +
    1.12 +import scala.util.parsing.combinator.Parsers
    1.13 +
    1.14 +
    1.15 +object Parse
    1.16 +{
    1.17 +  /* parsing tokens */
    1.18 +
    1.19 +  trait Parser extends Parsers
    1.20 +  {
    1.21 +    type Elem = Outer_Lex.Token
    1.22 +
    1.23 +    def filter_proper = true
    1.24 +
    1.25 +    private def proper(in: Input): Input =
    1.26 +      if (in.atEnd || !in.first.is_ignored || !filter_proper) in
    1.27 +      else proper(in.rest)
    1.28 +
    1.29 +    def token(s: String, pred: Elem => Boolean): Parser[Elem] = new Parser[Elem]
    1.30 +    {
    1.31 +      def apply(raw_input: Input) =
    1.32 +      {
    1.33 +        val in = proper(raw_input)
    1.34 +        if (in.atEnd) Failure(s + " expected (past end-of-file!)", in)
    1.35 +        else {
    1.36 +          val token = in.first
    1.37 +          if (pred(token)) Success(token, proper(in.rest))
    1.38 +          else
    1.39 +            token.text match {
    1.40 +              case (txt, "") =>
    1.41 +                Failure(s + " expected,\nbut " + txt + " was found", in)
    1.42 +              case (txt1, txt2) =>
    1.43 +                Failure(s + " expected,\nbut " + txt1 + " was found:\n" + txt2, in)
    1.44 +            }
    1.45 +        }
    1.46 +      }
    1.47 +    }
    1.48 +
    1.49 +    def not_eof: Parser[Elem] = token("input token", _ => true)
    1.50 +    def eof: Parser[Unit] = not(not_eof)
    1.51 +
    1.52 +    def atom(s: String, pred: Elem => Boolean): Parser[String] =
    1.53 +      token(s, pred) ^^ (_.content)
    1.54 +
    1.55 +    def keyword(name: String): Parser[String] =
    1.56 +      atom(Outer_Lex.Token_Kind.KEYWORD.toString + " \"" + name + "\"",
    1.57 +        tok => tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == name)
    1.58 +
    1.59 +    def name: Parser[String] = atom("name declaration", _.is_name)
    1.60 +    def xname: Parser[String] = atom("name reference", _.is_xname)
    1.61 +    def text: Parser[String] = atom("text", _.is_text)
    1.62 +    def ML_source: Parser[String] = atom("ML source", _.is_text)
    1.63 +    def doc_source: Parser[String] = atom("document source", _.is_text)
    1.64 +    def path: Parser[String] = atom("file name/path specification", _.is_name)
    1.65 +
    1.66 +    private def tag_name: Parser[String] =
    1.67 +      atom("tag name", tok =>
    1.68 +          tok.kind == Outer_Lex.Token_Kind.IDENT ||
    1.69 +          tok.kind == Outer_Lex.Token_Kind.STRING)
    1.70 +
    1.71 +    def tags: Parser[List[String]] = rep(keyword("%") ~> tag_name)
    1.72 +
    1.73 +
    1.74 +    /* wrappers */
    1.75 +
    1.76 +    def parse[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = p(in)
    1.77 +    def parse_all[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = parse(phrase(p), in)
    1.78 +  }
    1.79 +}
    1.80 +