9 |
9 |
10 import scala.util.parsing.input.{Reader, CharSequenceReader} |
10 import scala.util.parsing.input.{Reader, CharSequenceReader} |
11 import scala.collection.mutable |
11 import scala.collection.mutable |
12 |
12 |
13 |
13 |
/** Outer syntax: table of declared keywords with their kinds, the derived
  * scanner lexicon, and the completion context built from them.
  * Extended immutably via `+` (see below), which overrides these members. */
class Outer_Syntax
{
  // keyword name -> keyword kind; ";" is pre-declared with the diagnostic kind
  protected val keywords: Map[String, String] = Map((";" -> Keyword.DIAG))
  // scanner lexicon over the declared keywords; empty until keywords are added
  protected val lexicon: Scan.Lexicon = Scan.Lexicon.empty
  // lazy so anonymous subclasses created by `+` can override it before first use
  lazy val completion: Completion = (new Completion).add_symbols // FIXME odd initialization
19 |
19 |
20 def keyword_kind(name: String): Option[String] = keywords.get(name) |
20 def keyword_kind(name: String): Option[String] = keywords.get(name) |
21 |
21 |
22 def + (name: String, kind: String, replace: String): Outer_Syntax = |
22 def + (name: String, kind: String, replace: String): Outer_Syntax = |
23 { |
23 { |
24 val new_keywords = keywords + (name -> kind) |
24 val new_keywords = keywords + (name -> kind) |
25 val new_lexicon = lexicon + name |
25 val new_lexicon = lexicon + name |
26 val new_completion = completion + (name, replace) |
26 val new_completion = completion + (name, replace) |
27 new Outer_Syntax(symbols) { |
27 new Outer_Syntax { |
28 override val lexicon = new_lexicon |
28 override val lexicon = new_lexicon |
29 override val keywords = new_keywords |
29 override val keywords = new_keywords |
30 override lazy val completion = new_completion |
30 override lazy val completion = new_completion |
31 } |
31 } |
32 } |
32 } |
64 |
64 |
65 def scan(input: Reader[Char]): List[Token] = |
65 def scan(input: Reader[Char]): List[Token] = |
66 { |
66 { |
67 import lexicon._ |
67 import lexicon._ |
68 |
68 |
69 parseAll(rep(token(symbols, is_command)), input) match { |
69 parseAll(rep(token(is_command)), input) match { |
70 case Success(tokens, _) => tokens |
70 case Success(tokens, _) => tokens |
71 case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString) |
71 case _ => error("Unexpected failure of tokenizing input:\n" + input.source.toString) |
72 } |
72 } |
73 } |
73 } |
74 |
74 |
81 |
81 |
82 var in: Reader[Char] = new CharSequenceReader(input) |
82 var in: Reader[Char] = new CharSequenceReader(input) |
83 val toks = new mutable.ListBuffer[Token] |
83 val toks = new mutable.ListBuffer[Token] |
84 var ctxt = context |
84 var ctxt = context |
85 while (!in.atEnd) { |
85 while (!in.atEnd) { |
86 parse(token_context(symbols, is_command, ctxt), in) match { |
86 parse(token_context(is_command, ctxt), in) match { |
87 case Success((x, c), rest) => { toks += x; ctxt = c; in = rest } |
87 case Success((x, c), rest) => { toks += x; ctxt = c; in = rest } |
88 case NoSuccess(_, rest) => |
88 case NoSuccess(_, rest) => |
89 error("Unexpected failure of tokenizing input:\n" + rest.source.toString) |
89 error("Unexpected failure of tokenizing input:\n" + rest.source.toString) |
90 } |
90 } |
91 } |
91 } |