src/Tools/jEdit/src/jedit/isabelle_token_marker.scala
author wenzelm
Tue, 08 Jun 2010 13:51:25 +0200
changeset 37467 5c6695de35ba
parent 37246 04d2521e79b0
child 38453 67fc24df3721
permissions -rw-r--r--
disable set_styles for now -- there are still some race conditions of PropertiesChanged vs. TextArea painting (NB: without it Isabelle_Token_Marker will crash if sub/superscript is actually used);
/*  Title:      Tools/jEdit/src/jedit/isabelle_token_marker.scala
    Author:     Fabian Immler, TU Munich

Include Isabelle's command and keyword declarations live in jEdit's
syntax highlighting.
*/

package isabelle.jedit


import isabelle._

import org.gjt.sp.jedit.buffer.JEditBuffer
import org.gjt.sp.jedit.syntax.{Token, TokenMarker, TokenHandler, ParserRuleSet, SyntaxStyle}
import org.gjt.sp.jedit.textarea.TextArea

import java.awt.Font
import java.awt.font.TextAttribute
import javax.swing.text.Segment


object Isabelle_Token_Marker
{
  /* extended token styles */

  private val plain_range: Int = Token.ID_COUNT
  private val full_range: Int = 3 * plain_range
  private def check_range(i: Int) { require(0 <= i && i < plain_range) }
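
  // Extended style ids come in three bands of Token.ID_COUNT entries each: the
  // plain jEdit styles, then their subscript variants, then their superscript variants.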
  def subscript(i: Byte): Byte = { check_range(i); (i + plain_range).toByte }
  def superscript(i: Byte): Byte = { check_range(i); (i + 2 * plain_range).toByte }
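
  // Derive a sub-/superscript variant of a style: TextAttribute.SUPERSCRIPT value -1
  // lowers the derived font (subscript), +1 raises it (superscript).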
  private def script_style(style: SyntaxStyle, i: Int): SyntaxStyle =
  {
    import scala.collection.JavaConversions._
    val script_font =
      style.getFont.deriveFont(Map(TextAttribute.SUPERSCRIPT -> new java.lang.Integer(i)))
    new SyntaxStyle(style.getForegroundColor, style.getBackgroundColor, script_font)
  }

  def extend_styles(styles: Array[SyntaxStyle]): Array[SyntaxStyle] =
  {
    val new_styles = new Array[SyntaxStyle](full_range)
    for (i <- 0 until plain_range) {
      val style = styles(i)
      new_styles(i) = style
      new_styles(subscript(i.toByte)) = script_style(style, -1)
      new_styles(superscript(i.toByte)) = script_style(style, 1)
    }
    new_styles
  }
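
  // Intended for a TextArea painter's style array (cf. the disabled set_styles call
  // in markTokens below); e.g. subscript(Token.KEYWORD1) then resolves to a lowered
  // variant of the plain KEYWORD1 style.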


  /* line context */

  private val rule_set = new ParserRuleSet("isabelle", "MAIN")
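
  // jEdit threads the previous line's context back into markTokens, so each context
  // only records its own line number, computed from its predecessor.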
  private class LineContext(val line: Int, prev: LineContext)
    extends TokenMarker.LineContext(rule_set, prev)


  /* mapping to jEdit token types */
  // TODO: as properties or CSS style sheet
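
  // Outer-syntax command styles, chosen by keyword kind; any kind not listed here
  // falls back to KEYWORD1.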
  private val command_style: Map[String, Byte] =
  {
    import Token._
    Map[String, Byte](
      Keyword.THY_END -> KEYWORD2,
      Keyword.THY_SCRIPT -> LABEL,
      Keyword.PRF_SCRIPT -> LABEL,
      Keyword.PRF_ASM -> KEYWORD3,
      Keyword.PRF_ASM_GOAL -> KEYWORD3
    ).withDefaultValue(KEYWORD1)
  }
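
  // Styles for semantic markup reported by the prover; unknown markup defaults to NULL.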
  private val token_style: Map[String, Byte] =
  {
    import Token._
    Map[String, Byte](
      // logical entities
      Markup.TCLASS -> NULL,
      Markup.TYCON -> NULL,
      Markup.FIXED_DECL -> FUNCTION,
      Markup.FIXED -> NULL,
      Markup.CONST_DECL -> FUNCTION,
      Markup.CONST -> NULL,
      Markup.FACT_DECL -> FUNCTION,
      Markup.FACT -> NULL,
      Markup.DYNAMIC_FACT -> LABEL,
      Markup.LOCAL_FACT_DECL -> FUNCTION,
      Markup.LOCAL_FACT -> NULL,
      // inner syntax
      Markup.TFREE -> NULL,
      Markup.FREE -> NULL,
      Markup.TVAR -> NULL,
      Markup.SKOLEM -> NULL,
      Markup.BOUND -> NULL,
      Markup.VAR -> NULL,
      Markup.NUM -> DIGIT,
      Markup.FLOAT -> DIGIT,
      Markup.XNUM -> DIGIT,
      Markup.XSTR -> LITERAL4,
      Markup.LITERAL -> OPERATOR,
      Markup.INNER_COMMENT -> COMMENT1,
      Markup.SORT -> NULL,
      Markup.TYP -> NULL,
      Markup.TERM -> NULL,
      Markup.PROP -> NULL,
      Markup.ATTRIBUTE -> NULL,
      Markup.METHOD -> NULL,
      // ML syntax
      Markup.ML_KEYWORD -> KEYWORD1,
      Markup.ML_DELIMITER -> OPERATOR,
      Markup.ML_IDENT -> NULL,
      Markup.ML_TVAR -> NULL,
      Markup.ML_NUMERAL -> DIGIT,
      Markup.ML_CHAR -> LITERAL1,
      Markup.ML_STRING -> LITERAL1,
      Markup.ML_COMMENT -> COMMENT1,
      Markup.ML_MALFORMED -> INVALID,
      // embedded source text
      Markup.ML_SOURCE -> COMMENT3,
      Markup.DOC_SOURCE -> COMMENT3,
      Markup.ANTIQ -> COMMENT4,
      Markup.ML_ANTIQ -> COMMENT4,
      Markup.DOC_ANTIQ -> COMMENT4,
      // outer syntax
      Markup.KEYWORD -> KEYWORD2,
      Markup.OPERATOR -> OPERATOR,
      Markup.COMMAND -> KEYWORD1,
      Markup.IDENT -> NULL,
      Markup.VERBATIM -> COMMENT3,
      Markup.COMMENT -> COMMENT1,
      Markup.CONTROL -> COMMENT3,
      Markup.MALFORMED -> INVALID,
      Markup.STRING -> LITERAL3,
      Markup.ALTSTRING -> LITERAL1
    ).withDefaultValue(NULL)
  }
}


class Isabelle_Token_Marker(model: Document_Model) extends TokenMarker
{
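  // Produce jEdit tokens for one buffer line, based on the semantic markup that the
  // most recent document snapshot associates with the commands on that line.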
  override def markTokens(prev: TokenMarker.LineContext,
      handler: TokenHandler, line_segment: Segment): TokenMarker.LineContext =
  {
    val previous = prev.asInstanceOf[Isabelle_Token_Marker.LineContext]
    val line = if (prev == null) 0 else previous.line + 1
    val context = new Isabelle_Token_Marker.LineContext(line, previous)
    val start = model.buffer.getLineStartOffset(line)
    val stop = start + line_segment.count
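
    // The most recent document snapshot may lag behind the live buffer; to/from
    // convert offsets between the snapshot and the current buffer content.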
    val document = model.recent_document()
    def to: Int => Int = model.to_current(document, _)
    def from: Int => Int = model.from_current(document, _)

    /* FIXME
    for (text_area <- Isabelle.jedit_text_areas(model.buffer)
          if Document_View(text_area).isDefined)
      Document_View(text_area).get.set_styles()
    */
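    // set_styles is disabled for now: there are still race conditions between
    // PropertiesChanged and TextArea painting; without it, sub-/superscripts will
    // crash the marker if actually used (see the changeset log above).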

    def handle_token(style: Byte, offset: Int, length: Int) =
      handler.handleToken(line_segment, style, offset, length, context)
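
    // Highlight the markup of all commands overlapping this line: each markup range
    // is clipped to [start, stop), and gaps between highlighted tokens are emitted
    // as COMMENT1.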
    var next_x = start
    for {
      (command, command_start) <- document.command_range(from(start), from(stop))
      markup <- document.current_state(command).highlight.flatten
      val abs_start = to(command_start + markup.start)
      val abs_stop = to(command_start + markup.stop)
      if (abs_stop > start)
      if (abs_start < stop)
      val token_start = (abs_start - start) max 0
      val token_length =
        (abs_stop - abs_start) -
        ((start - abs_start) max 0) -
        ((abs_stop - stop) max 0)
    }
    {
      val token_type =
        markup.info match {
          case Command.HighlightInfo(Markup.COMMAND, Some(kind)) =>
            Isabelle_Token_Marker.command_style(kind)
          case Command.HighlightInfo(kind, _) =>
            Isabelle_Token_Marker.token_style(kind)
          case _ => Token.NULL
        }
      if (start + token_start > next_x)
        handle_token(Token.COMMENT1, next_x - start, start + token_start - next_x)
      handle_token(token_type, token_start, token_length)
      next_x = start + token_start + token_length
    }
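    // Fill any remaining gap at the end of the line and terminate the token stream.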
    if (next_x < stop)
      handle_token(Token.COMMENT1, next_x - start, stop - next_x)

    handle_token(Token.END, line_segment.count, 0)
    handler.setLineContext(context)
    context
  }
}