Many cleanups

Roberto Alsina 2024-08-15 21:10:25 -03:00
parent 58e8dac038
commit a3a7b5bd9a
3 changed files with 46 additions and 44 deletions

@@ -68,8 +68,7 @@ module Tartrazine
line_id = linkable_line_numbers? ? "id=\"#{line_number_id_prefix}#{i + 1}\"" : ""
outp << "<span #{line_id} #{line_class} style=\"user-select: none;\">#{line_label} </span>"
line.each do |token|
-fragment = "<span class=\"#{get_css_class(token[:type])}\">#{HTML.escape(token[:value])}</span>"
-outp << fragment
+outp << "<span class=\"#{get_css_class(token[:type])}\">#{HTML.escape(token[:value])}</span>"
end
end
outp << "</code></pre>"
@@ -105,15 +104,17 @@ module Tartrazine
# Given a token type, return the CSS class to use.
def get_css_class(token : String) : String
-return class_prefix + Abbreviations[token] if theme.styles.has_key?(token)
+if !theme.styles.has_key? token
# Themes don't contain information for each specific
# token type. However, they may contain information
# for a parent style. Worst case, we go to the root
# (Background) style.
-class_prefix + Abbreviations[theme.style_parents(token).reverse.find { |parent|
-theme.styles.has_key?(parent)
-}]
+parent = theme.style_parents(token).reverse.find { |dad|
+theme.styles.has_key?(dad)
+}
+theme.styles[token] = theme.styles[parent]
+end
+class_prefix + Abbreviations[token]
end
# Is this line in the highlighted ranges?
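
The new get_css_class above falls back to a token's ancestors when the theme has no entry for the token itself, then caches the ancestor's style under the token so the walk only happens once per token type. A rough standalone sketch of that idea — the `styles` hash and the `parents_of` helper below are made-up stand-ins for theme.styles and theme.style_parents, not Tartrazine's API:

    # Standalone sketch: fall back to the nearest styled ancestor and memoize it.
    def parents_of(token : String) : Array(String)
      # Hypothetical parent chain, root first.
      token == "KeywordDeclaration" ? ["Background", "Keyword"] : ["Background"]
    end

    styles = {"Background" => "bg", "Keyword" => "kw"}
    token = "KeywordDeclaration"

    unless styles.has_key?(token)
      # Most specific ancestor first, hence the reverse; "Background" always has a style here.
      parent = parents_of(token).reverse.find { |p| styles.has_key?(p) } || "Background"
      styles[token] = styles[parent] # cache it so the walk happens only once
    end

    puts styles[token] # => "kw"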

@@ -225,9 +225,9 @@ module Tartrazine
# A Lexer state. A state has a name and a list of rules.
# The state machine has a state stack containing references
# to states to decide which rules to apply.
-class State
+struct State
property name : String = ""
-property rules = [] of Rule
+property rules = [] of BaseRule
def +(other : State)
new_state = State.new
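
State (like the rule types in the next file) switches from class to struct. In Crystal, classes are heap-allocated reference types while structs are value types copied when passed around, which is presumably the point of this cleanup: states and rules are small records, so copying beats allocating. A tiny illustration of the semantic difference, with toy types rather than Tartrazine's:

    class RefState
      property name = ""
    end

    struct ValState
      property name = ""
    end

    def rename(s)
      s.name = "changed" # for the struct this mutates a local copy
    end

    c = RefState.new
    v = ValState.new
    rename(c)
    rename(v)
    puts c.name # => "changed"  (reference semantics)
    puts v.name # => ""         (value semantics: only the copy changed)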

@@ -15,30 +15,12 @@ module Tartrazine
alias Match = BytesRegex::Match
alias MatchData = Array(Match)
-class Rule
-property pattern : Regex = Regex.new ""
+abstract struct BaseRule
+abstract def match(text : Bytes, pos, lexer) : Tuple(Bool, Int32, Array(Token))
+abstract def initialize(node : XML::Node)
property actions : Array(Action) = [] of Action
-def match(text : Bytes, pos, lexer) : Tuple(Bool, Int32, Array(Token))
-match = pattern.match(text, pos)
-# We don't match if the match doesn't move the cursor
-# because that causes infinite loops
-return false, pos, [] of Token if match.empty? || match[0].size == 0
-tokens = [] of Token
-actions.each do |action|
-tokens += action.emit(match, lexer)
-end
-return true, pos + match[0].size, tokens
-end
-def initialize(node : XML::Node, multiline, dotall, ignorecase)
-@xml = node.to_s
-pattern = node["pattern"]
-pattern = "(?m)" + pattern if multiline
-@pattern = Regex.new(pattern, multiline, dotall, ignorecase, true)
-add_actions(node)
-end
def add_actions(node : XML::Node)
node.children.each do |child|
next unless child.element?
@@ -47,9 +29,32 @@
end
end
+struct Rule < BaseRule
+property pattern : Regex = Regex.new ""
+property actions : Array(Action) = [] of Action
+def match(text : Bytes, pos, lexer) : Tuple(Bool, Int32, Array(Token))
+match = pattern.match(text, pos)
+# No match
+return false, pos, [] of Token if match.size == 0
+return true, pos + match[0].size, actions.flat_map { |action| action.emit(match, lexer) }
+end
+def initialize(node : XML::Node)
+end
+def initialize(node : XML::Node, multiline, dotall, ignorecase)
+pattern = node["pattern"]
+pattern = "(?m)" + pattern if multiline
+@pattern = Regex.new(pattern, multiline, dotall, ignorecase, true)
+add_actions(node)
+end
+end
# This rule includes another state. If any of the rules of the
# included state matches, this rule matches.
-class IncludeStateRule < Rule
+struct IncludeStateRule < BaseRule
property state : String = ""
def match(text, pos, lexer) : Tuple(Bool, Int32, Array(Token))
@@ -62,7 +67,6 @@
end
def initialize(node : XML::Node)
-@xml = node.to_s
include_node = node.children.find { |child|
child.name == "include"
}
@@ -72,17 +76,14 @@
end
# This rule always matches, unconditionally
-class UnconditionalRule < Rule
+struct UnconditionalRule < BaseRule
+NO_MATCH = [] of Match
def match(text, pos, lexer) : Tuple(Bool, Int32, Array(Token))
-tokens = [] of Token
-actions.each do |action|
-tokens += action.emit([] of Match, lexer)
-end
-return true, pos, tokens
+return true, pos, actions.flat_map { |action| action.emit(NO_MATCH, lexer) }
end
def initialize(node : XML::Node)
-@xml = node.to_s
add_actions(node)
end
end
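
The reshaped hierarchy in this file is an abstract struct BaseRule that declares match and initialize as abstract, with Rule, IncludeStateRule and UnconditionalRule as concrete structs, and the hand-rolled "tokens = []; each { tokens += ... }" loops replaced by flat_map. A reduced sketch of that shape — the Matcher names and the String-based matching below are invented for illustration, not Tartrazine's actual rule types:

    alias Tok = NamedTuple(type: String, value: String)

    abstract struct BaseMatcher
      abstract def match(text : String, pos : Int32) : Tuple(Bool, Int32, Array(Tok))
    end

    struct LiteralMatcher < BaseMatcher
      def initialize(@literal : String, @emits : Array(String)); end

      def match(text : String, pos : Int32) : Tuple(Bool, Int32, Array(Tok))
        return {false, pos, [] of Tok} unless text[pos, @literal.size] == @literal
        # flat_map replaces the old accumulate-into-an-array loop
        toks = @emits.flat_map { |t| [{type: t, value: @literal}] }
        {true, pos + @literal.size, toks}
      end
    end

    rule = LiteralMatcher.new("def", ["Keyword"])
    p rule.match("def foo", 0) # => {true, 3, [{type: "Keyword", value: "def"}]}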