mirror of
https://github.com/ralsina/tartrazine.git
synced 2025-06-27 06:42:04 -03:00
feat: use the native crystal highlighter
The chroma highlighter for Crystal is not great, because the Pygments one special-cases things like heredocs, and that got lost in translation. Since the Crystal compiler ships with its own highlighter, why not use it?
This commit is contained in:
83
src/lexer.cr
83
src/lexer.cr
@@ -1,6 +1,7 @@
|
||||
require "./constants/lexers"
|
||||
require "./heuristics"
|
||||
require "baked_file_system"
|
||||
require "crystal/syntax_highlighter"
|
||||
|
||||
module Tartrazine
|
||||
class LexerFiles
|
||||
@@ -26,6 +27,7 @@ module Tartrazine
|
||||
end
|
||||
|
||||
private def self.lexer_by_name(name : String) : BaseLexer
|
||||
return CrystalLexer.new if name == "crystal"
|
||||
lexer_file_name = LEXERS_BY_NAME.fetch(name.downcase, nil)
|
||||
return create_delegating_lexer(name) if lexer_file_name.nil? && name.includes? "+"
|
||||
raise Exception.new("Unknown lexer: #{name}") if lexer_file_name.nil?
|
||||
@@ -34,6 +36,10 @@ module Tartrazine
|
||||
end
|
||||
|
||||
private def self.lexer_by_filename(filename : String) : BaseLexer
|
||||
if filename.ends_with?(".cr")
|
||||
return CrystalLexer.new
|
||||
end
|
||||
|
||||
candidates = Set(String).new
|
||||
LEXERS_BY_FILENAME.each do |k, v|
|
||||
candidates += v.to_set if File.match?(k, File.basename(filename))
|
||||
@@ -327,4 +333,81 @@ module Tartrazine
|
||||
new_state
|
||||
end
|
||||
end
|
||||
|
||||
# Subclass of the compiler's own syntax highlighter that, instead of
# rendering markup, collects the highlighted spans into `@tokens` as
# Tartrazine-style tokens ({type:, value:} named tuples) so they can be
# fed through the normal formatter pipeline.
class CustomCrystalHighlighter < Crystal::SyntaxHighlighter
  @tokens = [] of Token

  # A delimited literal (string, regex, …): emit the whole rendered
  # span as a single string token.
  def render_delimiter(&block)
    @tokens << {type: "LiteralString", value: block.call.to_s}
  end

  # String interpolation: wrap the inner expression's rendered text in
  # the literal `#{` / `}` interpolation markers.
  def render_interpolation(&block)
    @tokens << {type: "LiteralStringInterpol", value: "\#{"}
    @tokens << {type: "Text", value: block.call.to_s}
    @tokens << {type: "LiteralStringInterpol", value: "}"}
  end

  # %w(...) / %i(...) style array literals render as one string token.
  def render_string_array(&block)
    @tokens << {type: "LiteralString", value: block.call.to_s}
  end

  # Map each compiler token type onto the closest Pygments-style token
  # name used by Tartrazine themes; anything unrecognized falls back to
  # plain "Text".
  # ameba:disable Metrics/CyclomaticComplexity
  def render(type : TokenType, value : String)
    case type
    when .comment?
      @tokens << {type: "Comment", value: value}
    when .number?
      @tokens << {type: "LiteralNumber", value: value}
    when .char?
      @tokens << {type: "LiteralStringChar", value: value}
    when .symbol?
      @tokens << {type: "LiteralStringSymbol", value: value}
    when .const?
      @tokens << {type: "NameConstant", value: value}
    when .string?
      @tokens << {type: "LiteralString", value: value}
    when .ident?
      @tokens << {type: "NameVariable", value: value}
    when .keyword?, .self?
      @tokens << {type: "NameKeyword", value: value}
    when .primitive_literal?
      @tokens << {type: "Literal", value: value}
    when .operator?
      @tokens << {type: "Operator", value: value}
    when .delimited_token?, .delimiter_start?, .delimiter_end?
      # Use the same enum-predicate shorthand as every branch above
      # instead of the fully-qualified TokenType constants.
      @tokens << {type: "LiteralString", value: value}
    else
      @tokens << {type: "Text", value: value}
    end
  end
end
|
||||
|
||||
# Adapter that exposes the stdlib Crystal highlighter through the
# Tartrazine tokenizer interface: the whole input is highlighted
# eagerly at construction time, and `next` simply walks the collected
# token list.
class CrystalTokenizer < Tartrazine::BaseTokenizer
  include Iterator(Token)
  @hl = CustomCrystalHighlighter.new
  @lexer : BaseLexer
  @iter : Iterator(Token)

  def initialize(@lexer : BaseLexer, text : String, secondary = false)
    # Honor the `ensure_nl` config option, but only for the top-level
    # lexer (a secondary/delegated lexer must not pad its slice).
    wants_nl = @lexer.config[:ensure_nl] && !secondary
    text += "\n" if wants_nl && !text.empty? && text[-1] != '\n'
    # Do all the tokenizing up front; iteration is just replay.
    @hl.highlight(text)
    @iter = @hl.@tokens.each
  end

  def next : Iterator::Stop | Token
    @iter.next
  end
end
|
||||
|
||||
# Lexer for Crystal source code backed by the compiler's native
# highlighter rather than a chroma-derived XML definition.
class CrystalLexer < BaseLexer
  # Build a tokenizer for `text`; `secondary` is forwarded so delegated
  # lexing keeps its semantics.
  def tokenizer(text : String, secondary = false) : BaseTokenizer
    CrystalTokenizer.new(self, text, secondary)
  end
end
|
||||
end
|
||||
|
@@ -17,7 +17,6 @@ module Tartrazine
|
||||
|
||||
abstract struct BaseRule
|
||||
abstract def match(text : Bytes, pos : Int32, tokenizer : Tokenizer) : Tuple(Bool, Int32, Array(Token))
|
||||
abstract def initialize(node : XML::Node)
|
||||
|
||||
@actions : Array(Action) = [] of Action
|
||||
|
||||
@@ -40,9 +39,6 @@ module Tartrazine
|
||||
return true, pos + match[0].size, @actions.flat_map(&.emit(match, tokenizer))
|
||||
end
|
||||
|
||||
def initialize(node : XML::Node)
|
||||
end
|
||||
|
||||
def initialize(node : XML::Node, multiline, dotall, ignorecase)
|
||||
pattern = node["pattern"]
|
||||
pattern = "(?m)" + pattern if multiline
|
||||
|
@@ -84,27 +84,6 @@ module Tartrazine
|
||||
|
||||
property styles = {} of String => Style
|
||||
|
||||
# Get the style for a token.
|
||||
# Resolve the effective style for `token`: start from its own entry,
# fold in every parent style from the token hierarchy, and memoize the
# flattened result back into `styles`.
def style(token)
  styles[token] = Style.new unless styles.has_key?(token)
  resolved = styles[token]

  # Flattened against the hierarchy on a previous call — reuse it.
  return resolved if resolved.complete?

  # Merge each ancestor's style onto this one, in hierarchy order
  # (same order `style_parents` returns).
  style_parents(token).each do |parent|
    resolved = resolved + styles[parent]
  end

  resolved.complete = true
  styles[token] = resolved
  resolved
end
|
||||
|
||||
def style_parents(token)
|
||||
parents = ["Background"]
|
||||
parts = token.underscore.split("_").map(&.capitalize)
|
||||
|
Reference in New Issue
Block a user