Compare commits

..

7 Commits

Author SHA1 Message Date
9824431317 tweak includes 2024-08-07 17:47:11 -03:00
2ad3cde7f1 tweak includes 2024-08-07 17:10:20 -03:00
aa1044ed22 ANSI formatter 2024-08-07 17:00:50 -03:00
f0d6b01362 add requires 2024-08-07 16:44:05 -03:00
e1048abe33 add requires 2024-08-07 16:42:38 -03:00
d5581a356e Baked fs 2024-08-07 16:28:26 -03:00
916ab86f60 ignore bench 2024-08-07 15:37:47 -03:00
8 changed files with 107 additions and 30 deletions

1
.gitignore vendored
View File

@@ -6,3 +6,4 @@
chroma/ chroma/
pygments/ pygments/
shard.lock shard.lock
.vscode/

View File

@@ -9,6 +9,8 @@ targets:
main: src/main.cr main: src/main.cr
dependencies: dependencies:
baked_file_system:
github: schovi/baked_file_system
base58: base58:
github: crystal-china/base58.cr github: crystal-china/base58.cr
sixteen: sixteen:

View File

@@ -1,3 +1,9 @@
require "./actions"
require "./constants"
require "./formatter"
require "./rules"
require "./styles"
require "./tartrazine"
# These are Lexer actions. When a rule matches, it will # These are Lexer actions. When a rule matches, it will
# perform a list of actions. These actions can emit tokens # perform a list of actions. These actions can emit tokens
# or change the state machine. # or change the state machine.

View File

@@ -1,6 +1,10 @@
require "./constants.cr" require "./actions"
require "./styles.cr" require "./constants"
require "./tartrazine.cr" require "./formatter"
require "./rules"
require "./styles"
require "./tartrazine"
require "colorize"
module Tartrazine module Tartrazine
# This is the base class for all formatters. # This is the base class for all formatters.
@@ -11,6 +15,63 @@ module Tartrazine
raise Exception.new("Not implemented") raise Exception.new("Not implemented")
end end
def get_style_defs(theme : Theme) : String
raise Exception.new("Not implemented")
end
end
class Ansi < Formatter
def format(text : String, lexer : Lexer, theme : Theme) : String
output = String.build do |outp|
lexer.tokenize(text).each do |token|
outp << self.colorize(token[:value], token[:type], theme)
end
end
output
end
def colorize(text : String, token : String, theme : Theme) : String
style = theme.styles.fetch(token, nil)
return text if style.nil?
if theme.styles.has_key?(token)
s = theme.styles[token]
else
# Themes don't contain information for each specific
# token type. However, they may contain information
# for a parent style. Worst case, we go to the root
# (Background) style.
s = theme.styles[theme.style_parents(token).reverse.find { |parent|
theme.styles.has_key?(parent)
}]
end
text.colorize(*rgb(s.color)).back(*rgb(s.background)).to_s
end
def rgb(c : String?)
return {0_u8, 0_u8, 0_u8} unless c
r = c[0..1].to_u8(16)
g = c[2..3].to_u8(16)
b = c[4..5].to_u8(16)
{r, g, b}
end
end
class Html < Formatter
def format(text : String, lexer : Lexer, theme : Theme) : String
output = String.build do |outp|
outp << "<html><head><style>"
outp << get_style_defs(theme)
outp << "</style></head><body>"
outp << "<pre class=\"#{get_css_class("Background", theme)}\"><code class=\"#{get_css_class("Background", theme)}\">"
lexer.tokenize(text).each do |token|
fragment = "<span class=\"#{get_css_class(token[:type], theme)}\">#{token[:value]}</span>"
outp << fragment
end
outp << "</code></pre></body></html>"
end
output
end
# ameba:disable Metrics/CyclomaticComplexity # ameba:disable Metrics/CyclomaticComplexity
def get_style_defs(theme : Theme) : String def get_style_defs(theme : Theme) : String
output = String.build do |outp| output = String.build do |outp|
@@ -35,23 +96,6 @@ module Tartrazine
end end
output output
end end
end
class Html < Formatter
def format(text : String, lexer : Lexer, theme : Theme) : String
output = String.build do |outp|
outp << "<html><head><style>"
outp << get_style_defs(theme)
outp << "</style></head><body>"
outp << "<pre class=\"#{get_css_class("Background", theme)}\"><code class=\"#{get_css_class("Background", theme)}\">"
lexer.tokenize(text).each do |token|
fragment = "<span class=\"#{get_css_class(token[:type], theme)}\">#{token[:value]}</span>"
outp << fragment
end
outp << "</code></pre></body></html>"
end
output
end
# Given a token type, return the CSS class to use. # Given a token type, return the CSS class to use.
def get_css_class(token, theme) def get_css_class(token, theme)

View File

@@ -2,4 +2,4 @@ require "./**"
lexer = Tartrazine.lexer("crystal") lexer = Tartrazine.lexer("crystal")
theme = Tartrazine.theme(ARGV[1]) theme = Tartrazine.theme(ARGV[1])
puts Tartrazine::Html.new.format(File.read(ARGV[0]), lexer, theme) puts Tartrazine::Ansi.new.format(File.read(ARGV[0]), lexer, theme)

View File

@@ -1,4 +1,9 @@
require "./actions" require "./actions"
require "./constants"
require "./formatter"
require "./rules"
require "./styles"
require "./tartrazine"
# These are lexer rules. They match with the text being parsed # These are lexer rules. They match with the text being parsed
# and perform actions, either emitting tokens or changing the # and perform actions, either emitting tokens or changing the

View File

@@ -1,11 +1,21 @@
require "./actions"
require "./constants"
require "./formatter"
require "./rules"
require "./styles"
require "./tartrazine"
require "sixteen" require "sixteen"
require "xml" require "xml"
module Tartrazine module Tartrazine
def self.theme(name : String) : Theme def self.theme(name : String) : Theme
return Theme.from_base16(name[7..]) if name.starts_with? "base16_" return Theme.from_base16(name[7..]) if name.starts_with? "base16_"
path = File.join("styles", "#{name}.xml") Theme.from_xml(ThemeFiles.get("/#{name}.xml").gets_to_end)
Theme.from_xml(File.read(path)) end
class ThemeFiles
extend BakedFileSystem
bake_folder "../styles", __DIR__
end end
class Style class Style

View File

@@ -1,5 +1,10 @@
require "./actions" require "./actions"
require "./constants"
require "./formatter"
require "./rules" require "./rules"
require "./styles"
require "./tartrazine"
require "baked_file_system"
require "base58" require "base58"
require "json" require "json"
require "log" require "log"
@@ -33,6 +38,12 @@ module Tartrazine
end end
end end
class LexerFiles
extend BakedFileSystem
bake_folder "../lexers", __DIR__
end
# A token, the output of the tokenizer # A token, the output of the tokenizer
alias Token = NamedTuple(type: String, value: String) alias Token = NamedTuple(type: String, value: String)
@@ -63,8 +74,6 @@ module Tartrazine
tokens = [] of Token tokens = [] of Token
pos = 0 pos = 0
matched = false matched = false
time = 0
count = 0
# Respect the `ensure_nl` config option # Respect the `ensure_nl` config option
if text.size > 0 && text[-1] != '\n' && config[:ensure_nl] && !usingself if text.size > 0 && text[-1] != '\n' && config[:ensure_nl] && !usingself
@@ -74,22 +83,22 @@ module Tartrazine
# Loop through the text, applying rules # Loop through the text, applying rules
while pos < text.size while pos < text.size
state = states[@state_stack.last] state = states[@state_stack.last]
Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" } # Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" }
state.rules.each do |rule| state.rules.each do |rule|
matched, new_pos, new_tokens = rule.match(text, pos, self) matched, new_pos, new_tokens = rule.match(text, pos, self)
if matched if matched
# Move position forward, save the tokens, # Move position forward, save the tokens,
# tokenize from the new position # tokenize from the new position
Log.trace { "MATCHED: #{rule.xml}" } # Log.trace { "MATCHED: #{rule.xml}" }
pos = new_pos pos = new_pos
tokens += new_tokens tokens += new_tokens
break break
end end
Log.trace { "NOT MATCHED: #{rule.xml}" } # Log.trace { "NOT MATCHED: #{rule.xml}" }
end end
# If no rule matches, emit an error token # If no rule matches, emit an error token
unless matched unless matched
Log.trace { "Error at #{pos}" } # Log.trace { "Error at #{pos}" }
tokens << {type: "Error", value: "#{text[pos]}"} tokens << {type: "Error", value: "#{text[pos]}"}
pos += 1 pos += 1
end end
@@ -184,7 +193,7 @@ module Tartrazine
end end
def self.lexer(name : String) : Lexer def self.lexer(name : String) : Lexer
Lexer.from_xml(File.read("lexers/#{name}.xml")) Lexer.from_xml(LexerFiles.get("/#{name}.xml").gets_to_end)
end end
end end