Mirror of https://github.com/ralsina/tartrazine.git, synced 2024-11-10 05:22:23 +00:00

Compare commits: e0f697f1f9...9824431317 (7 commits)
Commits in this range:

  9824431317
  2ad3cde7f1
  aa1044ed22
  f0d6b01362
  e1048abe33
  d5581a356e
  916ab86f60
.gitignore (vendored, +1 line)

@@ -6,3 +6,4 @@
 chroma/
 pygments/
 shard.lock
+.vscode/
shard.yml

@@ -9,6 +9,8 @@ targets:
     main: src/main.cr
 
 dependencies:
+  baked_file_system:
+    github: schovi/baked_file_system
   base58:
     github: crystal-china/base58.cr
   sixteen:
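Of the dependencies shown, only baked_file_system appears to be new in this range; base58 and sixteen are existing context. It provides the compile-time file embedding used by the ThemeFiles and LexerFiles classes introduced further down in src/styles.cr and src/tartrazine.cr.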
src/actions.cr

@@ -1,3 +1,9 @@
+require "./actions"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"
 # These are Lexer actions. When a rule matches, it will
 # perform a list of actions. These actions can emit tokens
 # or change the state machine.
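The comment spells out the contract: a matched rule runs a list of actions, and each action either emits tokens or manipulates the lexer's state machine. The Action class body is not part of this diff, so the following dispatch is only a sketch of that contract, and every name in it is assumed:

    # Hypothetical sketch; the real Action implementation is not in this diff.
    case action_type
    when "token" # emit a token for the matched text
      tokens << {type: token_type, value: matched_text}
    when "push"  # state-machine change: enter a state
      lexer.state_stack.push(state_name)
    when "pop"   # state-machine change: return to the previous state
      lexer.state_stack.pop
    end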
src/formatter.cr

@@ -1,6 +1,10 @@
-require "./constants.cr"
-require "./styles.cr"
-require "./tartrazine.cr"
+require "./actions"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"
+require "colorize"
 
 module Tartrazine
   # This is the base class for all formatters.
@@ -11,6 +15,63 @@ module Tartrazine
       raise Exception.new("Not implemented")
     end
+
+    def get_style_defs(theme : Theme) : String
+      raise Exception.new("Not implemented")
+    end
   end
+
+  class Ansi < Formatter
+    def format(text : String, lexer : Lexer, theme : Theme) : String
+      output = String.build do |outp|
+        lexer.tokenize(text).each do |token|
+          outp << self.colorize(token[:value], token[:type], theme)
+        end
+      end
+      output
+    end
+
+    def colorize(text : String, token : String, theme : Theme) : String
+      style = theme.styles.fetch(token, nil)
+      return text if style.nil?
+      if theme.styles.has_key?(token)
+        s = theme.styles[token]
+      else
+        # Themes don't contain information for each specific
+        # token type. However, they may contain information
+        # for a parent style. Worst case, we go to the root
+        # (Background) style.
+        s = theme.styles[theme.style_parents(token).reverse.find { |parent|
+          theme.styles.has_key?(parent)
+        }]
+      end
+      text.colorize(*rgb(s.color)).back(*rgb(s.background)).to_s
+    end
+
+    def rgb(c : String?)
+      return {0_u8, 0_u8, 0_u8} unless c
+      r = c[0..1].to_u8(16)
+      g = c[2..3].to_u8(16)
+      b = c[4..5].to_u8(16)
+      {r, g, b}
+    end
+  end
+
+  class Html < Formatter
+    def format(text : String, lexer : Lexer, theme : Theme) : String
+      output = String.build do |outp|
+        outp << "<html><head><style>"
+        outp << get_style_defs(theme)
+        outp << "</style></head><body>"
+        outp << "<pre class=\"#{get_css_class("Background", theme)}\"><code class=\"#{get_css_class("Background", theme)}\">"
+        lexer.tokenize(text).each do |token|
+          fragment = "<span class=\"#{get_css_class(token[:type], theme)}\">#{token[:value]}</span>"
+          outp << fragment
+        end
+        outp << "</code></pre></body></html>"
+      end
+      output
+    end
+
+    # ameba:disable Metrics/CyclomaticComplexity
+    def get_style_defs(theme : Theme) : String
+      output = String.build do |outp|
@@ -35,23 +96,6 @@ module Tartrazine
         end
       end
       output
     end
-
-  class Html < Formatter
-    def format(text : String, lexer : Lexer, theme : Theme) : String
-      output = String.build do |outp|
-        outp << "<html><head><style>"
-        outp << get_style_defs(theme)
-        outp << "</style></head><body>"
-        outp << "<pre class=\"#{get_css_class("Background", theme)}\"><code class=\"#{get_css_class("Background", theme)}\">"
-        lexer.tokenize(text).each do |token|
-          fragment = "<span class=\"#{get_css_class(token[:type], theme)}\">#{token[:value]}</span>"
-          outp << fragment
-        end
-        outp << "</code></pre></body></html>"
-      end
-      output
-    end
     # Given a token type, return the CSS class to use.
     def get_css_class(token, theme)
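Reading the new Ansi formatter: format tokenizes the input and runs every token through colorize, which looks up the token's style, falling back through style_parents toward the root Background style when the theme has no entry for the exact token type. The rgb helper turns a six-digit hex string such as "ff8800" into the {255_u8, 136_u8, 0_u8} tuple that the colorize shard's truecolor methods expect, with nil colors becoming black. A minimal usage sketch, with the theme name assumed:

    # Theme name is an assumption; any name resolvable by Tartrazine.theme works.
    lexer = Tartrazine.lexer("crystal")
    theme = Tartrazine.theme("base16_snazzy")
    puts Tartrazine::Ansi.new.format(%(puts "hello"), lexer, theme)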
src/main.cr

@@ -2,4 +2,4 @@ require "./**"
 
 lexer = Tartrazine.lexer("crystal")
 theme = Tartrazine.theme(ARGV[1])
-puts Tartrazine::Html.new.format(File.read(ARGV[0]), lexer, theme)
+puts Tartrazine::Ansi.new.format(File.read(ARGV[0]), lexer, theme)
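The only change here is the formatter: the CLI now prints ANSI-colored text instead of an HTML document. The Html formatter is still available, so a variant that keeps both outputs might look like this (the output file name is assumed):

    # Hypothetical variant, not part of the diff.
    source = File.read(ARGV[0])
    puts Tartrazine::Ansi.new.format(source, lexer, theme)
    File.write("out.html", Tartrazine::Html.new.format(source, lexer, theme))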
src/rules.cr

@@ -1,4 +1,9 @@
+require "./actions"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"
 
 # These are lexer rules. They match with the text being parsed
 # and perform actions, either emitting tokens or changing the
src/styles.cr

@@ -1,11 +1,21 @@
+require "./actions"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"
 require "sixteen"
 require "xml"
 
 module Tartrazine
   def self.theme(name : String) : Theme
     return Theme.from_base16(name[7..]) if name.starts_with? "base16_"
-    path = File.join("styles", "#{name}.xml")
-    Theme.from_xml(File.read(path))
+    Theme.from_xml(ThemeFiles.get("/#{name}.xml").gets_to_end)
   end
 
+  class ThemeFiles
+    extend BakedFileSystem
+    bake_folder "../styles", __DIR__
+  end
+
   class Style
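Theme resolution now has two paths: names starting with base16_ are stripped of the prefix (name[7..]) and handed to the sixteen shard, while every other name is read from the styles folder baked into the binary rather than from disk. For example (both theme names assumed):

    t1 = Tartrazine.theme("base16_solarized-dark") # resolved via the sixteen shard
    t2 = Tartrazine.theme("autumn")                # reads the baked file "/autumn.xml"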
src/tartrazine.cr

@@ -1,5 +1,10 @@
+require "./actions"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"
 require "baked_file_system"
 require "base58"
 require "json"
 require "log"
@@ -33,6 +38,12 @@ module Tartrazine
     end
   end
 
+  class LexerFiles
+    extend BakedFileSystem
+
+    bake_folder "../lexers", __DIR__
+  end
+
   # A token, the output of the tokenizer
   alias Token = NamedTuple(type: String, value: String)
 
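LexerFiles mirrors the ThemeFiles class from src/styles.cr: BakedFileSystem embeds every file under the given folder into the executable at compile time, and get returns a handle whose contents are read back with gets_to_end. A standalone sketch of the same pattern, with the folder and file names assumed:

    require "baked_file_system"

    # Same pattern as LexerFiles/ThemeFiles; "assets" is an assumed folder name.
    class Assets
      extend BakedFileSystem
      bake_folder "../assets", __DIR__
    end

    content = Assets.get("/example.txt").gets_to_end # raises if the path was never baked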
@@ -63,8 +74,6 @@ module Tartrazine
       tokens = [] of Token
       pos = 0
       matched = false
-      time = 0
-      count = 0
 
       # Respect the `ensure_nl` config option
       if text.size > 0 && text[-1] != '\n' && config[:ensure_nl] && !usingself
@@ -74,22 +83,22 @@ module Tartrazine
       # Loop through the text, applying rules
       while pos < text.size
         state = states[@state_stack.last]
-        Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" }
+        # Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" }
         state.rules.each do |rule|
           matched, new_pos, new_tokens = rule.match(text, pos, self)
           if matched
             # Move position forward, save the tokens,
             # tokenize from the new position
-            Log.trace { "MATCHED: #{rule.xml}" }
+            # Log.trace { "MATCHED: #{rule.xml}" }
             pos = new_pos
             tokens += new_tokens
             break
           end
-          Log.trace { "NOT MATCHED: #{rule.xml}" }
+          # Log.trace { "NOT MATCHED: #{rule.xml}" }
         end
         # If no rule matches, emit an error token
         unless matched
-          Log.trace { "Error at #{pos}" }
+          # Log.trace { "Error at #{pos}" }
           tokens << {type: "Error", value: "#{text[pos]}"}
           pos += 1
         end
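In the loop above, either some rule of the current state (the top of @state_stack) matches and pos jumps to new_pos, or nothing matches and a one-character Error token is emitted before pos advances by one, so malformed input cannot stall tokenization at an unmatched character. Consuming the result, per the Token alias above:

    lexer = Tartrazine.lexer("crystal")
    lexer.tokenize(%(puts "hi")).each do |token|
      puts "#{token[:type]} -> #{token[:value].inspect}"
    end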
@@ -184,7 +193,7 @@ module Tartrazine
   end
 
   def self.lexer(name : String) : Lexer
-    Lexer.from_xml(File.read("lexers/#{name}.xml"))
+    Lexer.from_xml(LexerFiles.get("/#{name}.xml").gets_to_end)
   end
 end
 