Mirror of https://github.com/ralsina/tartrazine.git (synced 2025-06-08 12:40:25 -03:00)

Compare commits: 357b121af1 ... ae03e4612e
No commits in common. "357b121af1f3744b0e74b50b172fcbac44e3599a" and "ae03e4612ec58001fcb32a9f337b968d408d6349" have entirely different histories.

@@ -63,9 +63,7 @@ require "tartrazine"
 lexer = Tartrazine.lexer("crystal")
 theme = Tartrazine.theme("catppuccin-macchiato")
-formatter = Tartrazine::Html.new
-formatter.theme = theme
-puts formatter.format(File.read(ARGV[0]), lexer)
+puts Tartrazine::Html.new.format(File.read(ARGV[0]), lexer, theme)
 ```
 
 ## Contributing
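Reading note: the two commits share no history, and the right-hand side carries the lower shard version (0.4.0 vs 0.5.1 in the next hunk), so the "+" side of these hunks appears to be the older design. For quick reference, the two README usage styles shown above, using only the identifiers that appear in the hunk itself:

# Left-hand commit (357b121af1): the theme is a property of the formatter.
lexer = Tartrazine.lexer("crystal")
theme = Tartrazine.theme("catppuccin-macchiato")
formatter = Tartrazine::Html.new
formatter.theme = theme
puts formatter.format(File.read(ARGV[0]), lexer)

# Right-hand commit (ae03e4612e): the theme is passed to format directly.
puts Tartrazine::Html.new.format(File.read(ARGV[0]), lexer, theme)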
@@ -1,5 +1,5 @@
 name: tartrazine
-version: 0.5.1
+version: 0.4.0
 
 authors:
 - Roberto Alsina <roberto.alsina@gmail.com>
@@ -12,10 +12,6 @@ module Tartrazine
     property theme : Theme = Tartrazine.theme("default-dark")
 
     # Format the text using the given lexer.
-    def format(text : String, lexer : Lexer, io : IO = nil) : Nil
-      raise Exception.new("Not implemented")
-    end
-
     def format(text : String, lexer : Lexer) : String
      raise Exception.new("Not implemented")
     end
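As a reading aid only, here is a minimal sketch of a custom formatter written against the right-hand base class above. The Upcase class name is invented for illustration and is not part of either commit; the Tokenizer and token[:value] access pattern is the one visible in the formatter hunks below.

# Hypothetical subclass, for illustration only (Upcase is not in the
# repository). It overrides the String-returning Formatter#format kept
# on the right-hand side and uppercases every token value.
module Tartrazine
  class Upcase < Formatter
    def format(text : String, lexer : Lexer) : String
      String.build do |outp|
        Tokenizer.new(lexer, text).each do |token|
          outp << token[:value].upcase
        end
      end
    end
  end
end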
@@ -12,14 +12,9 @@ module Tartrazine
     end
 
     def format(text : String, lexer : Lexer) : String
-      outp = String::Builder.new("")
-      format(text, lexer, outp)
-      outp.to_s
-    end
-
-    def format(text : String, lexer : Lexer, outp : IO) : Nil
       tokenizer = Tokenizer.new(lexer, text)
       i = 0
+      output = String.build do |outp|
       outp << line_label(i) if line_numbers?
       tokenizer.each do |token|
         outp << colorize(token[:value], token[:type])
@@ -29,6 +24,21 @@ module Tartrazine
         end
       end
     end
+      output
+    end
+
+    # def format(text : String, lexer : Lexer) : String
+    #   output = String.build do |outp|
+    #     lexer.group_tokens_in_lines(lexer.tokenize(text)).each_with_index do |line, i|
+    #       label = line_numbers? ? "#{i + 1}".rjust(4).ljust(5) : ""
+    #       outp << label
+    #       line.each do |token|
+    #         outp << colorize(token[:value], token[:type])
+    #       end
+    #     end
+    #   end
+    #   output
+    # end
 
     def colorize(text : String, token : String) : String
       style = theme.styles.fetch(token, nil)
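In the two hunks above, the left-hand side streams into an IO (or a String::Builder), while the right-hand side accumulates the result with Crystal's String.build, which yields an appendable builder to the block and returns the accumulated String. A tiny standalone illustration, not taken from the repository:

# String.build collects everything appended to the block argument and
# returns it as a single String once the block finishes.
greeting = String.build do |outp|
  outp << "Hello, "
  outp << "world"
end
puts greeting # => Hello, world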
@@ -35,26 +35,23 @@ module Tartrazine
     end
 
     def format(text : String, lexer : Lexer) : String
-      outp = String::Builder.new("")
-      format(text, lexer, outp)
-      outp.to_s
-    end
-
-    def format(text : String, lexer : Lexer, io : IO) : Nil
-      pre, post = wrap_standalone
-      io << pre if standalone?
-      format_text(text, lexer, io)
-      io << post if standalone?
+      text = format_text(text, lexer)
+      if standalone?
+        text = wrap_standalone(text)
+      end
+      text
     end
 
     # Wrap text into a full HTML document, including the CSS for the theme
-    def wrap_standalone
+    def wrap_standalone(text) : String
       output = String.build do |outp|
         outp << "<!DOCTYPE html><html><head><style>"
         outp << style_defs
         outp << "</style></head><body>"
+        outp << text
+        outp << "</body></html>"
       end
-      {output.to_s, "</body></html>"}
+      output
     end
 
     private def line_label(i : Int32) : String
@@ -64,9 +61,11 @@ module Tartrazine
       "<span #{line_id} #{line_class} style=\"user-select: none;\">#{line_label} </span>"
     end
 
-    def format_text(text : String, lexer : Lexer, outp : IO)
+    def format_text(text : String, lexer : Lexer) : String
+      # lines = lexer.group_tokens_in_lines(lexer.tokenize(text))
       tokenizer = Tokenizer.new(lexer, text)
       i = 0
+      output = String.build do |outp|
       if surrounding_pre?
         pre_style = wrap_long_lines? ? "style=\"white-space: pre-wrap; word-break: break-word;\"" : ""
         outp << "<pre class=\"#{get_css_class("Background")}\" #{pre_style}>"
@@ -82,6 +81,8 @@ module Tartrazine
       end
       outp << "</code></pre>"
     end
+      output
+    end
 
     # ameba:disable Metrics/CyclomaticComplexity
     def style_defs : String
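Condensed reading aid for the three HTML-formatter hunks above. Every code line below is taken from the hunks themselves; only the comments are added.

# Left-hand commit: streaming API. The caller supplies an IO; the
# standalone wrapper is returned as a {pre, post} pair and stitched
# around the streamed body.
def format(text : String, lexer : Lexer, io : IO) : Nil
  pre, post = wrap_standalone
  io << pre if standalone?
  format_text(text, lexer, io)
  io << post if standalone?
end

# Right-hand commit: string-building API. format_text returns a String,
# and wrap_standalone receives it and returns the whole HTML document.
def format(text : String, lexer : Lexer) : String
  text = format_text(text, lexer)
  if standalone?
    text = wrap_standalone(text)
  end
  text
end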
@@ -4,15 +4,8 @@ module Tartrazine
   class Json < Formatter
     property name = "json"
 
-    def format(text : String, lexer : Lexer) : String
-      outp = String::Builder.new("")
-      format(text, lexer, outp)
-      outp.to_s
-    end
-
-    def format(text : String, lexer : Lexer, io : IO) : Nil
-      tokenizer = Tokenizer.new(lexer, text)
-      io << Tartrazine::Lexer.collapse_tokens(tokenizer.to_a).to_json
+    def format(text : String, lexer : Lexer, _theme : Theme) : String
+      lexer.tokenize(text).to_json
     end
   end
 end
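A hypothetical invocation of the right-hand Json formatter, using only names that appear elsewhere in this compare; the source path is a placeholder and the direct Json.new instantiation is an assumption, not something shown in the hunks. Note that the right-hand signature accepts a Theme but ignores it (hence _theme).

# Hypothetical usage sketch; "program.cr" is a placeholder path, and the
# lexer/theme lookups mirror the README hunk at the top of this compare.
json = Tartrazine::Json.new
lexer = Tartrazine.lexer("crystal")
theme = Tartrazine.theme("catppuccin-macchiato")
puts json.format(File.read("program.cr"), lexer, theme)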
@@ -85,11 +85,13 @@ if options["-f"]
   lexer = Tartrazine.lexer(name: options["-l"].as(String), filename: options["FILE"].as(String))
 
   input = File.open(options["FILE"].as(String)).gets_to_end
+  output = formatter.format(input, lexer)
 
   if options["-o"].nil?
-    outf = STDOUT
+    puts output
   else
-    outf = File.open(options["-o"].as(String), "w")
+    File.open(options["-o"].as(String), "w") do |outf|
+      outf << output
+    end
   end
-  formatter.format(input, lexer, outf)
 end
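The right-hand CLI writes the -o file with Crystal's block form of File.open, which closes the file automatically when the block exits. A minimal standalone illustration; the path and contents are placeholders, not taken from the repository:

# Block form of File.open: the yielded file handle is closed
# automatically when the block returns, even if it raises.
output = "example output"
File.open("out.html", "w") do |outf|
  outf << output
end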