Added --line-numbers for the terminal formatter
This commit is contained in:
parent e3a1ce37b4
commit ec8c53c823
TODO.md (+1 line)
@@ -7,4 +7,5 @@
 * ✅ Implement CLI
 * ✅ Implement lexer loader that respects aliases
 * ✅ Implement lexer loader by file extension
+* ✅ Add --line-numbers to terminal formatter
 * Implement lexer loader by mime type
@@ -2,10 +2,16 @@ require "../formatter"
 
 module Tartrazine
   class Ansi < Formatter
+    property? line_numbers : Bool = false
+
     def format(text : String, lexer : Lexer, theme : Theme) : String
       output = String.build do |outp|
-        lexer.tokenize(text).each do |token|
-          outp << self.colorize(token[:value], token[:type], theme)
+        lexer.group_tokens_in_lines(lexer.tokenize(text)).each_with_index do |line, i|
+          label = line_numbers? ? "#{i + 1}".rjust(4).ljust(5) : ""
+          outp << label
+          line.each do |token|
+            outp << colorize(token[:value], token[:type], theme)
+          end
         end
       end
       output
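A note on the gutter format introduced above: "#{i + 1}".rjust(4).ljust(5) right-aligns the 1-based line number in a four-character column and pads it to five, leaving a single space before the colorized tokens. A minimal standalone Crystal sketch of just that label arithmetic (the sample lines are made up; nothing here is Tartrazine API):

    # Reproduces only the gutter label from Ansi#format above.
    lines = ["def foo", "end"]
    lines.each_with_index do |line, i|
      label = "#{i + 1}".rjust(4).ljust(5)
      puts label + line
    end
    # Prints:
    #    1 def foo
    #    2 end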
@@ -37,7 +37,7 @@ module Tartrazine
     end
 
     def format_text(text : String, lexer : Lexer, theme : Theme) : String
-      lines = group_tokens_in_lines(lexer.tokenize(text))
+      lines = lexer.group_tokens_in_lines(lexer.tokenize(text))
       output = String.build do |outp|
         if surrounding_pre?
           pre_style = wrap_long_lines? ? "style=\"white-space: pre-wrap; word-break: break-word;\"" : ""
@@ -101,28 +101,5 @@ module Tartrazine
     def highlighted?(line : Int) : Bool
       highlight_lines.any?(&.includes?(line))
     end
-
-    def group_tokens_in_lines(tokens : Array(Token)) : Array(Array(Token))
-      split_tokens = [] of Token
-      tokens.each do |token|
-        if token[:value].includes?("\n")
-          values = token[:value].split("\n")
-          values.each_with_index do |value, index|
-            value += "\n" if index < values.size - 1
-            split_tokens << {type: token[:type], value: value}
-          end
-        else
-          split_tokens << token
-        end
-      end
-      lines = [Array(Token).new]
-      split_tokens.each do |token|
-        lines.last << token
-        if token[:value].includes?("\n")
-          lines << Array(Token).new
-        end
-      end
-      lines
-    end
   end
 end
src/lexer.cr (+24 lines)
@@ -124,6 +124,30 @@ module Tartrazine
       result
     end
 
+    # Group tokens into lines, splitting them when a newline is found
+    def group_tokens_in_lines(tokens : Array(Token)) : Array(Array(Token))
+      split_tokens = [] of Token
+      tokens.each do |token|
+        if token[:value].includes?("\n")
+          values = token[:value].split("\n")
+          values.each_with_index do |value, index|
+            value += "\n" if index < values.size - 1
+            split_tokens << {type: token[:type], value: value}
+          end
+        else
+          split_tokens << token
+        end
+      end
+      lines = [Array(Token).new]
+      split_tokens.each do |token|
+        lines.last << token
+        if token[:value].includes?("\n")
+          lines << Array(Token).new
+        end
+      end
+      lines
+    end
+
     # ameba:disable Metrics/CyclomaticComplexity
     def self.from_xml(xml : String) : Lexer
       l = Lexer.new
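The method removed from the HTML formatter above is re-added here on the Lexer, so the HTML and ANSI formatters can share it. To make its behaviour concrete, here is a self-contained sketch of the same splitting logic run on a two-token input; the Token alias is an assumption inferred from the {type: ..., value: ...} tuples in the diff, not a verified copy of the library's definition:

    # Standalone sketch, not Tartrazine itself. Token is assumed to be a
    # NamedTuple of type/value strings, matching the tuples in the diff.
    alias Token = NamedTuple(type: String, value: String)

    tokens = [
      {type: "Keyword", value: "def "},
      {type: "Name", value: "foo\nend\n"},
    ]

    # First pass: split multi-line token values on "\n", keeping the
    # newline attached to every piece except the last.
    split_tokens = [] of Token
    tokens.each do |token|
      if token[:value].includes?("\n")
        values = token[:value].split("\n")
        values.each_with_index do |value, index|
          value += "\n" if index < values.size - 1
          split_tokens << {type: token[:type], value: value}
        end
      else
        split_tokens << token
      end
    end

    # Second pass: start a new line after every token that ends one.
    lines = [Array(Token).new]
    split_tokens.each do |token|
      lines.last << token
      lines << Array(Token).new if token[:value].includes?("\n")
    end

    p lines
    # => [[{type: "Keyword", value: "def "}, {type: "Name", value: "foo\n"}],
    #     [{type: "Name", value: "end\n"}],
    #     [{type: "Name", value: ""}]]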
@@ -6,9 +6,11 @@ tartrazine: a syntax highlighting tool
 
 Usage:
   tartrazine (-h, --help)
-  tartrazine FILE -f html [-t theme][--standalone][--line-numbers][-l lexer] [-o output]
+  tartrazine FILE -f html [-t theme][--standalone][--line-numbers]
+                          [-l lexer][-o output]
   tartrazine -f html -t theme --css
-  tartrazine FILE -f terminal [-t theme][-l lexer][-o output]
+  tartrazine FILE -f terminal [-t theme][-l lexer][--line-numbers]
+                              [-o output]
   tartrazine FILE -f json [-o output]
   tartrazine --list-themes
   tartrazine --list-lexers
@@ -61,6 +63,7 @@ if options["-f"]
     formatter.line_numbers = options["--line-numbers"] != nil
   when "terminal"
    formatter = Tartrazine::Ansi.new
+    formatter.line_numbers = options["--line-numbers"] != nil
  when "json"
    formatter = Tartrazine::Json.new
  else
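With both formatters wired to the flag, the gutter can be requested straight from the CLI. Illustrative invocations matching the updated usage text above (the input file name is an arbitrary example):

    tartrazine src/lexer.cr -f terminal --line-numbers
    tartrazine src/lexer.cr -f html --line-numbers -o lexer.html

The HTML branch already set formatter.line_numbers before this commit (it appears as unchanged context in the hunk above); the terminal branch now does the same.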