diff --git a/TODO.md b/TODO.md
index 5d76eaf..c2bfeed 100644
--- a/TODO.md
+++ b/TODO.md
@@ -7,4 +7,5 @@
 * ✅ Implement CLI
 * ✅ Implement lexer loader that respects aliases
 * ✅ Implement lexer loader by file extension
+* ✅ Add --line-numbers to terminal formatter
 * Implement lexer loader by mime type
\ No newline at end of file
diff --git a/src/formatters/ansi.cr b/src/formatters/ansi.cr
index 16d9e79..e37aa66 100644
--- a/src/formatters/ansi.cr
+++ b/src/formatters/ansi.cr
@@ -2,10 +2,16 @@ require "../formatter"
 
 module Tartrazine
   class Ansi < Formatter
+    property? line_numbers : Bool = false
+
     def format(text : String, lexer : Lexer, theme : Theme) : String
       output = String.build do |outp|
-        lexer.tokenize(text).each do |token|
-          outp << self.colorize(token[:value], token[:type], theme)
+        lexer.group_tokens_in_lines(lexer.tokenize(text)).each_with_index do |line, i|
+          label = line_numbers? ? "#{i + 1}".rjust(4).ljust(5) : ""
+          outp << label
+          line.each do |token|
+            outp << colorize(token[:value], token[:type], theme)
+          end
         end
       end
       output
diff --git a/src/formatters/html.cr b/src/formatters/html.cr
index 40af672..b179531 100644
--- a/src/formatters/html.cr
+++ b/src/formatters/html.cr
@@ -37,7 +37,7 @@ module Tartrazine
     end
 
     def format_text(text : String, lexer : Lexer, theme : Theme) : String
-      lines = group_tokens_in_lines(lexer.tokenize(text))
+      lines = lexer.group_tokens_in_lines(lexer.tokenize(text))
       output = String.build do |outp|
         if surrounding_pre?
           pre_style = wrap_long_lines? ? "style=\"white-space: pre-wrap; word-break: break-word;\"" : ""
@@ -101,28 +101,5 @@ module Tartrazine
     def highlighted?(line : Int) : Bool
       highlight_lines.any?(&.includes?(line))
     end
-
-    def group_tokens_in_lines(tokens : Array(Token)) : Array(Array(Token))
-      split_tokens = [] of Token
-      tokens.each do |token|
-        if token[:value].includes?("\n")
-          values = token[:value].split("\n")
-          values.each_with_index do |value, index|
-            value += "\n" if index < values.size - 1
-            split_tokens << {type: token[:type], value: value}
-          end
-        else
-          split_tokens << token
-        end
-      end
-      lines = [Array(Token).new]
-      split_tokens.each do |token|
-        lines.last << token
-        if token[:value].includes?("\n")
-          lines << Array(Token).new
-        end
-      end
-      lines
-    end
   end
 end
diff --git a/src/lexer.cr b/src/lexer.cr
index 9635cc2..63e5b7d 100644
--- a/src/lexer.cr
+++ b/src/lexer.cr
@@ -124,6 +124,30 @@ module Tartrazine
       result
     end
 
+    # Group tokens into lines, splitting them when a newline is found
+    def group_tokens_in_lines(tokens : Array(Token)) : Array(Array(Token))
+      split_tokens = [] of Token
+      tokens.each do |token|
+        if token[:value].includes?("\n")
+          values = token[:value].split("\n")
+          values.each_with_index do |value, index|
+            value += "\n" if index < values.size - 1
+            split_tokens << {type: token[:type], value: value}
+          end
+        else
+          split_tokens << token
+        end
+      end
+      lines = [Array(Token).new]
+      split_tokens.each do |token|
+        lines.last << token
+        if token[:value].includes?("\n")
+          lines << Array(Token).new
+        end
+      end
+      lines
+    end
+
     # ameba:disable Metrics/CyclomaticComplexity
     def self.from_xml(xml : String) : Lexer
       l = Lexer.new
diff --git a/src/main.cr b/src/main.cr
index 3dc6439..414ced9 100644
--- a/src/main.cr
+++ b/src/main.cr
@@ -6,9 +6,11 @@ tartrazine: a syntax highlighting tool
 
 Usage:
   tartrazine (-h, --help)
-  tartrazine FILE -f html [-t theme][--standalone][--line-numbers][-l lexer] [-o output]
+  tartrazine FILE -f html [-t theme][--standalone][--line-numbers]
+                          [-l lexer][-o output]
   tartrazine -f html -t theme --css
-  tartrazine FILE -f terminal [-t theme][-l lexer][-o output]
+  tartrazine FILE -f terminal [-t theme][-l lexer][--line-numbers]
+                              [-o output]
   tartrazine FILE -f json [-o output]
   tartrazine --list-themes
   tartrazine --list-lexers
@@ -61,6 +63,7 @@ if options["-f"]
     formatter.line_numbers = options["--line-numbers"] != nil
   when "terminal"
     formatter = Tartrazine::Ansi.new
+    formatter.line_numbers = options["--line-numbers"] != nil
   when "json"
     formatter = Tartrazine::Json.new
   else
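
Not part of the diff: a minimal sketch of driving the new line-numbered terminal output from Crystal code rather than the CLI. Only the Tartrazine::Ansi formatter API shown above comes from this change; the shard require name, the Tartrazine.lexer / Tartrazine.theme loader helpers, and the "monokai" theme name are assumptions.

    # Sketch only: loader helpers and theme name below are assumptions,
    # not shown in this diff. The formatter calls match the change above.
    require "tartrazine"

    lexer = Tartrazine.lexer("crystal") # assumed loader-by-name helper
    theme = Tartrazine.theme("monokai") # assumed theme loader

    formatter = Tartrazine::Ansi.new
    formatter.line_numbers = true # new property? added in ansi.cr
    puts formatter.format(File.read("src/main.cr"), lexer, theme)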