Make formatter a bit more convenient

This commit is contained in:
Roberto Alsina 2024-08-19 11:20:08 -03:00
parent 4a598a575b
commit 57e63f2308
4 changed files with 4 additions and 4 deletions

View File

@@ -12,7 +12,7 @@ module Tartrazine
     property theme : Theme = Tartrazine.theme("default-dark")
     # Format the text using the given lexer.
-    def format(text : String, lexer : Lexer, io : IO?) : String?
+    def format(text : String, lexer : Lexer, io : IO? = nil) : String?
       raise Exception.new("Not implemented")
     end

View File

@@ -11,7 +11,7 @@ module Tartrazine
       "#{i + 1}".rjust(4).ljust(5)
     end
-    def format(text : String, lexer : Lexer, io : IO?) : String?
+    def format(text : String, lexer : Lexer, io : IO? = nil) : String?
      outp = io.nil? ? String::Builder.new("") : io
      tokenizer = Tokenizer.new(lexer, text)
      i = 0

View File

@@ -34,7 +34,7 @@ module Tartrazine
                    @weight_of_bold : Int32 = 600)
     end
-    def format(text : String, lexer : Lexer, io : IO?) : String?
+    def format(text : String, lexer : Lexer, io : IO? = nil) : String?
       outp = io.nil? ? String::Builder.new("") : io
       pre, post = wrap_standalone
       outp << pre if standalone?

View File

@@ -4,7 +4,7 @@ module Tartrazine
   class Json < Formatter
     property name = "json"
-    def format(text : String, lexer : Lexer, io : IO?) : String?
+    def format(text : String, lexer : Lexer, io : IO? = nil) : String?
       outp = io.nil? ? String::Builder.new("") : io
       tokenizer = Tokenizer.new(lexer, text)
       outp << Tartrazine::Lexer.collapse_tokens(tokenizer.to_a).to_json