Mirror of https://github.com/ralsina/tartrazine.git (synced 2024-11-10 05:22:23 +00:00)
0.5.1
parent 5b0a1789dc
commit f435d7df21
@@ -1,5 +1,5 @@
 name: tartrazine
-version: 0.4.0
+version: 0.5.1
 
 authors:
 - Roberto Alsina <roberto.alsina@gmail.com>
@@ -14,7 +14,7 @@ module Tartrazine
     def format(text : String, lexer : Lexer) : String
       outp = String::Builder.new("")
       format(text, lexer, outp)
-      return outp.to_s
+      outp.to_s
     end
 
     def format(text : String, lexer : Lexer, outp : IO) : Nil
@@ -37,10 +37,9 @@ module Tartrazine
     def format(text : String, lexer : Lexer) : String
       outp = String::Builder.new("")
       format(text, lexer, outp)
-      return outp.to_s
+      outp.to_s
     end
 
-
     def format(text : String, lexer : Lexer, io : IO) : Nil
       pre, post = wrap_standalone
       io << pre if standalone?
@@ -7,10 +7,9 @@ module Tartrazine
     def format(text : String, lexer : Lexer) : String
       outp = String::Builder.new("")
       format(text, lexer, outp)
-      return outp.to_s
+      outp.to_s
     end
 
-
     def format(text : String, lexer : Lexer, io : IO) : Nil
       tokenizer = Tokenizer.new(lexer, text)
       io << Tartrazine::Lexer.collapse_tokens(tokenizer.to_a).to_json
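
Every hunk above applies the same small refactor: the String-returning format overload builds output in memory, delegates to the IO-based overload, and now ends with outp.to_s as the method's last expression (Crystal's implicit return) instead of an explicit return, also dropping a stray blank line. A minimal standalone sketch of that delegation pattern, using a hypothetical Echo formatter (not part of tartrazine) so it compiles without a Lexer:

# Hypothetical stand-in for a tartrazine formatter; it only echoes text,
# but mirrors the two-overload shape touched by this commit.
class Echo
  # String-returning overload: build into an in-memory builder and delegate.
  def format(text : String) : String
    outp = String::Builder.new
    format(text, outp)
    outp.to_s # last expression is the return value; no explicit `return`
  end

  # IO-writing overload does the real work.
  def format(text : String, io : IO) : Nil
    io << text
  end
end

puts Echo.new.format("hello") # => hello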