Mirror of https://github.com/ralsina/tartrazine.git, synced 2025-09-17 10:48:12 +00:00.

Compare commits: v0.9.0...357b121af1 (2 commits)

| Author | SHA1 | Date |
| --- | --- | --- |
| | 357b121af1 | |
| | 91b973f464 | |

````diff
@@ -63,7 +63,9 @@ require "tartrazine"
 
 lexer = Tartrazine.lexer("crystal")
 theme = Tartrazine.theme("catppuccin-macchiato")
-puts Tartrazine::Html.new.format(File.read(ARGV[0]), lexer, theme)
+formatter = Tartrazine::Html.new
+formatter.theme = theme
+puts formatter.format(File.read(ARGV[0]), lexer)
 ```
 
 ## Contributing
````

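The hunk above switches the README's usage example to the new formatter API: instead of passing the theme into `format`, it is set on the formatter first. A minimal standalone sketch of the updated usage, assuming the calls exactly as they appear in the hunk:

```crystal
# Sketch of the updated README example: highlight a file given on the
# command line and print the resulting HTML.
require "tartrazine"

lexer = Tartrazine.lexer("crystal")
theme = Tartrazine.theme("catppuccin-macchiato")

# The theme now lives on the formatter rather than being an argument to format.
formatter = Tartrazine::Html.new
formatter.theme = theme

puts formatter.format(File.read(ARGV[0]), lexer)
```
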
```diff
@@ -76,4 +78,4 @@ puts Tartrazine::Html.new.format(File.read(ARGV[0]), lexer, theme)
 
 ## Contributors
 
-- [Roberto Alsina](https://github.com/ralsina) - creator and maintainer
+- [Roberto Alsina](https://github.com/ralsina) - creator and maintainer
```

```diff
@@ -1,5 +1,5 @@
 name: tartrazine
-version: 0.4.0
+version: 0.5.1
 
 authors:
 - Roberto Alsina <roberto.alsina@gmail.com>
```

```diff
@@ -14,7 +14,7 @@ module Tartrazine
   def format(text : String, lexer : Lexer) : String
     outp = String::Builder.new("")
     format(text, lexer, outp)
-    return outp.to_s
+    outp.to_s
   end
 
   def format(text : String, lexer : Lexer, outp : IO) : Nil
```

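This hunk (and the two that follow) makes the same cleanup: the explicit `return` on the last line of the String-returning `format` is dropped, since a Crystal method already returns its last evaluated expression. A small self-contained illustration with made-up names:

```crystal
# Illustrative only: `greet` is not part of tartrazine. A method's last
# expression is its return value, so a trailing `return` adds nothing.
def greet(name : String) : String
  outp = String::Builder.new
  outp << "Hello, " << name
  outp.to_s # last expression == return value; no `return` keyword needed
end

puts greet("tartrazine") # => Hello, tartrazine
```
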
```diff
@@ -37,10 +37,9 @@ module Tartrazine
   def format(text : String, lexer : Lexer) : String
     outp = String::Builder.new("")
     format(text, lexer, outp)
-    return outp.to_s
+    outp.to_s
   end
 
-
   def format(text : String, lexer : Lexer, io : IO) : Nil
     pre, post = wrap_standalone
     io << pre if standalone?
```

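Judging by the `wrap_standalone` / `standalone?` calls, this hunk belongs to the HTML formatter, which exposes both a String-returning and an IO-writing `format`. A hedged sketch of how calling code might use the two overloads, assuming the signatures shown above (file names are placeholders):

```crystal
# Assumes the Html formatter and the two format overloads from the hunk above.
require "tartrazine"

lexer = Tartrazine.lexer("crystal")
formatter = Tartrazine::Html.new
formatter.theme = Tartrazine.theme("catppuccin-macchiato")

source = File.read("example.cr")

# String-returning overload: convenient for small inputs.
html = formatter.format(source, lexer)
puts html.bytesize

# IO-writing overload: stream the output without building a String first.
File.open("example.html", "w") do |file|
  formatter.format(source, lexer, file)
end
```
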
```diff
@@ -7,10 +7,9 @@ module Tartrazine
   def format(text : String, lexer : Lexer) : String
     outp = String::Builder.new("")
     format(text, lexer, outp)
-    return outp.to_s
+    outp.to_s
   end
 
-
   def format(text : String, lexer : Lexer, io : IO) : Nil
     tokenizer = Tokenizer.new(lexer, text)
     io << Tartrazine::Lexer.collapse_tokens(tokenizer.to_a).to_json
```

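The three `format` overload pairs changed above all share the same shape: the String-returning `format` delegates to the IO-writing one through a `String::Builder`. The pattern in isolation, with illustrative names rather than the library's:

```crystal
# Illustrative reduction of the delegation pattern above; not tartrazine's
# actual class. The String overload builds on the IO overload.
class UpcaseRenderer
  def render(text : String) : String
    outp = String::Builder.new
    render(text, outp)
    outp.to_s
  end

  def render(text : String, io : IO) : Nil
    io << text.upcase
  end
end

puts UpcaseRenderer.new.render("tokens") # => TOKENS
```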