Roberto Alsina 2024-08-07 16:28:26 -03:00
parent 916ab86f60
commit d5581a356e
3 changed files with 19 additions and 9 deletions

View File

@@ -9,6 +9,8 @@ targets:
     main: src/main.cr
 dependencies:
+  baked_file_system:
+    github: schovi/baked_file_system
   base58:
     github: crystal-china/base58.cr
   sixteen:
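A note in passing, not part of the diff: once the dependency is listed, shards install fetches it, and any file using the mixin needs the shard required first. A minimal sketch, assuming the conventional require path for this shard:

    # Assumed require path, following the shard's name.
    require "baked_file_system"
    # After this, `extend BakedFileSystem` and `bake_folder` become
    # available inside classes, as the two source files below use them.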

View File

@@ -4,8 +4,12 @@ require "xml"
 module Tartrazine
   def self.theme(name : String) : Theme
     return Theme.from_base16(name[7..]) if name.starts_with? "base16_"
-    path = File.join("styles", "#{name}.xml")
-    Theme.from_xml(File.read(path))
+    Theme.from_xml(ThemeFiles.get("/#{name}.xml").gets_to_end)
+  end
+
+  class ThemeFiles
+    extend BakedFileSystem
+    bake_folder "../styles", __DIR__
   end
 
   class Style
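For orientation, a hedged sketch of what this buys at runtime: the styles folder is compiled into the binary, so theme lookup no longer reads from disk. The get? call and the theme name are assumptions for illustration; only get appears in the commit itself:

    # Not part of this commit. "monokai" is a placeholder theme name;
    # get? is assumed to be baked_file_system's nil-returning variant of get.
    if file = Tartrazine::ThemeFiles.get?("/monokai.xml")
      theme = Tartrazine::Theme.from_xml(file.gets_to_end)
    else
      STDERR.puts "unknown theme"
    end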

View File

@@ -33,6 +33,12 @@ module Tartrazine
     end
   end
 
+  class LexerFiles
+    extend BakedFileSystem
+    bake_folder "../lexers", __DIR__
+  end
+
   # A token, the output of the tokenizer
   alias Token = NamedTuple(type: String, value: String)
@@ -63,8 +69,6 @@ module Tartrazine
       tokens = [] of Token
       pos = 0
       matched = false
-      time = 0
-      count = 0
 
       # Respect the `ensure_nl` config option
       if text.size > 0 && text[-1] != '\n' && config[:ensure_nl] && !usingself
@@ -74,22 +78,22 @@ module Tartrazine
       # Loop through the text, applying rules
       while pos < text.size
         state = states[@state_stack.last]
-        Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" }
+        # Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" }
         state.rules.each do |rule|
           matched, new_pos, new_tokens = rule.match(text, pos, self)
           if matched
             # Move position forward, save the tokens,
             # tokenize from the new position
-            Log.trace { "MATCHED: #{rule.xml}" }
+            # Log.trace { "MATCHED: #{rule.xml}" }
             pos = new_pos
             tokens += new_tokens
             break
           end
-          Log.trace { "NOT MATCHED: #{rule.xml}" }
+          # Log.trace { "NOT MATCHED: #{rule.xml}" }
         end
         # If no rule matches, emit an error token
         unless matched
-          Log.trace { "Error at #{pos}" }
+          # Log.trace { "Error at #{pos}" }
           tokens << {type: "Error", value: "#{text[pos]}"}
           pos += 1
         end
@@ -184,7 +188,7 @@ module Tartrazine
   end
 
   def self.lexer(name : String) : Lexer
-    Lexer.from_xml(File.read("lexers/#{name}.xml"))
+    Lexer.from_xml(LexerFiles.get("/#{name}.xml").gets_to_end)
   end
 end
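The lexer side mirrors the theme change: the lexers folder is baked into the binary, so Tartrazine.lexer resolves names without touching the filesystem. A rough usage sketch; the lexer name is a placeholder, and both calls are the same ones the new code uses:

    # Illustrative only; "c" stands in for any lexer XML baked from lexers/.
    lexer = Tartrazine.lexer("c")

    # Equivalent lower-level lookup via the baked class added in this commit;
    # raw ends up holding the lexer definition's XML source.
    raw = Tartrazine::LexerFiles.get("/c.xml").gets_to_end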