From d5581a356e5de212ba962c489494c1242d8cce72 Mon Sep 17 00:00:00 2001
From: Roberto Alsina
Date: Wed, 7 Aug 2024 16:28:26 -0300
Subject: [PATCH] Baked fs

---
 shard.yml         |  2 ++
 src/styles.cr     |  8 ++++++--
 src/tartrazine.cr | 18 +++++++++++-------
 3 files changed, 19 insertions(+), 9 deletions(-)

diff --git a/shard.yml b/shard.yml
index 946fac5..f4e79e9 100644
--- a/shard.yml
+++ b/shard.yml
@@ -9,6 +9,8 @@ targets:
     main: src/main.cr
 
 dependencies:
+  baked_file_system:
+    github: schovi/baked_file_system
   base58:
     github: crystal-china/base58.cr
   sixteen:
diff --git a/src/styles.cr b/src/styles.cr
index 7d7dfc8..8472e88 100644
--- a/src/styles.cr
+++ b/src/styles.cr
@@ -4,8 +4,12 @@ require "xml"
 module Tartrazine
   def self.theme(name : String) : Theme
     return Theme.from_base16(name[7..]) if name.starts_with? "base16_"
-    path = File.join("styles", "#{name}.xml")
-    Theme.from_xml(File.read(path))
+    Theme.from_xml(ThemeFiles.get("/#{name}.xml").gets_to_end)
+  end
+
+  class ThemeFiles
+    extend BakedFileSystem
+    bake_folder "../styles", __DIR__
   end
 
   class Style
diff --git a/src/tartrazine.cr b/src/tartrazine.cr
index a6a1f03..a75c3c0 100644
--- a/src/tartrazine.cr
+++ b/src/tartrazine.cr
@@ -33,6 +33,12 @@ module Tartrazine
     end
   end
 
+  class LexerFiles
+    extend BakedFileSystem
+
+    bake_folder "../lexers", __DIR__
+  end
+
   # A token, the output of the tokenizer
   alias Token = NamedTuple(type: String, value: String)
 
@@ -63,8 +69,6 @@ module Tartrazine
       tokens = [] of Token
       pos = 0
       matched = false
-      time = 0
-      count = 0
 
       # Respect the `ensure_nl` config option
       if text.size > 0 && text[-1] != '\n' && config[:ensure_nl] && !usingself
@@ -74,22 +78,22 @@ module Tartrazine
       # Loop through the text, applying rules
       while pos < text.size
         state = states[@state_stack.last]
-        Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" }
+        # Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" }
         state.rules.each do |rule|
           matched, new_pos, new_tokens = rule.match(text, pos, self)
           if matched
             # Move position forward, save the tokens,
             # tokenize from the new position
-            Log.trace { "MATCHED: #{rule.xml}" }
+            # Log.trace { "MATCHED: #{rule.xml}" }
             pos = new_pos
             tokens += new_tokens
             break
           end
-          Log.trace { "NOT MATCHED: #{rule.xml}" }
+          # Log.trace { "NOT MATCHED: #{rule.xml}" }
         end
         # If no rule matches, emit an error token
         unless matched
-          Log.trace { "Error at #{pos}" }
+          # Log.trace { "Error at #{pos}" }
           tokens << {type: "Error", value: "#{text[pos]}"}
           pos += 1
         end
@@ -184,7 +188,7 @@ module Tartrazine
   end
 
   def self.lexer(name : String) : Lexer
-    Lexer.from_xml(File.read("lexers/#{name}.xml"))
+    Lexer.from_xml(LexerFiles.get("/#{name}.xml").gets_to_end)
   end
 end