Mirror of https://github.com/ralsina/tartrazine.git, synced 2025-09-07 22:33:09 +00:00

Comparing commits: e38feb0736 ... v0.1.1 (16 commits)
Commits in this comparison (SHA1):
a0ff4e0118, ece3d4163a, 3180168261, 5c074344d5, d3439563f2, 8167af78f0,
ba50934005, d293ec8d76, b43501da98, 9824431317, 2ad3cde7f1, aa1044ed22,
f0d6b01362, e1048abe33, d5581a356e, 916ab86f60
.gitignore (vendored, 1 line changed)
@@ -6,3 +6,4 @@
 chroma/
 pygments/
 shard.lock
+.vscode/
Dockerfile.static (new file, 15 lines)
@@ -0,0 +1,15 @@
+FROM --platform=${TARGETPLATFORM:-linux/amd64} alpine:3.20 AS build
+RUN apk add --no-cache \
+    crystal \
+    shards \
+    yaml-dev \
+    yaml-static \
+    openssl-dev \
+    openssl-libs-static \
+    libxml2-dev \
+    libxml2-static \
+    zlib-dev \
+    zlib-static \
+    xz-dev \
+    xz-static \
+    make
Makefile (new file, 7 lines)
@@ -0,0 +1,7 @@
+build: $(wildcard src/**/*.cr) $(wildcard lexers/*xml) $(wildcard styles/*xml) shard.yml
+	shards build -Dstrict_multi_assign -Dno_number_autocast
+release: $(wildcard src/**/*.cr) $(wildcard lexers/*xml) $(wildcard styles/*xml) shard.yml
+	shards build --release
+static: $(wildcard src/**/*.cr) $(wildcard lexers/*xml) $(wildcard styles/*xml) shard.yml
+	shards build --release --static
+	strip bin/tartrazine
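Aside: the -Dstrict_multi_assign flag that the build target passes to shards build tightens multiple assignment. A minimal Crystal sketch of the difference (the example values are made up, not from the repo):

    # Without -Dstrict_multi_assign this assigns a = 1, b = 2 and silently
    # drops the 3; with the flag, the size mismatch raises IndexError.
    values = [1, 2, 3]
    a, b = values
    puts a + b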
build_static.sh (new executable file, 16 lines)
@@ -0,0 +1,16 @@
+#!/bin/bash
+set -e
+
+docker run --rm --privileged \
+    multiarch/qemu-user-static \
+    --reset -p yes
+
+# Build for AMD64
+docker build . -f Dockerfile.static -t tartrazine-builder
+docker run -ti --rm -v "$PWD":/app --user="$UID" tartrazine-builder /bin/sh -c "cd /app && rm -rf lib shard.lock && make static"
+mv bin/tartrazine bin/tartrazine-static-linux-amd64
+
+# Build for ARM64
+docker build . -f Dockerfile.static --platform linux/arm64 -t tartrazine-builder
+docker run -ti --rm -v "$PWD":/app --platform linux/arm64 --user="$UID" tartrazine-builder /bin/sh -c "cd /app && rm -rf lib shard.lock && make static"
+mv bin/tartrazine bin/tartrazine-static-linux-arm64
shard.yml
@@ -1,5 +1,5 @@
 name: tartrazine
-version: 0.1.0
+version: 0.1.1

 authors:
   - Roberto Alsina <roberto.alsina@gmail.com>
@@ -9,13 +9,12 @@ targets:
   main: src/main.cr

 dependencies:
   baked_file_system:
     github: schovi/baked_file_system
   base58:
     github: crystal-china/base58.cr
   sixteen:
     github: ralsina/sixteen
+    branch: main
-  cre2:
-    git: "https://git.ralsina.me/ralsina/cre2.git"

 crystal: ">= 1.13.0"
src/actions.cr
@@ -1,4 +1,9 @@
-require "xml"
+require "./actions"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"

 # These are Lexer actions. When a rule matches, it will
 # perform a list of actions. These actions can emit tokens
@@ -26,12 +31,11 @@ module Tartrazine
     end

     # ameba:disable Metrics/CyclomaticComplexity
-    def emit(match : MatchData,
-             lexer : Lexer, match_group = 0) : Array(Token)
+    def emit(match : Regex::MatchData?, lexer : Lexer, match_group = 0) : Array(Token)
       case type
       when "token"
-        raise Exception.new "Can't have a token without a match" if match.nil? || match[0].size == 0
-        [Token.new(type: xml["type"], value: match[0])]
+        raise Exception.new "Can't have a token without a match" if match.nil?
+        [Token.new(type: xml["type"], value: match[match_group])]
       when "push"
         states_to_push = xml.attributes.select { |attrib|
           attrib.name == "state"
@@ -64,37 +68,35 @@ module Tartrazine
       when "bygroups"
         # FIXME: handle
         # ><bygroups>
         # <token type="Punctuation"/>
         # <token type="Punctuation"/>
         # None
         # <token type="LiteralStringRegex"/>
         #
         # where that None means skipping a group
         #
-        raise Exception.new "Can't have a bygroups without a match" if match.nil? || match[0].size == 0
+        raise Exception.new "Can't have a token without a match" if match.nil?
+
+        # Each group matches an action. If the group match is empty,
+        # the action is skipped.
         result = [] of Token
         @actions.each_with_index do |e, i|
-          next if match[i].size == 0
-          result += e.emit(match, lexer, i)
+          next if match[i + 1]?.nil?
+          result += e.emit(match, lexer, i + 1)
         end
         result
       when "using"
         # Shunt to another lexer entirely
-        return [] of Token if match.nil? || match[0].size == 0
+        return [] of Token if match.nil?
         lexer_name = xml["lexer"].downcase
-        # Log.trace { "to tokenize: #{match[match_group]}" }
-        to_tokenize = match[match_group]
-        Tartrazine.lexer(lexer_name).tokenize(to_tokenize, usingself: true)
+        Log.trace { "to tokenize: #{match[match_group]}" }
+        Tartrazine.lexer(lexer_name).tokenize(match[match_group], usingself: true)
       when "usingself"
         # Shunt to another copy of this lexer
-        return [] of Token if match.nil? || match[0].size == 0
+        return [] of Token if match.nil?

         new_lexer = Lexer.from_xml(lexer.xml)
-        # Log.trace { "to tokenize: #{match[match_group]}" }
-        to_tokenize = match[match_group]
-        new_lexer.tokenize(to_tokenize, usingself: true)
+        Log.trace { "to tokenize: #{match[match_group]}" }
+        new_lexer.tokenize(match[match_group], usingself: true)
       when "combined"
         # Combine two states into one anonymous state
         states = xml.attributes.select { |attrib|
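For context on the bygroups change: in Crystal's Regex::MatchData, index 0 is the whole match and capture groups start at 1, so the action at position i pairs with group i + 1, and match[i + 1]? returns nil for a group that did not participate. A standalone Crystal sketch, not code from the repo:

    match = /(\w+)=(\d+)(!)?/.match("count=42")
    if match
      p! match[0]  # => "count=42"  the whole match, not a capture group
      p! match[1]  # => "count"     group for the first action
      p! match[2]  # => "42"        group for the second action
      p! match[3]? # => nil         unmatched group, so that action is skipped
    end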
src/formatter.cr
@@ -1,6 +1,10 @@
-require "./constants.cr"
-require "./styles.cr"
-require "./tartrazine.cr"
+require "./actions"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"
+require "colorize"

 module Tartrazine
   # This is the base class for all formatters.
@@ -11,30 +15,43 @@ module Tartrazine
       raise Exception.new("Not implemented")
     end

-    # ameba:disable Metrics/CyclomaticComplexity
     def get_style_defs(theme : Theme) : String
-      output = String.build do |outp|
-        theme.styles.each do |token, style|
-          outp << ".#{get_css_class(token, theme)} {"
-          # These are set or nil
-          outp << "color: #{style.color};" if style.color
-          outp << "background-color: #{style.background};" if style.background
-          outp << "border: 1px solid #{style.border};" if style.border
-
-          # These are true/false/nil
-          outp << "border: none;" if style.border == false
-          outp << "font-weight: bold;" if style.bold
-          outp << "font-weight: 400;" if style.bold == false
-          outp << "font-style: italic;" if style.italic
-          outp << "font-style: normal;" if style.italic == false
-          outp << "text-decoration: underline;" if style.underline
-          outp << "text-decoration: none;" if style.underline == false
-
-          outp << "}"
-        end
-      end
-      output
+      raise Exception.new("Not implemented")
+    end
+  end
+
+  class Ansi < Formatter
+    def format(text : String, lexer : Lexer, theme : Theme) : String
+      output = String.build do |outp|
+        lexer.tokenize(text).each do |token|
+          outp << self.colorize(token[:value], token[:type], theme)
+        end
+      end
+      output
+    end
+
+    def colorize(text : String, token : String, theme : Theme) : String
+      style = theme.styles.fetch(token, nil)
+      return text if style.nil?
+      if theme.styles.has_key?(token)
+        s = theme.styles[token]
+      else
+        # Themes don't contain information for each specific
+        # token type. However, they may contain information
+        # for a parent style. Worst case, we go to the root
+        # (Background) style.
+        s = theme.styles[theme.style_parents(token).reverse.find { |parent|
+          theme.styles.has_key?(parent)
+        }]
+      end
+      colorized = text.colorize
+      s.color.try { |c| colorized = colorized.fore(c.colorize) }
+      # Intentionally not setting background color
+      colorized.mode(:bold) if s.bold
+      colorized.mode(:italic) if s.italic
+      colorized.mode(:underline) if s.underline
+      colorized.to_s
     end
   end

   class Html < Formatter
@@ -53,6 +70,31 @@ module Tartrazine
       output
     end

+    # ameba:disable Metrics/CyclomaticComplexity
+    def get_style_defs(theme : Theme) : String
+      output = String.build do |outp|
+        theme.styles.each do |token, style|
+          outp << ".#{get_css_class(token, theme)} {"
+          # These are set or nil
+          outp << "color: #{style.color.try &.hex};" if style.color
+          outp << "background-color: #{style.background.try &.hex};" if style.background
+          outp << "border: 1px solid #{style.border.try &.hex};" if style.border
+
+          # These are true/false/nil
+          outp << "border: none;" if style.border == false
+          outp << "font-weight: bold;" if style.bold
+          outp << "font-weight: 400;" if style.bold == false
+          outp << "font-style: italic;" if style.italic
+          outp << "font-style: normal;" if style.italic == false
+          outp << "text-decoration: underline;" if style.underline
+          outp << "text-decoration: none;" if style.underline == false
+
+          outp << "}"
+        end
+      end
+      output
+    end
+
     # Given a token type, return the CSS class to use.
     def get_css_class(token, theme)
       return Abbreviations[token] if theme.styles.has_key?(token)
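Combined with the theme and lexer loaders elsewhere in this compare, usage of the new Ansi formatter presumably looks like the sketch below; the lexer and theme names are illustrative assumptions, not taken from this diff:

    require "tartrazine"

    lexer = Tartrazine.lexer("crystal")        # a baked-in lexer
    theme = Tartrazine.theme("base16_monokai") # resolved through sixteen
    puts Tartrazine::Ansi.new.format("puts 1 + 2", lexer, theme)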
src/rules.cr (51 lines changed)
@@ -1,5 +1,9 @@
 require "./actions"
+# require "cre2"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"

 # These are lexer rules. They match with the text being parsed
 # and perform actions, either emitting tokens or changing the
@@ -7,12 +11,8 @@ require "./actions"
 module Tartrazine
   # This rule matches via a regex pattern

-  # alias Regex = CRe2::Regex
-  # alias MatchData = CRe2::MatchDataLike | Regex::MatchData | Nil
-  alias MatchData = Regex::MatchData | Nil
-
   class Rule
-    property pattern : Regex = Regex.new ""
+    property pattern : Regex = Re2.new ""
     property actions : Array(Action) = [] of Action
     property xml : String = "foo"

@@ -20,8 +20,7 @@ module Tartrazine
       match = pattern.match(text, pos)
       # We don't match if the match doesn't move the cursor
       # because that causes infinite loops
-
-      return false, pos, [] of Token if match.nil?
+      return false, pos, [] of Token if match.nil? || match.end == 0
       # Log.trace { "#{match}, #{pattern.inspect}, #{text}, #{pos}" }
       tokens = [] of Token
       # Emit the tokens
@@ -29,17 +28,18 @@ module Tartrazine
         # Emit the token
         tokens += action.emit(match, lexer)
       end
-      # Log.trace { "#{xml}, #{match.end}, #{tokens}" }
-      return true, match[0].size, tokens
+      Log.trace { "#{xml}, #{match.end}, #{tokens}" }
+      return true, match.end, tokens
     end

     def initialize(node : XML::Node, multiline, dotall, ignorecase)
       @xml = node.to_s
-      options = Regex::Options::ANCHORED
-      options |= Regex::Options::MULTILINE if multiline
-      options |= Regex::Options::DOTALL if dotall
-      options |= Regex::Options::IGNORE_CASE if ignorecase
-      @pattern = Regex.new(node["pattern"], options)
+      @pattern = Re2.new(
+        node["pattern"],
+        multiline,
+        dotall,
+        ignorecase,
+        anchored: true)
       add_actions(node)
     end

@@ -91,4 +91,25 @@ module Tartrazine
       add_actions(node)
     end
   end
+
+  # This is a hack to workaround that Crystal seems to disallow
+  # having regexes multiline but not dot_all
+  class Re2 < Regex
+    @source = "fa"
+    @options = Regex::Options::None
+    @jit = true
+
+    def initialize(pattern : String, multiline = false, dotall = false, ignorecase = false, anchored = false)
+      flags = LibPCRE2::UTF | LibPCRE2::DUPNAMES |
+              LibPCRE2::UCP
+      flags |= LibPCRE2::MULTILINE if multiline
+      flags |= LibPCRE2::DOTALL if dotall
+      flags |= LibPCRE2::CASELESS if ignorecase
+      flags |= LibPCRE2::ANCHORED if anchored
+      flags |= LibPCRE2::NO_UTF_CHECK
+      @re = Regex::PCRE2.compile(pattern, flags) do |error_message|
+        raise Exception.new(error_message)
+      end
+    end
+  end
 end
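The anchored: true flag is what lets Rule#match trust match.end as the new cursor position: an anchored pattern matches exactly at pos or not at all. A sketch using the stock Regex class to show the behavior the Re2 subclass packages up (the example string is an assumption):

    anchored = Regex.new("[0-9]+", Regex::Options::ANCHORED)
    p! anchored.match("abc123", 0)           # => nil, no digits at position 0
    p! anchored.match("abc123", 3).try &.end # => 6, safe to jump the cursor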
src/styles.cr
@@ -1,11 +1,23 @@
+require "./actions"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"
 require "sixteen"
 require "xml"

 module Tartrazine
+  alias Color = Sixteen::Color
+
   def self.theme(name : String) : Theme
     return Theme.from_base16(name[7..]) if name.starts_with? "base16_"
-    path = File.join("styles", "#{name}.xml")
-    Theme.from_xml(File.read(path))
+    Theme.from_xml(ThemeFiles.get("/#{name}.xml").gets_to_end)
   end

+  class ThemeFiles
+    extend BakedFileSystem
+    bake_folder "../styles", __DIR__
+  end
+
   class Style
@@ -19,9 +31,9 @@ module Tartrazine

     # These properties are either set or nil
     # (inherit from parent style)
-    property background : String?
-    property border : String?
-    property color : String?
+    property background : Color?
+    property border : Color?
+    property color : Color?

     # Styles are incomplete by default and inherit
     # from parents. If this is true, this style
@@ -91,33 +103,33 @@ module Tartrazine
     # The color assignments are adapted from
     # https://github.com/mohd-akram/base16-pygments/

-    theme.styles["Background"] = Style.new(color: t.palette["base05"], background: t.palette["base00"])
-    theme.styles["Text"] = Style.new(color: t.palette["base05"])
-    theme.styles["Error"] = Style.new(color: t.palette["base08"])
-    theme.styles["Comment"] = Style.new(color: t.palette["base03"])
-    theme.styles["CommentPreproc"] = Style.new(color: t.palette["base0F"])
-    theme.styles["CommentPreprocFile"] = Style.new(color: t.palette["base0B"])
-    theme.styles["Keyword"] = Style.new(color: t.palette["base0E"])
-    theme.styles["KeywordType"] = Style.new(color: t.palette["base08"])
-    theme.styles["NameAttribute"] = Style.new(color: t.palette["base0D"])
-    theme.styles["NameBuiltin"] = Style.new(color: t.palette["base08"])
-    theme.styles["NameBuiltinPseudo"] = Style.new(color: t.palette["base08"])
-    theme.styles["NameClass"] = Style.new(color: t.palette["base0D"])
-    theme.styles["NameConstant"] = Style.new(color: t.palette["base09"])
-    theme.styles["NameDecorator"] = Style.new(color: t.palette["base09"])
-    theme.styles["NameFunction"] = Style.new(color: t.palette["base0D"])
-    theme.styles["NameNamespace"] = Style.new(color: t.palette["base0D"])
-    theme.styles["NameTag"] = Style.new(color: t.palette["base0E"])
-    theme.styles["NameVariable"] = Style.new(color: t.palette["base0D"])
-    theme.styles["NameVariableInstance"] = Style.new(color: t.palette["base08"])
-    theme.styles["LiteralNumber"] = Style.new(color: t.palette["base09"])
-    theme.styles["Operator"] = Style.new(color: t.palette["base0C"])
-    theme.styles["OperatorWord"] = Style.new(color: t.palette["base0E"])
-    theme.styles["Literal"] = Style.new(color: t.palette["base0B"])
-    theme.styles["LiteralString"] = Style.new(color: t.palette["base0B"])
-    theme.styles["LiteralStringInterpol"] = Style.new(color: t.palette["base0F"])
-    theme.styles["LiteralStringRegex"] = Style.new(color: t.palette["base0C"])
-    theme.styles["LiteralStringSymbol"] = Style.new(color: t.palette["base09"])
+    theme.styles["Background"] = Style.new(color: t["base05"], background: t["base00"])
+    theme.styles["Text"] = Style.new(color: t["base05"])
+    theme.styles["Error"] = Style.new(color: t["base08"])
+    theme.styles["Comment"] = Style.new(color: t["base03"])
+    theme.styles["CommentPreproc"] = Style.new(color: t["base0F"])
+    theme.styles["CommentPreprocFile"] = Style.new(color: t["base0B"])
+    theme.styles["Keyword"] = Style.new(color: t["base0E"])
+    theme.styles["KeywordType"] = Style.new(color: t["base08"])
+    theme.styles["NameAttribute"] = Style.new(color: t["base0D"])
+    theme.styles["NameBuiltin"] = Style.new(color: t["base08"])
+    theme.styles["NameBuiltinPseudo"] = Style.new(color: t["base08"])
+    theme.styles["NameClass"] = Style.new(color: t["base0D"])
+    theme.styles["NameConstant"] = Style.new(color: t["base09"])
+    theme.styles["NameDecorator"] = Style.new(color: t["base09"])
+    theme.styles["NameFunction"] = Style.new(color: t["base0D"])
+    theme.styles["NameNamespace"] = Style.new(color: t["base0D"])
+    theme.styles["NameTag"] = Style.new(color: t["base0E"])
+    theme.styles["NameVariable"] = Style.new(color: t["base0D"])
+    theme.styles["NameVariableInstance"] = Style.new(color: t["base08"])
+    theme.styles["LiteralNumber"] = Style.new(color: t["base09"])
+    theme.styles["Operator"] = Style.new(color: t["base0C"])
+    theme.styles["OperatorWord"] = Style.new(color: t["base0E"])
+    theme.styles["Literal"] = Style.new(color: t["base0B"])
+    theme.styles["LiteralString"] = Style.new(color: t["base0B"])
+    theme.styles["LiteralStringInterpol"] = Style.new(color: t["base0F"])
+    theme.styles["LiteralStringRegex"] = Style.new(color: t["base0C"])
+    theme.styles["LiteralStringSymbol"] = Style.new(color: t["base09"])
     theme
   end

@@ -144,9 +156,9 @@ module Tartrazine
       s.underline = true if style.includes?("underline")
       s.underline = false if style.includes?("nounderline")

-      s.color = style.find(&.starts_with?("#")).try &.split("#").last
-      s.background = style.find(&.starts_with?("bg:#")).try &.split("#").last
-      s.border = style.find(&.starts_with?("border:#")).try &.split("#").last
+      s.color = style.find(&.starts_with?("#")).try { |v| Color.new v.split("#").last }
+      s.background = style.find(&.starts_with?("bg:#")).try { |v| Color.new v.split("#").last }
+      s.border = style.find(&.starts_with?("border:#")).try { |v| Color.new v.split("#").last }

       theme.styles[node["type"]] = s
     end
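After this change a theme can come from two places: an XML file baked into the binary via ThemeFiles, or a base16 scheme resolved through sixteen. A sketch of both paths (the theme names are assumptions, not checked against the baked styles folder):

    xml_theme = Tartrazine.theme("autumn")         # ThemeFiles.get("/autumn.xml")
    b16_theme = Tartrazine.theme("base16_monokai") # Theme.from_base16("monokai")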
src/tartrazine.cr
@@ -1,5 +1,10 @@
+require "./actions"
+require "./constants"
+require "./formatter"
+require "./rules"
+require "./styles"
+require "./tartrazine"
 require "baked_file_system"
 require "base58"
 require "json"
 require "log"
@@ -7,7 +12,7 @@ require "xml"

 module Tartrazine
   extend self
-  VERSION = "0.1.0"
+  VERSION = "0.1.1"

   Log = ::Log.for("tartrazine")

@@ -33,6 +38,12 @@ module Tartrazine
     end
   end

+  class LexerFiles
+    extend BakedFileSystem
+
+    bake_folder "../lexers", __DIR__
+  end
+
   # A token, the output of the tokenizer
   alias Token = NamedTuple(type: String, value: String)

@@ -72,22 +83,22 @@ module Tartrazine
       # Loop through the text, applying rules
       while pos < text.size
         state = states[@state_stack.last]
-        Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" }
+        # Log.trace { "Stack is #{@state_stack} State is #{state.name}, pos is #{pos}, text is #{text[pos..pos + 10]}" }
         state.rules.each do |rule|
           matched, new_pos, new_tokens = rule.match(text, pos, self)
           if matched
             # Move position forward, save the tokens,
             # tokenize from the new position
-            Log.trace { "MATCHED: #{rule.xml}" }
+            # Log.trace { "MATCHED: #{rule.xml}" }
             pos = new_pos
             tokens += new_tokens
             break
           end
-          Log.trace { "NOT MATCHED: #{rule.xml}" }
+          # Log.trace { "NOT MATCHED: #{rule.xml}" }
         end
         # If no rule matches, emit an error token
         unless matched
-          Log.trace { "Error at #{pos}" }
+          # Log.trace { "Error at #{pos}" }
           tokens << {type: "Error", value: "#{text[pos]}"}
           pos += 1
         end
@@ -182,7 +193,7 @@ module Tartrazine
   end

   def self.lexer(name : String) : Lexer
-    Lexer.from_xml(File.read("lexers/#{name}.xml"))
+    Lexer.from_xml(LexerFiles.get("/#{name}.xml").gets_to_end)
   end
 end
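The practical effect of LexerFiles plus the self.lexer change is that lexer definitions ship inside the executable, which is what makes the static builds above self-contained. A sketch of the equivalence (the lexer name is an assumption):

    # Tartrazine.lexer("crystal") is now roughly:
    xml = Tartrazine::LexerFiles.get("/crystal.xml").gets_to_end
    lexer = Tartrazine::Lexer.from_xml(xml) # no filesystem access at runtime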