9 Commits

8 changed files with 66 additions and 14 deletions

.github/workflows/ci.yml (new file, 22 lines)

@@ -0,0 +1,22 @@
name: Tests
on:
  # This can't run automatically yet: tests fail because the local
  # environment and CI have different versions of chroma. They need
  # to be pinned to the same version first.
  workflow_dispatch:
permissions:
  contents: read
jobs:
  build:
    runs-on: ubuntu-24.04
    steps:
      - name: Download source
        uses: actions/checkout@v4
      - name: Install Crystal
        uses: crystal-lang/install-crystal@v1
      - name: Run tests
        run: |
          sudo apt-get update && sudo apt-get install golang-chroma -y
          shards install
          crystal tool format --check
          crystal spec -v

.github/workflows/coverage.yml (new file, 26 lines)

@@ -0,0 +1,26 @@
name: Coverage
on:
  workflow_dispatch:
  schedule:
    - cron: "0 1 * * *"
permissions:
  contents: read
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Download source
        uses: actions/checkout@v4
      - name: Install Crystal
        uses: crystal-lang/install-crystal@v1
      - name: Run tests using kcov
        run: |
          sudo apt update && sudo apt install -y kcov
          shards install
          crystal build src/run_tests.cr
          kcov --clean --include-path=./src coverage ./run_tests
          curl -Os https://uploader.codecov.io/latest/linux/codecov
          chmod +x codecov
          ./codecov -t ${CODECOV_TOKEN} -s coverage
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
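The coverage job compiles src/run_tests.cr instead of invoking crystal spec, because kcov traces an ordinary binary. That file's contents are not part of this diff; a minimal sketch of what such a runner can look like, assuming the specs live under spec/:

    # src/run_tests.cr -- hypothetical sketch, not the file from this repo.
    # Building the spec suite into a plain binary lets kcov trace it.
    require "spec"
    require "../spec/**"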


@@ -35,7 +35,7 @@ tasks:
    phony: true
    always_run: true
    dependencies:
-      - bin/hace
+      - bin/tartrazine
    commands: |
      rm ${HOME}/.local/bin/{{name}}
      cp bin/hace ${HOME}/.local/bin/{{name}}


@@ -73,7 +73,7 @@ end
# Helper that creates lexer and tokenizes
def tokenize(lexer_name, text)
  tokenizer = Tartrazine.lexer(lexer_name).tokenizer(text)
-  Tartrazine::Lexer.collapse_tokens(tokenizer.to_a)
+  Tartrazine::RegexLexer.collapse_tokens(tokenizer.to_a)
end

# Helper that tokenizes using chroma to validate the lexer
@@ -85,5 +85,5 @@ def chroma_tokenize(lexer_name, text)
["-f", "json", "-l", lexer_name],
input: input, output: output
)
Tartrazine::Lexer.collapse_tokens(Array(Tartrazine::Token).from_json(output.to_s))
Tartrazine::RegexLexer.collapse_tokens(Array(Tartrazine::Token).from_json(output.to_s))
end
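Together, these helpers let a spec assert that the local tokenizer matches chroma's output. A hedged sketch of such a spec; the lexer name and code snippet are placeholders:

    # Hypothetical spec built on the two helpers above.
    it "tokenizes like chroma" do
      code = %(puts "hello")
      tokenize("crystal", code).should eq chroma_tokenize("crystal", code)
    end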


@@ -12,7 +12,7 @@ module Tartrazine
    def format(text : String, lexer : BaseLexer, io : IO) : Nil
      tokenizer = lexer.tokenizer(text)
-      io << Tartrazine::Lexer.collapse_tokens(tokenizer.to_a).to_json
+      io << Tartrazine::RegexLexer.collapse_tokens(tokenizer.to_a).to_json
    end
  end
end
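For context, this is roughly how that format method is driven. A hedged sketch; the Tartrazine::Json class name is an assumption, since the hunk does not show the enclosing class:

    # Hypothetical usage of the format method above.
    lexer = Tartrazine.lexer("crystal")
    io = IO::Memory.new
    Tartrazine::Json.new.format(%(puts 1), lexer, io)
    io.to_s # JSON array of {type, value} tokens, same-type runs collapsed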


@@ -1,5 +1,6 @@
require "baked_file_system"
require "./constants/lexers"
require "./heuristics"
require "baked_file_system"
module Tartrazine
class LexerFiles
@@ -14,14 +15,14 @@ module Tartrazine
    return lexer_by_filename(filename) if filename
    return lexer_by_mimetype(mimetype) if mimetype
-    Lexer.from_xml(LexerFiles.get("/#{LEXERS_BY_NAME["plaintext"]}.xml").gets_to_end)
+    RegexLexer.from_xml(LexerFiles.get("/#{LEXERS_BY_NAME["plaintext"]}.xml").gets_to_end)
  end

  private def self.lexer_by_mimetype(mimetype : String) : BaseLexer
    lexer_file_name = LEXERS_BY_MIMETYPE.fetch(mimetype, nil)
    raise Exception.new("Unknown mimetype: #{mimetype}") if lexer_file_name.nil?
-    Lexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
+    RegexLexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
  end
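As the hunk shows, lookup tries filename, then mimetype, and finally falls back to plaintext. A hedged usage sketch; the keyword-argument signature is an assumption not shown in this hunk:

    # Hypothetical calls; argument names are assumed.
    Tartrazine.lexer("crystal")               # by name
    Tartrazine.lexer(filename: "foo.py")      # by filename
    Tartrazine.lexer(mimetype: "text/x-ruby") # by mimetype
    Tartrazine.lexer                          # falls back to plaintext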
private def self.lexer_by_name(name : String) : BaseLexer
@@ -29,7 +30,7 @@ module Tartrazine
    return create_delegating_lexer(name) if lexer_file_name.nil? && name.includes? "+"
    raise Exception.new("Unknown lexer: #{name}") if lexer_file_name.nil?
-    Lexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
+    RegexLexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
  end
private def self.lexer_by_filename(filename : String) : BaseLexer
@@ -52,7 +53,7 @@ module Tartrazine
      end
    end
-    Lexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
+    RegexLexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
  end
private def self.lexer_by_content(fname : String) : String?
@@ -152,7 +153,9 @@ module Tartrazine
    end
  end

-  abstract class BaseLexer
+  alias BaseLexer = Lexer
+
+  abstract class Lexer
    property config = {
      name:     "",
      priority: 0.0,
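The rename is done in two steps so existing code keeps compiling: the old abstract BaseLexer becomes Lexer, and an alias preserves the old name. A condensed sketch of the pattern, with method bodies omitted:

    # Condensed from this diff.
    abstract class Lexer
    end

    alias BaseLexer = Lexer # the old name still resolves

    class RegexLexer < BaseLexer # previously `class Lexer < BaseLexer`
    end

    # A signature written against the old API still type-checks:
    def highlight_with(lexer : BaseLexer)
    end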
@@ -174,7 +177,7 @@ module Tartrazine
  # For explanations on what actions and states do
  # the Pygments documentation is a good place to start.
  # https://pygments.org/docs/lexerdevelopment/
-  class Lexer < BaseLexer
+  class RegexLexer < BaseLexer
    # Collapse consecutive tokens of the same type for easier comparison
    # and smaller output
    def self.collapse_tokens(tokens : Array(Tartrazine::Token)) : Array(Tartrazine::Token)
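To illustrate what collapse_tokens does, a hedged example assuming Tartrazine::Token is the {type, value} record serialized elsewhere in this diff:

    # Hypothetical: adjacent tokens of the same type merge into one.
    tokens = [
      {type: "Text", value: "foo"},
      {type: "Text", value: "bar"},
      {type: "Keyword", value: "def"},
    ]
    Tartrazine::RegexLexer.collapse_tokens(tokens)
    # => [{type: "Text", value: "foobar"}, {type: "Keyword", value: "def"}]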
@@ -198,7 +201,7 @@ module Tartrazine
    end

    def self.from_xml(xml : String) : Lexer
-      l = Lexer.new
+      l = RegexLexer.new
      lexer = XML.parse(xml).first_element_child
      if lexer
        config = lexer.children.find { |node|
@@ -263,7 +266,7 @@ module Tartrazine
  #
  # This is useful for things like template languages, where
  # you have Jinja + HTML or Jinja + CSS and so on.
-  class DelegatingLexer < BaseLexer
+  class DelegatingLexer < Lexer
    property language_lexer : BaseLexer
    property root_lexer : BaseLexer
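Combined with the name.includes? "+" branch in an earlier hunk, a composite lexer name produces a DelegatingLexer. A hedged example; the specific pair is a placeholder:

    # Hypothetical: a "language+root" name builds a DelegatingLexer.
    lexer = Tartrazine.lexer("jinja+html")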


@@ -1,5 +1,5 @@
require "docopt"
require "./**"
require "./tartrazine"
HELP = <<-HELP
tartrazine: a syntax highlighting tool


@@ -1,5 +1,6 @@
require "./actions"
require "./formatter"
require "./formatters/**"
require "./rules"
require "./styles"
require "./tartrazine"