Compare commits


No commits in common. "411c969cc4cf4edea031ac1457f7fa4ab730215d" and "8eb8b8cb48e1fb59e938be2b8ad550778d817fc2" have entirely different histories.

4 changed files with 3 additions and 51 deletions


@@ -1,22 +0,0 @@
-name: Tests
-on:
-  # This can't yet run automatically, because tests fail because of
-  # different versions of chroma. Need to get the same one in my
-  # local env and in CI
-  workflow_dispatch:
-permissions:
-  contents: read
-jobs:
-  build:
-    runs-on: ubuntu-24.04
-    steps:
-      - name: Download source
-        uses: actions/checkout@v4
-      - name: Install Crystal
-        uses: crystal-lang/install-crystal@v1
-      - name: Run tests
-        run: |
-          sudo apt-get update && sudo apt-get install golang-chroma -y
-          shards install
-          crystal tool format --check
-          crystal spec -v
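The comment at the top of this deleted workflow records why it was manual-only: CI and the author's local environment pick up different chroma versions, so the comparison tests disagree. A minimal sketch of one way to close that gap by pinning the package in CI; the version string is hypothetical, and apt-cache madison golang-chroma would list what the runner image actually offers:

    - name: Install a pinned chroma
      run: |
        sudo apt-get update
        # Hypothetical version string; it must exist in the Ubuntu
        # archive for this runner image.
        sudo apt-get install -y golang-chroma=0.10.0-1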


@@ -1,26 +0,0 @@
-name: Coverage
-on:
-  workflow_dispatch:
-  schedule:
-    - cron: "0 1 * * *"
-permissions:
-  contents: read
-jobs:
-  build:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Download source
-        uses: actions/checkout@v4
-      - name: Install Crystal
-        uses: crystal-lang/install-crystal@v1
-      - name: Run tests using kcov
-        run: |
-          sudo apt update && sudo apt install kcov
-          shards install
-          crystal build src/run_tests.cr
-          kcov --clean --include-path=./src coverage ./run_tests
-          curl -Os https://uploader.codecov.io/latest/linux/codecov
-          chmod +x codecov
-          ./codecov -t ${CODECOV_TOKEN} -s coverage
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
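This coverage workflow compiles src/run_tests.cr into a standalone binary because kcov traces a real executable, whereas crystal spec runs a temporary one. The runner file itself is not part of this diff; a minimal sketch of what it might contain, assuming the specs live under spec/:

    # Hypothetical src/run_tests.cr: requiring "spec" makes the spec
    # runner execute at exit, so building and running this one file
    # runs the whole suite under kcov.
    require "spec"
    require "../spec/**"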


@@ -35,7 +35,7 @@ tasks:
     phony: true
     always_run: true
     dependencies:
-      - bin/tartrazine
+      - bin/hace
     commands: |
       rm ${HOME}/.local/bin/{{name}}
       cp bin/hace ${HOME}/.local/bin/{{name}}


@@ -73,7 +73,7 @@ end
 # Helper that creates lexer and tokenizes
 def tokenize(lexer_name, text)
   tokenizer = Tartrazine.lexer(lexer_name).tokenizer(text)
-  Tartrazine::RegexLexer.collapse_tokens(tokenizer.to_a)
+  Tartrazine::Lexer.collapse_tokens(tokenizer.to_a)
 end

 # Helper that tokenizes using chroma to validate the lexer
@@ -85,5 +85,5 @@ def chroma_tokenize(lexer_name, text)
     ["-f", "json", "-l", lexer_name],
     input: input, output: output
   )
-  Tartrazine::RegexLexer.collapse_tokens(Array(Tartrazine::Token).from_json(output.to_s))
+  Tartrazine::Lexer.collapse_tokens(Array(Tartrazine::Token).from_json(output.to_s))
 end
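Together these helpers enable round-trip validation: tokenize runs Tartrazine's own lexer, chroma_tokenize feeds the same input through the chroma binary, and a spec compares the collapsed token streams. A minimal sketch of such a spec, placed alongside the helpers, with an illustrative lexer name and sample text:

    # Hypothetical example; any lexer name known to both Tartrazine
    # and chroma would do.
    describe "tokenize" do
      it "agrees with chroma on a small snippet" do
        text = %(puts "hello")
        tokenize("crystal", text).should eq chroma_tokenize("crystal", text)
      end
    end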