Compare commits

..

7 Commits

4 changed files with 51 additions and 3 deletions

22
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,22 @@
---
name: Tests
on:
  # This can't yet run automatically, because tests fail because of
  # different versions of chroma. Need to get the same one in my
  # local env and in CI
  workflow_dispatch:
# Least-privilege token: the job only reads the repository contents
permissions:
  contents: read
jobs:
  build:
    runs-on: ubuntu-24.04
    steps:
      - name: Download source
        uses: actions/checkout@v4
      - name: Install Crystal
        uses: crystal-lang/install-crystal@v1
      - name: Run tests
        run: |
          # chroma is the reference implementation the specs compare against
          sudo apt-get update && sudo apt-get install -y golang-chroma
          shards install
          crystal tool format --check
          crystal spec -v

26
.github/workflows/coverage.yml vendored Normal file
View File

@@ -0,0 +1,26 @@
---
name: Coverage
on:
  # Manual trigger plus a nightly scheduled run
  workflow_dispatch:
  schedule:
    - cron: "0 1 * * *"  # 01:00 UTC daily
# Least-privilege token: the job only reads the repository contents
permissions:
  contents: read
jobs:
  build:
    # Pin the runner image (matches ci.yml) instead of ubuntu-latest,
    # so scheduled runs stay reproducible when "latest" moves
    runs-on: ubuntu-24.04
    steps:
      - name: Download source
        uses: actions/checkout@v4
      - name: Install Crystal
        uses: crystal-lang/install-crystal@v1
      - name: Run tests using kcov
        run: |
          # -y is required: without it apt aborts at the confirmation
          # prompt because CI runners have no TTY attached
          sudo apt-get update && sudo apt-get install -y kcov
          shards install
          crystal build src/run_tests.cr
          kcov --clean --include-path=./src coverage ./run_tests
          curl -Os https://uploader.codecov.io/latest/linux/codecov
          chmod +x codecov
          ./codecov -t ${CODECOV_TOKEN} -s coverage
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -35,7 +35,7 @@ tasks:
phony: true
always_run: true
dependencies:
- bin/hace
- bin/tartrazine
commands: |
rm ${HOME}/.local/bin/{{name}}
cp bin/hace ${HOME}/.local/bin/{{name}}

View File

@@ -73,7 +73,7 @@ end
# Helper that creates lexer and tokenizes
def tokenize(lexer_name, text)
tokenizer = Tartrazine.lexer(lexer_name).tokenizer(text)
Tartrazine::Lexer.collapse_tokens(tokenizer.to_a)
Tartrazine::RegexLexer.collapse_tokens(tokenizer.to_a)
end
# Helper that tokenizes using chroma to validate the lexer
@@ -85,5 +85,5 @@ def chroma_tokenize(lexer_name, text)
["-f", "json", "-l", lexer_name],
input: input, output: output
)
Tartrazine::Lexer.collapse_tokens(Array(Tartrazine::Token).from_json(output.to_s))
Tartrazine::RegexLexer.collapse_tokens(Array(Tartrazine::Token).from_json(output.to_s))
end