Mirror of https://github.com/ralsina/tartrazine.git
Synced 2025-06-08 04:30:26 -03:00

Compare commits: 411c969cc4 ... 6a38f2f5fb (15 commits)
Commits (SHA1):
6a38f2f5fb
c4a2d1a752
358be51e27
2cff0fea48
40202eb2d6
3ed4a7eab8
6f797f999a
b762307660
eb0cc089a9
88f2aace20
fe943fa399
08f8138e05
3c0b3c38e2
a1318501a5
daf24189bf
.github/workflows/ci.yml (new file, vendored)
@@ -0,0 +1,26 @@
name: Tests
on:
  # This can't yet run automatically, because tests fail because of
  # different versions of chroma. Need to get the same one in my
  # local env and in CI
  workflow_dispatch:
  push:
permissions:
  contents: read
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Download source
        uses: actions/checkout@v4
      - name: Install Crystal
        uses: crystal-lang/install-crystal@v1
      - name: Run tests
        run: |
          wget https://github.com/alecthomas/chroma/releases/download/v2.14.0/chroma-2.14.0-linux-amd64.tar.gz
          tar xzvf chroma-2.14.0*gz
          mkdir ~/.local/bin -p
          sudo mv chroma ~/.local/bin
          shards install
          crystal tool format --check
          crystal spec -v
.github/workflows/coverage.yml (new file, vendored)
@@ -0,0 +1,30 @@
name: Coverage
on:
  workflow_dispatch:
  schedule:
    - cron: "0 1 * * *"
permissions:
  contents: read
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Download source
        uses: actions/checkout@v4
      - name: Install Crystal
        uses: crystal-lang/install-crystal@v1
      - name: Run tests using kcov
        run: |
          sudo apt update && sudo apt install kcov
          wget https://github.com/alecthomas/chroma/releases/download/v2.14.0/chroma-2.14.0-linux-amd64.tar.gz
          tar xzvf chroma-2.14.0*gz
          mkdir ~/.local/bin -p
          sudo mv chroma ~/.local/bin
          shards install
          crystal build src/run_tests.cr
          kcov --clean --include-path=./src $PWD/coverage ./run_tests
          curl -Os https://uploader.codecov.io/latest/linux/codecov
          chmod +x codecov
          ./codecov -t ${CODECOV_TOKEN} -s coverage
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
.gitignore (vendored)
@@ -9,3 +9,6 @@ shard.lock
 .vscode/
 .crystal/
 venv/
+.croupier
+coverage/
+run_tests
.md.rb (new file)
@@ -0,0 +1,3 @@
exclude_rule 'MD033' # Inline HTML
exclude_rule 'MD005' # 3-space indent for lists
exclude_rule 'MD024' # Repeated headings
CHANGELOG.md
@@ -2,12 +2,35 @@
 
 All notable changes to this project will be documented in this file.
 
-## [unreleased]
+## [0.6.4] - 2024-08-28
+
+### 🐛 Bug Fixes
+
+- Ameba
+- Variable bame in Hacefile
+
+### 📚 Documentation
+
+- Mention AUR package
 
 ### ⚙️ Miscellaneous Tasks
 
 - Pre-commit hooks
 - Git-cliff config
+- Started changelog
 - Force conventional commit messages
+- Updated pre-commit
+
+### Build
+
+- Switch from Makefile to Hacefile
+- Added do_release script
+- Fix markdown check
+
+### Bump
+
+- Release v0.6.4
 
 ## [0.6.1] - 2024-08-25
Hacefile.yml (new file)
@@ -0,0 +1,115 @@
variables:
  FLAGS: "-d --error-trace"
  NAME: "tartrazine"

tasks:
  build:
    default: true
    dependencies:
      - src
      - shard.lock
      - shard.yml
      - Hacefile.yml
      - lexers/*xml
      - styles/*xml
    outputs:
      - bin/{{NAME}}
    commands: |
      shards build {{FLAGS}}

  get-deps:
    dependencies:
      - shard.yml
    outputs:
      - shard.lock
    commands: |
      shards install

  build-release:
    phony: true
    always_run: true
    commands: |
      hace build FLAGS="--release"

  install:
    phony: true
    always_run: true
    dependencies:
      - bin/tartrazine
    commands: |
      rm ${HOME}/.local/bin/{{NAME}}
      cp bin/hace ${HOME}/.local/bin/{{NAME}}

  static:
    outputs:
      - bin/{{NAME}}-static-linux-amd64
      - bin/{{NAME}}-static-linux-arm64
    commands: |
      hace clean
      ./build_static.sh

  test:
    dependencies:
      - src
      - spec
      - shard.lock
      - shard.yml
    commands: |
      crystal spec -v --error-trace
    phony: true
    always_run: true

  lint:
    dependencies:
      - src
      - spec
      - shard.lock
      - shard.yml
    commands: |
      crystal tool format src/*.cr spec/*.cr
      ameba --fix
    always_run: true
    phony: true

  docs:
    dependencies:
      - src
      - shard.lock
      - shard.yml
      - README.md
    commands: |
      crystal docs
    outputs:
      - docs/index.html

  pre-commit:
    default: true
    outputs:
      - .git/hooks/commit-msg
      - .git/hooks/pre-commit
    dependencies:
      - .pre-commit-config.yaml
    commands: |
      pre-commit install --hook-type commit-msg
      pre-commit install

  clean:
    phony: true
    always_run: true
    commands: |
      rm -rf shard.lock bin lib

  coverage:
    dependencies:
      - src
      - spec
      - shard.lock
      - shard.yml
    commands: |
      shards install
      crystal build -o bin/run_tests src/run_tests.cr
      rm -rf coverage/
      mkdir coverage
      kcov --clean --include-path=./src ${PWD}/coverage ./bin/run_tests
    outputs:
      - coverage/index.html
Makefile (deleted)
@@ -1,21 +0,0 @@
all: build

build: $(wildcard src/**/*.cr) $(wildcard lexers/*xml) $(wildcard styles/*xml) shard.yml
	shards build -Dstrict_multi_assign -Dno_number_autocast -d --error-trace
release: $(wildcard src/**/*.cr) $(wildcard lexers/*xml) $(wildcard styles/*xml) shard.yml
	shards build --release
static: $(wildcard src/**/*.cr) $(wildcard lexers/*xml) $(wildcard styles/*xml) shard.yml
	shards build --release --static
	strip bin/tartrazine


clean:
	rm -rf bin lib shard.lock

test:
	crystal spec

lint:
	ameba --fix src spec

.PHONY: clean all test bin lint
@@ -1,5 +1,8 @@
 # TARTRAZINE
 
+[](https://github.com/ralsina/tartrazine/actions/workflows/ci.yml)
+[](https://codecov.io/gh/ralsina/tartrazine)
+
 Tartrazine is a library to syntax-highlight code. It is
 a port of [Pygments](https://pygments.org/) to
 [Crystal](https://crystal-lang.org/).
@@ -7,10 +7,10 @@ docker run --rm --privileged \
 
 # Build for AMD64
 docker build . -f Dockerfile.static -t tartrazine-builder
-docker run -ti --rm -v "$PWD":/app --user="$UID" tartrazine-builder /bin/sh -c "cd /app && rm -rf lib shard.lock && make static"
+docker run -ti --rm -v "$PWD":/app --user="$UID" tartrazine-builder /bin/sh -c "cd /app && rm -rf lib shard.lock && shards build --static --release"
 mv bin/tartrazine bin/tartrazine-static-linux-amd64
 
 # Build for ARM64
 docker build . -f Dockerfile.static --platform linux/arm64 -t tartrazine-builder
-docker run -ti --rm -v "$PWD":/app --platform linux/arm64 --user="$UID" tartrazine-builder /bin/sh -c "cd /app && rm -rf lib shard.lock && make static"
+docker run -ti --rm -v "$PWD":/app --platform linux/arm64 --user="$UID" tartrazine-builder /bin/sh -c "cd /app && rm -rf lib shard.lock && shards build --static --release"
 mv bin/tartrazine bin/tartrazine-static-linux-arm64
do_release.sh (new executable file)
@@ -0,0 +1,15 @@
#!/bin/bash
set e

PKGNAME=$(basename "$PWD")
VERSION=$(git cliff --bumped-version |cut -dv -f2)

sed "s/^version:.*$/version: $VERSION/g" -i shard.yml
git add shard.yml
hace lint test
git cliff --bump -o
git commit -a -m "bump: Release v$VERSION"
git tag "v$VERSION"
git push --tags
hace static
gh release create "v$VERSION" "bin/$PKGNAME-static-linux-amd64" "bin/$PKGNAME-static-linux-arm64" --title "Release v$VERSION" --notes "$(git cliff -l -s all)"
@@ -1,5 +1,5 @@
 name: tartrazine
-version: 0.6.3
+version: 0.6.4
 
 authors:
 - Roberto Alsina <roberto.alsina@gmail.com>
@@ -28,6 +28,7 @@ bad_in_chroma = {
   "#{__DIR__}/tests/octave/test_multilinecomment.txt",
   "#{__DIR__}/tests/php/test_string_escaping_run.txt",
   "#{__DIR__}/tests/python_2/test_cls_builtin.txt",
+  "#{__DIR__}/tests/bqn/test_syntax_roles.txt", # This one only fails in CI
 }
 
 known_bad = {
@@ -73,7 +74,7 @@ end
 # Helper that creates lexer and tokenizes
 def tokenize(lexer_name, text)
   tokenizer = Tartrazine.lexer(lexer_name).tokenizer(text)
-  Tartrazine::Lexer.collapse_tokens(tokenizer.to_a)
+  Tartrazine::RegexLexer.collapse_tokens(tokenizer.to_a)
 end
 
 # Helper that tokenizes using chroma to validate the lexer
@@ -85,5 +86,5 @@ def chroma_tokenize(lexer_name, text)
     ["-f", "json", "-l", lexer_name],
     input: input, output: output
   )
-  Tartrazine::Lexer.collapse_tokens(Array(Tartrazine::Token).from_json(output.to_s))
+  Tartrazine::RegexLexer.collapse_tokens(Array(Tartrazine::Token).from_json(output.to_s))
 end
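
These two helpers carry the spec's validation strategy: the same input is tokenized once through Tartrazine and once through the chroma binary that the CI workflow downloads, and the two collapsed token arrays are compared. A hypothetical spec-style check built on the helpers above, for illustration only (the test file path and lexer name are made up):

# Hypothetical example, not part of this diff: Tartrazine's collapsed tokens
# should match chroma's for the same lexer and input.
describe "lexer output" do
  it "matches chroma for a sample file" do
    text = File.read("#{__DIR__}/tests/ruby/sample.txt") # made-up path
    tokenize("ruby", text).should eq(chroma_tokenize("ruby", text))
  end
end
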
@@ -12,7 +12,7 @@ module Tartrazine
 
     def format(text : String, lexer : BaseLexer, io : IO) : Nil
       tokenizer = lexer.tokenizer(text)
-      io << Tartrazine::Lexer.collapse_tokens(tokenizer.to_a).to_json
+      io << Tartrazine::RegexLexer.collapse_tokens(tokenizer.to_a).to_json
     end
   end
 end
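
The formatter change is mechanical: collapse_tokens moved with the class rename, so the JSON output path now goes through RegexLexer. A minimal sketch of that same path outside the formatter, assuming the shard is required as "tartrazine" and that a "crystal" lexer is available (the calls themselves all appear in this compare):

# Hypothetical sketch, not part of this diff: emit collapsed tokens as JSON,
# mirroring the formatter body above. The require path is an assumption.
require "tartrazine"

lexer = Tartrazine.lexer("crystal")          # a BaseLexer (now an alias of Lexer)
tokenizer = lexer.tokenizer(%(puts "hello")) # same entry point the formatter uses
STDOUT << Tartrazine::RegexLexer.collapse_tokens(tokenizer.to_a).to_json
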
src/lexer.cr
@@ -1,5 +1,6 @@
-require "baked_file_system"
 require "./constants/lexers"
+require "./heuristics"
+require "baked_file_system"
 
 module Tartrazine
   class LexerFiles
@@ -14,14 +15,14 @@ module Tartrazine
     return lexer_by_filename(filename) if filename
     return lexer_by_mimetype(mimetype) if mimetype
 
-    Lexer.from_xml(LexerFiles.get("/#{LEXERS_BY_NAME["plaintext"]}.xml").gets_to_end)
+    RegexLexer.from_xml(LexerFiles.get("/#{LEXERS_BY_NAME["plaintext"]}.xml").gets_to_end)
   end
 
   private def self.lexer_by_mimetype(mimetype : String) : BaseLexer
     lexer_file_name = LEXERS_BY_MIMETYPE.fetch(mimetype, nil)
     raise Exception.new("Unknown mimetype: #{mimetype}") if lexer_file_name.nil?
 
-    Lexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
+    RegexLexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
   end
 
   private def self.lexer_by_name(name : String) : BaseLexer
@@ -29,7 +30,7 @@ module Tartrazine
     return create_delegating_lexer(name) if lexer_file_name.nil? && name.includes? "+"
     raise Exception.new("Unknown lexer: #{name}") if lexer_file_name.nil?
 
-    Lexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
+    RegexLexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
   end
 
   private def self.lexer_by_filename(filename : String) : BaseLexer
@@ -52,7 +53,7 @@ module Tartrazine
       end
     end
 
-    Lexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
+    RegexLexer.from_xml(LexerFiles.get("/#{lexer_file_name}.xml").gets_to_end)
   end
 
   private def self.lexer_by_content(fname : String) : String?
@@ -152,7 +153,9 @@ module Tartrazine
     end
   end
 
-  abstract class BaseLexer
+  alias BaseLexer = Lexer
+
+  abstract class Lexer
     property config = {
       name: "",
       priority: 0.0,
@@ -174,7 +177,7 @@ module Tartrazine
   # For explanations on what actions and states do
   # the Pygments documentation is a good place to start.
   # https://pygments.org/docs/lexerdevelopment/
-  class Lexer < BaseLexer
+  class RegexLexer < BaseLexer
     # Collapse consecutive tokens of the same type for easier comparison
     # and smaller output
     def self.collapse_tokens(tokens : Array(Tartrazine::Token)) : Array(Tartrazine::Token)
@@ -198,7 +201,7 @@ module Tartrazine
     end
 
     def self.from_xml(xml : String) : Lexer
-      l = Lexer.new
+      l = RegexLexer.new
       lexer = XML.parse(xml).first_element_child
       if lexer
         config = lexer.children.find { |node|
@@ -263,7 +266,7 @@ module Tartrazine
   #
   # This is useful for things like template languages, where
   # you have Jinja + HTML or Jinja + CSS and so on.
-  class DelegatingLexer < BaseLexer
+  class DelegatingLexer < Lexer
     property language_lexer : BaseLexer
     property root_lexer : BaseLexer
 
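
Net effect of this file: the old abstract BaseLexer becomes an abstract Lexer (with BaseLexer kept as an alias), the concrete regex-driven implementation becomes RegexLexer, and DelegatingLexer now inherits from Lexer. The public entry point is unchanged; a hypothetical sketch of how the two concrete classes surface through it, assuming both component lexers exist and that a "+"-joined name is how combined lexers are requested (per lexer_by_name above):

# Hypothetical sketch, not part of this diff. A plain name loads a RegexLexer
# from the baked XML definitions; a "+"-joined name goes through
# create_delegating_lexer and yields a DelegatingLexer.
require "tartrazine"

plain = Tartrazine.lexer("python")      # assumed lexer name; a RegexLexer
combo = Tartrazine.lexer("jinja+html")  # assumed names; a DelegatingLexer
puts plain.class
puts combo.class
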
|
@ -1,5 +1,5 @@
|
|||||||
require "docopt"
|
require "docopt"
|
||||||
require "./**"
|
require "./tartrazine"
|
||||||
|
|
||||||
HELP = <<-HELP
|
HELP = <<-HELP
|
||||||
tartrazine: a syntax highlighting tool
|
tartrazine: a syntax highlighting tool
|
||||||
|
src/run_tests.cr (new file)
@@ -0,0 +1 @@
require "../spec/**"
@@ -1,5 +1,6 @@
 require "./actions"
 require "./formatter"
+require "./formatters/**"
 require "./rules"
 require "./styles"
 require "./tartrazine"