
refactor: port the project to docutils and poetry

Thomas Touhey 2023-12-01 02:45:10 +01:00
parent b58c3d6ce1
commit 26a0decd8a
56 changed files with 3551 additions and 4258 deletions

.editorconfig

@@ -1,9 +1,3 @@
#*******************************************************************************
# .editorconfig -- cross-editor configuration.
#
# Find out more at 'editorconfig.org'.
#*******************************************************************************
# Top-most EditorConfig file.
root = true
# Unix-style newlines with a newline ending every file.
@@ -13,5 +7,3 @@ end_of_line = lf
insert_final_newline = true
indent_style = tab
indent_size = 4
# End of file.

.gitignore vendored

@@ -1,9 +1,429 @@
__pycache__
/test.py
/*.egg-info
/dist
/.spyproject
/build
/docs/_build
/venv
/README.html
# Created by https://www.toptal.com/developers/gitignore/api/linux,osx,macos,sublimetext,pycharm,python,kate,kdevelop4,terraform
# Edit at https://www.toptal.com/developers/gitignore?templates=linux,osx,macos,sublimetext,pycharm,python,kate,kdevelop4,terraform
### Kate ###
# Swap Files #
.*.kate-swp
.swp.*
### KDevelop4 ###
*.kdev4
.kdev4/
### Linux ###
*~
# temporary files which can be created if a process still has a handle open of a deleted file
.fuse_hidden*
# KDE directory preferences
.directory
# Linux trash folder which might appear on any partition or disk
.Trash-*
# .nfs files are created when an open file is removed but is still being accessed
.nfs*
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### macOS Patch ###
# iCloud generated files
*.icloud
### OSX ###
# General
# Icon must end with two \r
# Thumbnails
# Files that might appear in the root of a volume
# Directories potentially created on remote AFP share
### PyCharm ###
# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
# User-specific stuff
.idea/**/workspace.xml
.idea/**/tasks.xml
.idea/**/usage.statistics.xml
.idea/**/dictionaries
.idea/**/shelf
# AWS User-specific
.idea/**/aws.xml
# Generated files
.idea/**/contentModel.xml
# Sensitive or high-churn files
.idea/**/dataSources/
.idea/**/dataSources.ids
.idea/**/dataSources.local.xml
.idea/**/sqlDataSources.xml
.idea/**/dynamic.xml
.idea/**/uiDesigner.xml
.idea/**/dbnavigator.xml
# Gradle
.idea/**/gradle.xml
.idea/**/libraries
# Gradle and Maven with auto-import
# When using Gradle or Maven with auto-import, you should exclude module files,
# since they will be recreated, and may cause churn. Uncomment if using
# auto-import.
# .idea/artifacts
# .idea/compiler.xml
# .idea/jarRepositories.xml
# .idea/modules.xml
# .idea/*.iml
# .idea/modules
# *.iml
# *.ipr
# CMake
cmake-build-*/
# Mongo Explorer plugin
.idea/**/mongoSettings.xml
# File-based project format
*.iws
# IntelliJ
out/
# mpeltonen/sbt-idea plugin
.idea_modules/
# JIRA plugin
atlassian-ide-plugin.xml
# Cursive Clojure plugin
.idea/replstate.xml
# SonarLint plugin
.idea/sonarlint/
# Crashlytics plugin (for Android Studio and IntelliJ)
com_crashlytics_export_strings.xml
crashlytics.properties
crashlytics-build.properties
fabric.properties
# Editor-based Rest Client
.idea/httpRequests
# Android studio 3.1+ serialized cache file
.idea/caches/build_file_checksums.ser
### PyCharm Patch ###
# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
# *.iml
# modules.xml
# .idea/misc.xml
# *.ipr
# Sonarlint plugin
# https://plugins.jetbrains.com/plugin/7973-sonarlint
.idea/**/sonarlint/
# SonarQube Plugin
# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
.idea/**/sonarIssues.xml
# Markdown Navigator plugin
# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
.idea/**/markdown-navigator.xml
.idea/**/markdown-navigator-enh.xml
.idea/**/markdown-navigator/
# Cache file creation bug
# See https://youtrack.jetbrains.com/issue/JBR-2257
.idea/$CACHE_FILE$
# CodeStream plugin
# https://plugins.jetbrains.com/plugin/12206-codestream
.idea/codestream.xml
# Azure Toolkit for IntelliJ plugin
# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
.idea/**/azureSettings.xml
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml
# ruff
.ruff_cache/
# LSP config files
pyrightconfig.json
### SublimeText ###
# Cache files for Sublime Text
*.tmlanguage.cache
*.tmPreferences.cache
*.stTheme.cache
# Workspace files are user-specific
*.sublime-workspace
# Project files should be checked into the repository, unless a significant
# proportion of contributors will probably not be using Sublime Text
# *.sublime-project
# SFTP configuration file
sftp-config.json
sftp-config-alt*.json
# Package control specific files
Package Control.last-run
Package Control.ca-list
Package Control.ca-bundle
Package Control.system-ca-bundle
Package Control.cache/
Package Control.ca-certs/
Package Control.merged-ca-bundle
Package Control.user-ca-bundle
oscrypto-ca-bundle.crt
bh_unicode_properties.cache
# Sublime-github package stores a github token in this file
# https://packagecontrol.io/packages/sublime-github
GitHub.sublime-settings
### Terraform ###
# Local .terraform directories
**/.terraform/*
# .tfstate files
*.tfstate
*.tfstate.*
# Crash log files
crash.log
crash.*.log
# Exclude all .tfvars files, which are likely to contain sensitive data, such as
# password, private keys, and other secrets. These should not be part of version
# control as they are data points which are potentially sensitive and subject
# to change depending on the environment.
*.tfvars
*.tfvars.json
# Ignore override files as they are usually used to override resources locally and so
# are not checked in
override.tf
override.tf.json
*_override.tf
*_override.tf.json
# Include override files you do wish to add to version control using negated pattern
# !example_override.tf
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
# example: *tfplan*
# Ignore CLI configuration files
.terraformrc
terraform.rc
# End of https://www.toptal.com/developers/gitignore/api/linux,osx,macos,sublimetext,pycharm,python,kate,kdevelop4,terraform
.terraform.lock.hcl
.envrc
.vault-pass

.pre-commit-config.yaml Normal file

@@ -0,0 +1,67 @@
default_language_version:
python: python3.8
repos:
- repo: https://github.com/commitizen-tools/commitizen
rev: v2.38.0
hooks:
- id: commitizen
stages: [commit-msg]
- repo: https://github.com/pycqa/isort.git
rev: 5.12.0
hooks:
- id: isort
args: ["--profile", "black", "--filter-files"]
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.3.0
hooks:
- id: trailing-whitespace
- id: check-merge-conflict
- id: check-ast
- id: check-json
- id: mixed-line-ending
- id: end-of-file-fixer
- repo: https://github.com/asottile/pyupgrade
rev: v3.2.3
hooks:
- id: pyupgrade
args: []
- repo: https://github.com/psf/black
rev: 22.10.0
hooks:
- id: black
- repo: https://github.com/pycqa/flake8
rev: 6.1.0
hooks:
- id: flake8
additional_dependencies:
- flake8-annotations
- flake8-bandit
- flake8-commas
- flake8-docstrings
- flake8-pyproject
- flake8-rst-docstrings
- flake8-use-fstring
- flake8-walrus
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.5.0
hooks:
- id: mypy
additional_dependencies:
- sqlalchemy[mypy]
- types-docutils
- types-paramiko
- types-redis
- types-requests
- types-toml
- repo: https://github.com/hadolint/hadolint
rev: v2.12.0
hooks:
- id: hadolint-docker

.tool-versions Normal file

@@ -0,0 +1 @@
python 3.8.3

MANIFEST.in

@@ -1,10 +0,0 @@
include README.rst
include LICENSE.txt
include MANIFEST.in
include setup.py
include setup.cfg
include docs/*.rst
include docs/conf.py
include docs/Makefile
include docs/make.bat

Makefile

@@ -1,28 +1,31 @@
#!/usr/bin/make -f
PE := pipenv run
ST := $(PE) ./setup.py
DNAME := dist/$(shell $(ST) --name)-$(shell $(ST) --version).tar.gz
include Makefile.msg
test tests:
@$(PE) pytest -s -q
help:
$(call amsg,Available targets are:)
$(call amsg,)
$(call amsg,- install)
$(call amsg,- lint)
$(call amsg,- test)
prepare:
@pipenv install --dev
update:
@pipenv update --dev
install:
$(call bmsg,Installing poetry and dependencies.)
$(call qcmd,pip install -U poetry)
$(call qcmd,poetry install)
$(call qcmd,poetry run pre-commit install)
docs:
@$(ST) build_sphinx
lint:
$(call bcmd,pre-commit,run,-poetry run pre-commit run --all-files)
checkdocs:
@$(ST) checkdocs
test:
$(call qcmd,rm -rf htmlcov)
$(call bcmd,pytest,--cov, \
poetry run pytest --cov-report html $(O) $(SPECIFIC_TESTS))
$(call bmsg,HTML coverage is available under the following directory:)
$(call bmsg,file://$(realpath .)/htmlcov/index.html)
dist: $(DNAME)
$(DNAME):
@$(ST) sdist
clean:
$(call rmsg,Cleaning build and cache directories.)
$(call qcmd,rm -rf build .coverage htmlcov .mypy_cache .pytest_cache)
upload: $(DNAME)
@twine upload $(DNAME)
.PHONY: test tests dist docs
# End of file.
.PHONY: help install lint test clean

Makefile.msg Executable file

@@ -0,0 +1,54 @@
#!/usr/bin/make -f
# ANSI escape code modifiers for the colors used below.
color_green := 32
color_red := 31
color_yellow := 33
# Newline - comes in handy in some situations
define \n
endef
# Command message - display basic info about the command, and run it.
define cmd
@$(if $(MAKE_FULL_LOG),,\
printf "\033[1;""$4""m>\033[0m \033[1m%s\033[0m %s\n" "$1" "$2";)
$(if $(MAKE_FULL_LOG),,@)$3
endef
# Quiet command - make it non-quiet if full log is enabled.
define qcmd
$(if $(MAKE_FULL_LOG),,@)$1
endef
# Unconditional message.
define amsg
@printf "\033[1;""$(color_green)""m>\033[0m \033[1m%s\033[0m\n" "$1"
endef
# Normal message - display it.
define msg
$(if $(MAKE_FULL_LOG),,\
@printf "\033[1;""$2""m>\033[0m \033[1m%s\033[0m\n" "$1")
endef
# Build command
define bcmd
$(call cmd,$1,$2,$3,$(color_green))
endef
# Build message
define bmsg
$(call msg,$1,$(color_green))
endef
# Remove message
define rmsg
$(call msg,$1,$(color_red))
endef
# Install message
define imsg
$(call msg,$1,$(color_yellow))
endef

Pipfile

@@ -1,18 +0,0 @@
[[source]]
url = 'https://pypi.python.org/simple'
verify_ssl = true
name = 'pypi'
[requires]
python_version = '3.8'
[packages]
regex = '*'
thcolor = '*'
[dev-packages]
sphinx = '*'
sphinx-rtd-theme = "*"
"collective.checkdocs" = '*'
pudb = '*'
pytest = '*'

Pipfile.lock generated

@@ -1,349 +0,0 @@
{
"_meta": {
"hash": {
"sha256": "0c7b9eefc6928bde82b4bea8ae1103e923d189c8a511d5ee913a280ac530bc6a"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.8"
},
"sources": [
{
"name": "pypi",
"url": "https://pypi.python.org/simple",
"verify_ssl": true
}
]
},
"default": {
"regex": {
"hashes": [
"sha256:08997a37b221a3e27d68ffb601e45abfb0093d39ee770e4257bd2f5115e8cb0a",
"sha256:112e34adf95e45158c597feea65d06a8124898bdeac975c9087fe71b572bd938",
"sha256:1700419d8a18c26ff396b3b06ace315b5f2a6e780dad387e4c48717a12a22c29",
"sha256:2f6f211633ee8d3f7706953e9d3edc7ce63a1d6aad0be5dcee1ece127eea13ae",
"sha256:52e1b4bef02f4040b2fd547357a170fc1146e60ab310cdbdd098db86e929b387",
"sha256:55b4c25cbb3b29f8d5e63aeed27b49fa0f8476b0d4e1b3171d85db891938cc3a",
"sha256:5aaa5928b039ae440d775acea11d01e42ff26e1561c0ffcd3d805750973c6baf",
"sha256:654cb773b2792e50151f0e22be0f2b6e1c3a04c5328ff1d9d59c0398d37ef610",
"sha256:690f858d9a94d903cf5cada62ce069b5d93b313d7d05456dbcd99420856562d9",
"sha256:6ad8663c17db4c5ef438141f99e291c4d4edfeaacc0ce28b5bba2b0bf273d9b5",
"sha256:89cda1a5d3e33ec9e231ece7307afc101b5217523d55ef4dc7fb2abd6de71ba3",
"sha256:92d8a043a4241a710c1cf7593f5577fbb832cf6c3a00ff3fc1ff2052aff5dd89",
"sha256:95fa7726d073c87141f7bbfb04c284901f8328e2d430eeb71b8ffdd5742a5ded",
"sha256:97712e0d0af05febd8ab63d2ef0ab2d0cd9deddf4476f7aa153f76feef4b2754",
"sha256:b2ba0f78b3ef375114856cbdaa30559914d081c416b431f2437f83ce4f8b7f2f",
"sha256:bae83f2a56ab30d5353b47f9b2a33e4aac4de9401fb582b55c42b132a8ac3868",
"sha256:c78e66a922de1c95a208e4ec02e2e5cf0bb83a36ceececc10a72841e53fbf2bd",
"sha256:cf59bbf282b627130f5ba68b7fa3abdb96372b24b66bdf72a4920e8153fc7910",
"sha256:e3cdc9423808f7e1bb9c2e0bdb1c9dc37b0607b30d646ff6faf0d4e41ee8fee3",
"sha256:e9b64e609d37438f7d6e68c2546d2cb8062f3adb27e6336bc129b51be20773ac",
"sha256:fbff901c54c22425a5b809b914a3bfaf4b9570eee0e5ce8186ac71eb2025191c"
],
"index": "pypi",
"version": "==2020.6.8"
},
"thcolor": {
"hashes": [
"sha256:a93a535f7f81b5e38460531b5731f0fa8c4b30b2a63d1fe763c2ac15e37df8b7"
],
"index": "pypi",
"version": "==0.3.1"
}
},
"develop": {
"alabaster": {
"hashes": [
"sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359",
"sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"
],
"version": "==0.7.12"
},
"attrs": {
"hashes": [
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==19.3.0"
},
"babel": {
"hashes": [
"sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38",
"sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.8.0"
},
"certifi": {
"hashes": [
"sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3",
"sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"
],
"version": "==2020.6.20"
},
"chardet": {
"hashes": [
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
],
"version": "==3.0.4"
},
"collective.checkdocs": {
"hashes": [
"sha256:3a5328257c5224bc72753820c182910d7fb336bc1dba5e09113d48566655e46e"
],
"index": "pypi",
"version": "==0.2"
},
"docutils": {
"hashes": [
"sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af",
"sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==0.16"
},
"idna": {
"hashes": [
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6",
"sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.10"
},
"imagesize": {
"hashes": [
"sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1",
"sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.2.0"
},
"jinja2": {
"hashes": [
"sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0",
"sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.11.2"
},
"markupsafe": {
"hashes": [
"sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
"sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
"sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
"sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
"sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42",
"sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
"sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
"sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
"sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
"sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
"sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b",
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
"sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15",
"sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
"sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
"sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
"sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
"sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
"sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
"sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
"sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
"sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
"sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
"sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
"sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
"sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
"sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
"sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2",
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7",
"sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.1.1"
},
"more-itertools": {
"hashes": [
"sha256:68c70cc7167bdf5c7c9d8f6954a7837089c6a36bf565383919bb595efb8a17e5",
"sha256:b78134b2063dd214000685165d81c154522c3ee0a1c0d4d113c80361c234c5a2"
],
"markers": "python_version >= '3.5'",
"version": "==8.4.0"
},
"packaging": {
"hashes": [
"sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8",
"sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==20.4"
},
"pluggy": {
"hashes": [
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.13.1"
},
"pudb": {
"hashes": [
"sha256:e8f0ea01b134d802872184b05bffc82af29a1eb2f9374a277434b932d68f58dc"
],
"index": "pypi",
"version": "==2019.2"
},
"py": {
"hashes": [
"sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2",
"sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.9.0"
},
"pygments": {
"hashes": [
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
"sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"
],
"markers": "python_version >= '3.5'",
"version": "==2.6.1"
},
"pyparsing": {
"hashes": [
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1",
"sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"
],
"markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==2.4.7"
},
"pytest": {
"hashes": [
"sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1",
"sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"
],
"index": "pypi",
"version": "==5.4.3"
},
"pytz": {
"hashes": [
"sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed",
"sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"
],
"version": "==2020.1"
},
"requests": {
"hashes": [
"sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b",
"sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'",
"version": "==2.24.0"
},
"six": {
"hashes": [
"sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259",
"sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.15.0"
},
"snowballstemmer": {
"hashes": [
"sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0",
"sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"
],
"version": "==2.0.0"
},
"sphinx": {
"hashes": [
"sha256:74fbead182a611ce1444f50218a1c5fc70b6cc547f64948f5182fb30a2a20258",
"sha256:97c9e3bcce2f61d9f5edf131299ee9d1219630598d9f9a8791459a4d9e815be5"
],
"index": "pypi",
"version": "==3.1.1"
},
"sphinx-rtd-theme": {
"hashes": [
"sha256:22c795ba2832a169ca301cd0a083f7a434e09c538c70beb42782c073651b707d",
"sha256:373413d0f82425aaa28fb288009bf0d0964711d347763af2f1b65cafcb028c82"
],
"index": "pypi",
"version": "==0.5.0"
},
"sphinxcontrib-applehelp": {
"hashes": [
"sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a",
"sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.2"
},
"sphinxcontrib-devhelp": {
"hashes": [
"sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e",
"sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.2"
},
"sphinxcontrib-htmlhelp": {
"hashes": [
"sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f",
"sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.3"
},
"sphinxcontrib-jsmath": {
"hashes": [
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178",
"sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.1"
},
"sphinxcontrib-qthelp": {
"hashes": [
"sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72",
"sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.3"
},
"sphinxcontrib-serializinghtml": {
"hashes": [
"sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc",
"sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"
],
"markers": "python_version >= '3.5'",
"version": "==1.1.4"
},
"urllib3": {
"hashes": [
"sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527",
"sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4' and python_version < '4'",
"version": "==1.25.9"
},
"urwid": {
"hashes": [
"sha256:0896f36060beb6bf3801cb554303fef336a79661401797551ba106d23ab4cd86"
],
"version": "==2.1.0"
},
"wcwidth": {
"hashes": [
"sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784",
"sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"
],
"version": "==0.2.5"
}
}
}

docs/Makefile

@@ -32,5 +32,3 @@ livehtml:
$(SPHINXWATCH) -b html $(SPHINXOPTS) . $(BUILDDIR)/html
.PHONY: livehtml
# End of file.

docs/conf.py

@@ -1,167 +1,120 @@
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
"""Configuration file for the Sphinx documentation builder.
# -- Path setup --------------------------------------------------------------
For the full list of built-in configuration values, see the documentation:
https://www.sphinx-doc.org/en/master/usage/configuration.html
"""
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
from __future__ import annotations
def _add_paths():
import os
import sys
from pathlib import Path
import sys
sys.path.insert(0, os.path.abspath('..'))
_add_paths()
from toml import load as load_toml
# -- Project information -----------------------------------------------------
# Add the module path.
sys.path.insert(0, str(Path(__file__).parent.parent))
sys.path.insert(0, str(Path(__file__).parent / "_ext"))
pyproject = load_toml(open(Path(__file__).parent.parent / "pyproject.toml"))
project = 'textoutpc'
copyright = '2019, Thomas Touhey'
author = 'Thomas Touhey'
project = "textoutpc"
version = str(pyproject["tool"]["poetry"]["version"])
copyright = "2023, Thomas Touhey"
author = "Thomas Touhey"
# The full version, including alpha/beta/rc tags
def _get_release():
from os.path import dirname, join
from pkg_resources import find_distributions as find_dist
module_path = join(dirname(__file__), '..')
dist = next(find_dist(module_path, True))
return dist.version
release = _get_release()
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc'
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.intersphinx",
"sphinx.ext.todo",
"sphinxcontrib.mermaid",
"remove_first_line_in_module_docstrings",
]
# Add any paths that contain templates here, relative to this directory.
templates_path = []
templates_path: list[str] = []
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
html_theme = "furo"
html_static_path = ["_static"]
html_title = f"textoutpc {version}"
html_use_index = False
html_copy_source = False
html_show_sourcelink = False
html_domain_indices = False
html_css_files = ["custom.css"]
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '.goutput*']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = []
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'textoutpcdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
"fastapi": ("https://fastapi.tiangolo.com/", None),
"pydantic": ("https://docs.pydantic.dev/2.4/", None),
"sqlalchemy": ("https://docs.sqlalchemy.org/en/20/", None),
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'textoutpc.tex', 'textoutpc Documentation',
'Thomas Touhey', 'manual'),
]
todo_include_todos = True
mermaid_output_format = "raw"
mermaid_init_js = """
function isDarkMode() {
const color = (
getComputedStyle(document.body)
.getPropertyValue("--color-foreground-primary")
);
# -- Options for manual page output ------------------------------------------
if (color == "#ffffffcc")
return true;
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'textoutpc', 'textoutpc Documentation',
[author], 1)
]
return false;
}
const observer = new MutationObserver(function(mutations) {
mutations.forEach(function(mutation) {
if (
mutation.type != "attributes"
|| mutation.attributeName != "data-theme"
)
return
# -- Options for Texinfo output ----------------------------------------------
const nodes = document.querySelectorAll(".mermaid");
nodes.forEach(node => {
/* Restore the original code before reprocessing. */
node.innerHTML = node.getAttribute("data-original-code");
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'textoutpc', 'textoutpc Documentation',
author, 'textoutpc', 'One line description of project.',
'Miscellaneous'),
]
/* Remove the attribute saying data is processed; it is not! */
if (node.hasAttribute("data-processed"))
node.removeAttribute("data-processed");
});
mermaid.initialize({
theme: isDarkMode() ? "dark" : "base",
darkMode: isDarkMode(),
});
mermaid.run({nodes: nodes, querySelector: ".mermaid"});
});
});
(function (window) {
/* Store original code for diagrams into an attribute directly, since
Mermaid actually completely replaces the content and removes the
original code. */
document.querySelectorAll(".mermaid").forEach(node => {
node.setAttribute("data-original-code", node.innerHTML);
})
mermaid.initialize({
startOnLoad: true,
theme: isDarkMode() ? "dark" : "base",
darkMode: isDarkMode(),
});
observer.observe(document.body, {attributes: true});
})(window);
"""
autodoc_typehints_format = "short"
autodoc_default_options = {
"members": True,
"undoc-members": True,
"show-inheritance": True,
"exclude-members": "model_config, model_fields",
}
autodoc_member_order = "bysource"
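The rewritten docs/conf.py above single-sources the documentation version from pyproject.toml (via toml.load) instead of resolving it through pkg_resources. As a minimal standalone sketch of that lookup, assuming a [tool.poetry] table like the one this commit introduces:

from pathlib import Path

from toml import load as load_toml

# Minimal sketch of the version lookup done in docs/conf.py: read the
# repository's pyproject.toml and pull the version declared under
# [tool.poetry]. The relative path is an assumption for this example;
# conf.py itself resolves the file relative to its own location.
with open(Path("pyproject.toml")) as pyproject_file:
    pyproject = load_toml(pyproject_file)

version = str(pyproject["tool"]["poetry"]["version"])
print(version)

The same value feeds html_title above, so the rendered documentation title always matches the version Poetry will package.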

poetry.lock generated Normal file

@@ -0,0 +1,509 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
[[package]]
name = "cfgv"
version = "3.4.0"
description = "Validate configuration and produce human readable error messages."
optional = false
python-versions = ">=3.8"
files = [
{file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"},
{file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
]
[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "coverage"
version = "7.3.2"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"},
{file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"},
{file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"},
{file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"},
{file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"},
{file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"},
{file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"},
{file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"},
{file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"},
{file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"},
{file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"},
{file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"},
{file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"},
{file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"},
{file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"},
{file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"},
{file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"},
{file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"},
{file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"},
{file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"},
{file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"},
{file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"},
{file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"},
{file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"},
{file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"},
{file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"},
{file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"},
{file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"},
{file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"},
{file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"},
{file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"},
{file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"},
{file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"},
{file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"},
{file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"},
{file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"},
{file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"},
{file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"},
{file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"},
{file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"},
{file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"},
{file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"},
{file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"},
{file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"},
{file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"},
{file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"},
{file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"},
{file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"},
{file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"},
{file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"},
{file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"},
{file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"},
]
[package.dependencies]
tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
[package.extras]
toml = ["tomli"]
[[package]]
name = "distlib"
version = "0.3.7"
description = "Distribution utilities"
optional = false
python-versions = "*"
files = [
{file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"},
{file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"},
]
[[package]]
name = "docutils"
version = "0.20.1"
description = "Docutils -- Python Documentation Utilities"
optional = false
python-versions = ">=3.7"
files = [
{file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"},
{file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"},
]
[[package]]
name = "exceptiongroup"
version = "1.2.0"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
files = [
{file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"},
{file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"},
]
[package.extras]
test = ["pytest (>=6)"]
[[package]]
name = "filelock"
version = "3.13.1"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
{file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"},
{file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"},
]
[package.extras]
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"]
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
typing = ["typing-extensions (>=4.8)"]
[[package]]
name = "identify"
version = "2.5.32"
description = "File identification library for Python"
optional = false
python-versions = ">=3.8"
files = [
{file = "identify-2.5.32-py2.py3-none-any.whl", hash = "sha256:0b7656ef6cba81664b783352c73f8c24b39cf82f926f78f4550eda928e5e0545"},
{file = "identify-2.5.32.tar.gz", hash = "sha256:5d9979348ec1a21c768ae07e0a652924538e8bce67313a73cb0f681cf08ba407"},
]
[package.extras]
license = ["ukkonen"]
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "nodeenv"
version = "1.8.0"
description = "Node.js virtual environment builder"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
files = [
{file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"},
{file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"},
]
[package.dependencies]
setuptools = "*"
[[package]]
name = "packaging"
version = "23.2"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
]
[[package]]
name = "platformdirs"
version = "4.0.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
optional = false
python-versions = ">=3.7"
files = [
{file = "platformdirs-4.0.0-py3-none-any.whl", hash = "sha256:118c954d7e949b35437270383a3f2531e99dd93cf7ce4dc8340d3356d30f173b"},
{file = "platformdirs-4.0.0.tar.gz", hash = "sha256:cb633b2bcf10c51af60beb0ab06d2f1d69064b43abf4c185ca6b28865f3f9731"},
]
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"]
test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"]
[[package]]
name = "pluggy"
version = "1.3.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.8"
files = [
{file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"},
{file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pre-commit"
version = "3.5.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = false
python-versions = ">=3.8"
files = [
{file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"},
{file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"},
]
[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
[[package]]
name = "pytest"
version = "7.4.3"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"},
{file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-cov"
version = "4.1.0"
description = "Pytest plugin for measuring coverage."
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
{file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
]
[package.dependencies]
coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
[[package]]
name = "pyyaml"
version = "6.0.1"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.6"
files = [
{file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
{file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
{file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
{file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
{file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
{file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
{file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
{file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
{file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
{file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
{file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
{file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
]
[[package]]
name = "regex"
version = "2023.10.3"
description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.7"
files = [
{file = "regex-2023.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4c34d4f73ea738223a094d8e0ffd6d2c1a1b4c175da34d6b0de3d8d69bee6bcc"},
{file = "regex-2023.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f4e49fc3ce020f65411432183e6775f24e02dff617281094ba6ab079ef0915"},
{file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cd1bccf99d3ef1ab6ba835308ad85be040e6a11b0977ef7ea8c8005f01a3c29"},
{file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:81dce2ddc9f6e8f543d94b05d56e70d03a0774d32f6cca53e978dc01e4fc75b8"},
{file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c6b4d23c04831e3ab61717a707a5d763b300213db49ca680edf8bf13ab5d91b"},
{file = "regex-2023.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c15ad0aee158a15e17e0495e1e18741573d04eb6da06d8b84af726cfc1ed02ee"},
{file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6239d4e2e0b52c8bd38c51b760cd870069f0bdf99700a62cd509d7a031749a55"},
{file = "regex-2023.10.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4a8bf76e3182797c6b1afa5b822d1d5802ff30284abe4599e1247be4fd6b03be"},
{file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9c727bbcf0065cbb20f39d2b4f932f8fa1631c3e01fcedc979bd4f51fe051c5"},
{file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3ccf2716add72f80714b9a63899b67fa711b654be3fcdd34fa391d2d274ce767"},
{file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:107ac60d1bfdc3edb53be75e2a52aff7481b92817cfdddd9b4519ccf0e54a6ff"},
{file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:00ba3c9818e33f1fa974693fb55d24cdc8ebafcb2e4207680669d8f8d7cca79a"},
{file = "regex-2023.10.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f0a47efb1dbef13af9c9a54a94a0b814902e547b7f21acb29434504d18f36e3a"},
{file = "regex-2023.10.3-cp310-cp310-win32.whl", hash = "sha256:36362386b813fa6c9146da6149a001b7bd063dabc4d49522a1f7aa65b725c7ec"},
{file = "regex-2023.10.3-cp310-cp310-win_amd64.whl", hash = "sha256:c65a3b5330b54103e7d21cac3f6bf3900d46f6d50138d73343d9e5b2900b2353"},
{file = "regex-2023.10.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:90a79bce019c442604662d17bf69df99090e24cdc6ad95b18b6725c2988a490e"},
{file = "regex-2023.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c7964c2183c3e6cce3f497e3a9f49d182e969f2dc3aeeadfa18945ff7bdd7051"},
{file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef80829117a8061f974b2fda8ec799717242353bff55f8a29411794d635d964"},
{file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5addc9d0209a9afca5fc070f93b726bf7003bd63a427f65ef797a931782e7edc"},
{file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c148bec483cc4b421562b4bcedb8e28a3b84fcc8f0aa4418e10898f3c2c0eb9b"},
{file = "regex-2023.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d1f21af4c1539051049796a0f50aa342f9a27cde57318f2fc41ed50b0dbc4ac"},
{file = "regex-2023.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b9ac09853b2a3e0d0082104036579809679e7715671cfbf89d83c1cb2a30f58"},
{file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ebedc192abbc7fd13c5ee800e83a6df252bec691eb2c4bedc9f8b2e2903f5e2a"},
{file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d8a993c0a0ffd5f2d3bda23d0cd75e7086736f8f8268de8a82fbc4bd0ac6791e"},
{file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:be6b7b8d42d3090b6c80793524fa66c57ad7ee3fe9722b258aec6d0672543fd0"},
{file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4023e2efc35a30e66e938de5aef42b520c20e7eda7bb5fb12c35e5d09a4c43f6"},
{file = "regex-2023.10.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d47840dc05e0ba04fe2e26f15126de7c755496d5a8aae4a08bda4dd8d646c54"},
{file = "regex-2023.10.3-cp311-cp311-win32.whl", hash = "sha256:9145f092b5d1977ec8c0ab46e7b3381b2fd069957b9862a43bd383e5c01d18c2"},
{file = "regex-2023.10.3-cp311-cp311-win_amd64.whl", hash = "sha256:b6104f9a46bd8743e4f738afef69b153c4b8b592d35ae46db07fc28ae3d5fb7c"},
{file = "regex-2023.10.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff507ae210371d4b1fe316d03433ac099f184d570a1a611e541923f78f05037"},
{file = "regex-2023.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be5e22bbb67924dea15039c3282fa4cc6cdfbe0cbbd1c0515f9223186fc2ec5f"},
{file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a992f702c9be9c72fa46f01ca6e18d131906a7180950958f766c2aa294d4b41"},
{file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7434a61b158be563c1362d9071358f8ab91b8d928728cd2882af060481244c9e"},
{file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2169b2dcabf4e608416f7f9468737583ce5f0a6e8677c4efbf795ce81109d7c"},
{file = "regex-2023.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9e908ef5889cda4de038892b9accc36d33d72fb3e12c747e2799a0e806ec841"},
{file = "regex-2023.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12bd4bc2c632742c7ce20db48e0d99afdc05e03f0b4c1af90542e05b809a03d9"},
{file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bc72c231f5449d86d6c7d9cc7cd819b6eb30134bb770b8cfdc0765e48ef9c420"},
{file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bce8814b076f0ce5766dc87d5a056b0e9437b8e0cd351b9a6c4e1134a7dfbda9"},
{file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:ba7cd6dc4d585ea544c1412019921570ebd8a597fabf475acc4528210d7c4a6f"},
{file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b0c7d2f698e83f15228ba41c135501cfe7d5740181d5903e250e47f617eb4292"},
{file = "regex-2023.10.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5a8f91c64f390ecee09ff793319f30a0f32492e99f5dc1c72bc361f23ccd0a9a"},
{file = "regex-2023.10.3-cp312-cp312-win32.whl", hash = "sha256:ad08a69728ff3c79866d729b095872afe1e0557251da4abb2c5faff15a91d19a"},
{file = "regex-2023.10.3-cp312-cp312-win_amd64.whl", hash = "sha256:39cdf8d141d6d44e8d5a12a8569d5a227f645c87df4f92179bd06e2e2705e76b"},
{file = "regex-2023.10.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a3ee019a9befe84fa3e917a2dd378807e423d013377a884c1970a3c2792d293"},
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76066d7ff61ba6bf3cb5efe2428fc82aac91802844c022d849a1f0f53820502d"},
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe50b61bab1b1ec260fa7cd91106fa9fece57e6beba05630afe27c71259c59b"},
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fd88f373cb71e6b59b7fa597e47e518282455c2734fd4306a05ca219a1991b0"},
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3ab05a182c7937fb374f7e946f04fb23a0c0699c0450e9fb02ef567412d2fa3"},
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dac37cf08fcf2094159922edc7a2784cfcc5c70f8354469f79ed085f0328ebdf"},
{file = "regex-2023.10.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54ddd0bb8fb626aa1f9ba7b36629564544954fff9669b15da3610c22b9a0991"},
{file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3367007ad1951fde612bf65b0dffc8fd681a4ab98ac86957d16491400d661302"},
{file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:16f8740eb6dbacc7113e3097b0a36065a02e37b47c936b551805d40340fb9971"},
{file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4f2ca6df64cbdd27f27b34f35adb640b5d2d77264228554e68deda54456eb11"},
{file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:39807cbcbe406efca2a233884e169d056c35aa7e9f343d4e78665246a332f597"},
{file = "regex-2023.10.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7eece6fbd3eae4a92d7c748ae825cbc1ee41a89bb1c3db05b5578ed3cfcfd7cb"},
{file = "regex-2023.10.3-cp37-cp37m-win32.whl", hash = "sha256:ce615c92d90df8373d9e13acddd154152645c0dc060871abf6bd43809673d20a"},
{file = "regex-2023.10.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f649fa32fe734c4abdfd4edbb8381c74abf5f34bc0b3271ce687b23729299ed"},
{file = "regex-2023.10.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b98b7681a9437262947f41c7fac567c7e1f6eddd94b0483596d320092004533"},
{file = "regex-2023.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:91dc1d531f80c862441d7b66c4505cd6ea9d312f01fb2f4654f40c6fdf5cc37a"},
{file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82fcc1f1cc3ff1ab8a57ba619b149b907072e750815c5ba63e7aa2e1163384a4"},
{file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7979b834ec7a33aafae34a90aad9f914c41fd6eaa8474e66953f3f6f7cbd4368"},
{file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef71561f82a89af6cfcbee47f0fabfdb6e63788a9258e913955d89fdd96902ab"},
{file = "regex-2023.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd829712de97753367153ed84f2de752b86cd1f7a88b55a3a775eb52eafe8a94"},
{file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00e871d83a45eee2f8688d7e6849609c2ca2a04a6d48fba3dff4deef35d14f07"},
{file = "regex-2023.10.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:706e7b739fdd17cb89e1fbf712d9dc21311fc2333f6d435eac2d4ee81985098c"},
{file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cc3f1c053b73f20c7ad88b0d1d23be7e7b3901229ce89f5000a8399746a6e039"},
{file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6f85739e80d13644b981a88f529d79c5bdf646b460ba190bffcaf6d57b2a9863"},
{file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:741ba2f511cc9626b7561a440f87d658aabb3d6b744a86a3c025f866b4d19e7f"},
{file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e77c90ab5997e85901da85131fd36acd0ed2221368199b65f0d11bca44549711"},
{file = "regex-2023.10.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:979c24cbefaf2420c4e377ecd1f165ea08cc3d1fbb44bdc51bccbbf7c66a2cb4"},
{file = "regex-2023.10.3-cp38-cp38-win32.whl", hash = "sha256:58837f9d221744d4c92d2cf7201c6acd19623b50c643b56992cbd2b745485d3d"},
{file = "regex-2023.10.3-cp38-cp38-win_amd64.whl", hash = "sha256:c55853684fe08d4897c37dfc5faeff70607a5f1806c8be148f1695be4a63414b"},
{file = "regex-2023.10.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c54e23836650bdf2c18222c87f6f840d4943944146ca479858404fedeb9f9af"},
{file = "regex-2023.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:69c0771ca5653c7d4b65203cbfc5e66db9375f1078689459fe196fe08b7b4930"},
{file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ac965a998e1388e6ff2e9781f499ad1eaa41e962a40d11c7823c9952c77123e"},
{file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c0e8fae5b27caa34177bdfa5a960c46ff2f78ee2d45c6db15ae3f64ecadde14"},
{file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c56c3d47da04f921b73ff9415fbaa939f684d47293f071aa9cbb13c94afc17d"},
{file = "regex-2023.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ef1e014eed78ab650bef9a6a9cbe50b052c0aebe553fb2881e0453717573f52"},
{file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d29338556a59423d9ff7b6eb0cb89ead2b0875e08fe522f3e068b955c3e7b59b"},
{file = "regex-2023.10.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9c6d0ced3c06d0f183b73d3c5920727268d2201aa0fe6d55c60d68c792ff3588"},
{file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:994645a46c6a740ee8ce8df7911d4aee458d9b1bc5639bc968226763d07f00fa"},
{file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:66e2fe786ef28da2b28e222c89502b2af984858091675044d93cb50e6f46d7af"},
{file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:11175910f62b2b8c055f2b089e0fedd694fe2be3941b3e2633653bc51064c528"},
{file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:06e9abc0e4c9ab4779c74ad99c3fc10d3967d03114449acc2c2762ad4472b8ca"},
{file = "regex-2023.10.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fb02e4257376ae25c6dd95a5aec377f9b18c09be6ebdefa7ad209b9137b73d48"},
{file = "regex-2023.10.3-cp39-cp39-win32.whl", hash = "sha256:3b2c3502603fab52d7619b882c25a6850b766ebd1b18de3df23b2f939360e1bd"},
{file = "regex-2023.10.3-cp39-cp39-win_amd64.whl", hash = "sha256:adbccd17dcaff65704c856bd29951c58a1bd4b2b0f8ad6b826dbd543fe740988"},
{file = "regex-2023.10.3.tar.gz", hash = "sha256:3fef4f844d2290ee0ba57addcec17eec9e3df73f10a2748485dfd6a3a188cc0f"},
]
[[package]]
name = "setuptools"
version = "69.0.2"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"},
{file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "thcolor"
version = "0.4"
description = "color management module"
optional = false
python-versions = ">=3.6"
files = [
{file = "thcolor-0.4.tar.gz", hash = "sha256:289ec8650393723aeaa34d999427775762d28e6ad53f1705c4d81d6f6d25b38e"},
]
[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.7"
files = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
[[package]]
name = "typing-extensions"
version = "4.8.0"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"},
{file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"},
]
[[package]]
name = "virtualenv"
version = "20.24.7"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.7"
files = [
{file = "virtualenv-20.24.7-py3-none-any.whl", hash = "sha256:a18b3fd0314ca59a2e9f4b556819ed07183b3e9a3702ecfe213f593d44f7b3fd"},
{file = "virtualenv-20.24.7.tar.gz", hash = "sha256:69050ffb42419c91f6c1284a7b24e0475d793447e35929b488bf6a0aade39353"},
]
[package.dependencies]
distlib = ">=0.3.7,<1"
filelock = ">=3.12.2,<4"
platformdirs = ">=3.9.1,<5"
[package.extras]
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.8"
content-hash = "a044960cad543cac882e1c301120fbd2fd5c7a3fa6859ae4f5a6f429b2363101"
118
pyproject.toml Normal file
View File
@ -0,0 +1,118 @@
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.poetry]
name = "textoutpc"
version = "0.3"
description = "textout() equivalent from Planète Casio"
keywords = ["planète casio", "textout", "bbcode", "translator", "parser"]
readme = "README.rst"
homepage = "https://textout.touhey.pro/"
repository = "https://forge.touhey.org/pc/textout.git"
authors = ["Thomas Touhey <thomas@touhey.fr>"]
classifiers = [
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Natural Language :: French",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries",
"Topic :: Text Processing :: Markup :: HTML"
]
[tool.poetry.dependencies]
python = "^3.8"
docutils = "^0.20.1"
regex = "^2023.10.3"
thcolor = "^0.4"
typing-extensions = "^4.8.0"
[tool.poetry.group.dev.dependencies]
pre-commit = "^3.3.3"
pytest = "^7.4.3"
pytest-cov = "^4.1.0"
[tool.black]
target_version = ['py311']
line-length = 79
[tool.commitizen]
name = "cz_conventional_commits"
tag_format = "$version"
update_changelog_on_bump = true
version = "0.1.4"
version_files = [
"pyproject.toml:version",
]
[tool.coverage.report]
exclude_lines = [
"@abstractmethod",
"pragma: no cover"
]
[tool.flake8]
application-import-names = "textoutpc"
ignore = [
"ANN002", # No type annotations for *args
"ANN003", # No type annotations for **kwargs
"ANN101", # No type annotations for self in method
"ANN102", # No type annotations for cls in classmethod
"ANN204", # No return type for special method
"ANN401", # We allow typing.Any in certain annoying cases
"D105", # No need for docstrings in magic __methods__
"D107", # No need for docstrings in __init__
"E203", # flake8 and black disagree on this.
"FI58", # We use future annotations.
"S410", # We use lxml for parsing HTML, so we're fine.
"S413", # We use pycryptodome, not pycrypto
"W503", # Line breaks are before binary operators, not after
]
exclude = ["try*.py"]
per-file-ignores = [
# ANN001: We use fixtures for most callables in tests.
# ANN201: No type annotations for return values for test functions.
# D400,D205: First sentences in test docstrings can be longer than a line.
# S101: Yes, we use assert in tests run by pytest, and it's by design.
"tests/*:ANN001,ANN201,D400,D205,S101"
]
rst-roles = [
"py:class",
"py:attr",
"py:data",
"py:meth",
"py:exc",
"py:mod",
]
rst-directives = ["py:data", "doctest"]
[tool.isort]
add_imports = ["from __future__ import annotations"]
ensure_newline_before_comments = true
force_grid_wrap = 0
force_sort_within_sections = true
group_by_package = true
include_trailing_comma = true
line_length = 79
lines_after_imports = 2
multi_line_output = 3
no_inline_sort = true
profile = "black"
py_version = 311
use_parentheses = true
combine_as_imports = true
[tool.mypy]
ignore_missing_imports = true
mypy_path = "."
warn_unused_ignores = true
show_error_codes = true
plugins = ["sqlalchemy.ext.mypy.plugin"]
[tool.pytest.ini_options]
addopts = """
--cov=textoutpc --cov-report term --doctest-modules
--ignore=docs --ignore=migrations
"""
View File
@ -1,46 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" textout to HTML converter for the command line. """
import sys, argparse
import textoutpc
def parse_args():
""" Parse the arguments. """
ap = argparse.ArgumentParser(prog='textout2html',
description='Convert textout BBcode to HTML.')
ap.add_argument('-o', dest='output', default=sys.stdout,
type=lambda x: sys.stdout if x == '-' else open(x, 'w'),
help='the output source, stdout by default.')
ap.add_argument('--inline', dest='inline', action='store_true',
help='only inline tags will be interpreted')
ap.add_argument('--obsolete-tags', dest='obs_tags', action='store_true',
help='whether to use obsolete HTML tags such as <b>')
ap.add_argument('--label-prefix', dest='lbl_prefix',
help='prefix to use for label tags, e.g. "msg55459-"')
ap.add_argument('input', nargs='?', default=sys.stdin,
type=lambda x: sys.stdin if x == '-' else open(x, 'r'),
help='the input source, stdin by default.')
args = ap.parse_args()
return args
def main():
""" Main function of the script. """
args = parse_args()
print(textoutpc.tohtml(args.input.read(), inline=args.inline,
obsolete_tags=args.obs_tags, label_prefix=args.lbl_prefix),
file=args.output, end='')
if __name__ == '__main__':
try:
main()
except Exception as e:
print("textout2html: error: " + str(e))
# End of file.
View File
@ -1,38 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" textout to lightscript converter for the command line. """
import sys, argparse
import textoutpc
def parse_args():
""" Parse the arguments. """
ap = argparse.ArgumentParser(prog='textout2html',
description='Convert textout BBcode to HTML.')
ap.add_argument('-o', dest='output', default=sys.stdout,
type=lambda x: sys.stdout if x == '-' else open(x, 'w'),
help='the output source, stdout by default.')
ap.add_argument('input', nargs='?', default=sys.stdin,
type=lambda x: sys.stdin if x == '-' else open(x, 'r'),
help='the input source, stdin by default.')
args = ap.parse_args()
return args
def main():
""" Main function of the script. """
args = parse_args()
print(textoutpc.tolightscript(args.input.read()), file=args.output, end='')
if __name__ == '__main__':
try:
main()
except Exception as e:
print("textout2ls: error: " + str(e))
# End of file.
View File
@ -1,50 +0,0 @@
[metadata]
name = textoutpc
version = attr: textoutpc.version.version
url = https://textout.touhey.pro/
project_urls =
Documentation = https://textout.touhey.pro/docs/
author = Thomas Touhey
author_email = thomas@touhey.fr
description = textout() equivalent from Planète Casio
long_description = file: README.rst
keywords = planète casio, textout, bbcode, translator, parser
license = MIT
classifiers =
Development Status :: 2 - Pre-Alpha
License :: OSI Approved :: MIT License
Natural Language :: French
Operating System :: OS Independent
Programming Language :: Python :: 3
Intended Audience :: Developers
Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries
Topic :: Text Processing :: Markup :: HTML
[options]
zip_safe = False
include_package_data = True
packages = textoutpc, textoutpc.builtin
test_suite = test
scripts =
scripts/textout2html
scripts/textout2ls
install_requires =
regex
thcolor
[options.package_data]
* = *.txt, *.rst
[build_sphinx]
source-dir = docs
[wheel]
universal = True
[flake8]
ignore = F401, F403, E128, E131, E241, E261, E265, E271, W191
exclude = .git, __pycache__, build, dist, docs/conf.py, test.py, test
[tool:pytest]
python_files = tests.py test_*.py *_tests.py
testpaths = tests
View File
@ -1,22 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc Python 3.x module, which is MIT-licensed.
#******************************************************************************
""" Setup script for the textoutpc Python package and script. """
from setuptools import setup as _setup
kwargs = {}
try:
from sphinx.setup_command import BuildDoc as _BuildDoc
kwargs['cmdclass'] = {'build_sphinx': _BuildDoc}
except:
pass
# Actually, most of the project's data is read from the `setup.cfg` file.
_setup(**kwargs)
# End of file.
View File
@ -1,11 +1,8 @@
#!/usr/bin/env python3
#******************************************************************************
# ******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Unit tests for the `textoutpc` Python module. """
# ******************************************************************************
"""Unit tests for the ``textoutpc`` Python module."""
# This file is only there to indicate that the folder is a module.
# It doesn't actually contain code.
# End of file.
from __future__ import annotations
54
tests/test_builtin.py Normal file
View File
@ -0,0 +1,54 @@
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
# *****************************************************************************
"""Builtin tag tests for textoutpc."""
from __future__ import annotations
import pytest
from textoutpc.builtin import AlignTag, TextTag
from textoutpc.exceptions import InvalidValue, MissingValue, UnexpectedValue
@pytest.mark.parametrize(
"name,value,exc",
(
("[font]", None, MissingValue),
("[font]", "invalid", InvalidValue),
("[arial]", "unexpected", UnexpectedValue),
("[big]", "unexpected", UnexpectedValue),
("[small]", "unexpected", UnexpectedValue),
("[size]", None, MissingValue),
("[size]", "invalid", InvalidValue),
("[size]", "-1", InvalidValue),
("[size]", "-1.00", InvalidValue),
("[c]", None, MissingValue),
("[c]", "rgb(", InvalidValue),
("[f]", None, MissingValue),
("[f]", "rgb(", InvalidValue),
("[red]", "unexpected", UnexpectedValue),
("[css]", None, MissingValue),
),
)
def test_text_tag_errors(exc: type[Exception], name: str, value: str | None):
"""Test the text tag errors."""
with pytest.raises(exc):
TextTag(name=name, value=value)
def test_align_tag_errors():
"""Test the align tag errors."""
with pytest.raises(MissingValue):
AlignTag(name="[align]")
with pytest.raises(InvalidValue):
AlignTag(name="[align]", value="invalid")
with pytest.raises(ValueError):
AlignTag(name="[invalid]")
with pytest.raises(UnexpectedValue):
AlignTag(name="[center]", value="unexpected")
View File
@ -1,230 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Unit tests for the Python version of textout. """
import pytest
from textoutpc import tohtml as _tohtml
# Define the tests.
@pytest.mark.parametrize('test_input,expected', (
# Basic text.
('', ''),
('lol', '<p>lol</p>'),
('<script>alert(1);</script>',
'<p>&lt;script&gt;alert(1);&lt;/script&gt;</p>'),
# Other tests. (?)
('[a][c][/a]', '<p>[a][c][/a]</p>'),
('[a][a]', '<p>[a][a]</p>'),
("[<>]><[/<>]", "<p>[&lt;&gt;]&gt;&lt;[/&lt;&gt;]</p>"),
# Autolinking.
('(http://www.example.org/some-[damn-url]-(youknow))',
'<p>(<a href="http://www.example.org/some-[damn-url]-(youknow)">' \
'http://www.example.org/some-[damn-url]-(youknow)</a>)</p>'),
('https://thomas.touhey.fr/, tu vois ?',
'<p><a href="https://thomas.touhey.fr/">https://thomas.touhey.fr/' \
'</a>, tu vois ?</p>'),
# Basic text styling.
('[u][b][a][i][/b]', "<p><u><b>[a]</b></u></p>"),
('[u][b]a[/]mdr', '<p><u><b>a</b>mdr</u></p>'),
# Blocks, alignment.
('[left]', ''),
('[left]lol[/]hi', '<div class="align-left"><p>lol</p></div><p>hi</p>'),
('a[justify]b', '<p>a</p><div class="align-justify"><p>b</p></div>'),
('a[i]', '<p>a</p>'),
('a[i][justify]b', '<p>a</p>' \
'<div class="align-justify"><p><i>b</i></p></div>'),
('a[i]k[center]b', '<p>a<i>k</i></p>' \
'<div class="align-center"><p><i>b</i></p></div>'),
('a[i]k[center][b]b[justify]c[/center]d[/]wouhou',
'<p>a<i>k</i></p>' \
'<div class="align-center"><p><i><b>b</b></i></p>' \
'<div class="align-justify"><p><i><b>c</b></i></p></div></div>' \
'<p><i>d</i>wouhou</p>'),
# Show tag for super preprocessing blocks.
('[show]lol', '<p><span class="inline-code">lol</span></p>'),
('[quote][show][justify]hehe',
'<div class="citation"><p><span class="inline-code">' \
'&lt;div class=&quot;align-justify&quot;&gt;' \
'&lt;p&gt;hehe&lt;/p&gt;&lt;/div&gt;' \
'</span></p></div>'),
# Titles.
('lolk[title]smth', '<p>lolk</p>' '<h1 class="title">smth</h1>'),
('[subtitle]<>', '<h2 class="subtitle">&lt;&gt;</h2>'),
# Fonts.
('[arial]test', '<p><span style="font-family: arial">test</span></p>'),
('[font=mono]stereo',
'<p><span style="font-family: monospace">stereo</span></p>'),
('[haettenschweiler]', ''),
('[font=hello]yea', '<p>[font=hello]yea</p>'),
# Color.
('yea[color=blue]dabadee', \
'<p>yea<span style="color: #0000FF">dabadee</span></p>'),
('[color=#12345F]a', '<p><span style="color: #12345F">a</span></p>'),
('[color=#123]a', '<p><span style="color: #112233">a</span></p>'),
('[color=123]a', '<p><span style="color: #010203">a</span></p>'),
('[color=chucknorris]a', '<p><span style="color: #C00000">a</span></p>'),
('[color=rgb(1, 22,242)]a',
'<p><span style="color: #0116F2">a</span></p>'),
('[color= rgb (1,22, 242 , 50.0% )]a',
'<p><span style="color: #0116F2; ' \
'color: rgba(1, 22, 242, 0.5)">a</span></p>'),
('[color=rgba(1,22,242,0.500)]a', '<p><span style="color: #0116F2; ' \
'color: rgba(1, 22, 242, 0.5)">a</span></p>'),
('[color=rbga(5, 7)]b', '<p><span style="color: #050007">b</span></p>'),
('[color=hsl(0, 1,50.0%)]r',
'<p><span style="color: #FF0000">r</span></p>'),
# TODO: hls, hwb
# Links.
('[url]', '<p>[url]</p>'),
('[url=https://thomas.touhey.fr/]mon profil est le meilleur[/url]',
'<p><a href="https://thomas.touhey.fr/">mon profil est le meilleur' \
'</a></p>'),
('[url=https://thomas.touhey.fr/]',
'<p><a href="https://thomas.touhey.fr/">https://thomas.touhey.fr/' \
'</a></p>'),
('[url=http://hey.org/lol[]>"a]', '<p><a href="http://hey.org/lol[]&gt;' \
'&quot;a">' 'http://hey.org/lol[]&gt;&quot;a</a></p>'),
('[url]javascript:alert(1)[/url]',
'<p>[url]javascript:alert(1)[/url]</p>'),
('[url]<script>alert(1);</script>[/url]',
'<p>[url]&lt;script&gt;alert(1);&lt;/script&gt;[/url]</p>'),
('[profil]cake[/profil]',
'<p><a href="https://www.planet-casio.com/Fr/compte/voir_profil.php' \
'?membre=cake">cake</a></p>'),
('[profile]ekac',
'<p><a href="https://www.planet-casio.com/Fr/compte/voir_profil.php' \
'?membre=ekac">ekac</a></p>'),
# Quotes.
('[quote]', ''),
('[quote]a',
'<div class="citation"><p>a</p></div>'),
('[quote=Test 1 :)]lel[/quote]',
'<div class="citation"><p><b>Test 1 ' \
'<img src="/images/smileys/smile.gif"> a écrit:</b></p><p>' \
'lel</p></div>'),
# Spoilers.
('[spoiler]', ''),
('[spoiler=Hello|world> :D]Close this, quick![/spoiler]',
'<div class="spoiler"><div class="title on" ' \
'onclick="toggleSpoiler(this.parentNode, ' "'open'" ');"><p>Hello' \
'</p></div><div class="title off" ' \
'onclick="toggleSpoiler(this.parentNode, ' "'close'" ');"><p>world' \
'&gt; <img src="/images/smileys/grin.gif"></p></div>' \
'<div class="off"><p>Close this, quick!</p></div></div>'),
# Code.
('[code]', ''),
("`[code]`", '<p><span class="inline-code">[code]</span></p>'),
('[inlinecode]', ''),
("[inlinecode]`[/inlinecode]",
'<p><span class="inline-code">`</span></p>'),
("[b]a[noeval]b[/b]c[/noeval]d", "<p><b>ab[/b]cd</b></p>"),
("a[noeval]b[noeval]c[/noeval]d[/noeval]e",
"<p>ab[noeval]c[/noeval]de</p>"),
("[noeval]``[/noeval]", "<p>``</p>"),
('[noeval]<>[/noeval]', '<p>&lt;&gt;</p>'),
# Pictures.
('[img]', '<p>[img]</p>'),
('[img]"incroyable<>"[/img]',
'<p>[img]&quot;incroyable&lt;&gt;&quot;[/img]</p>'),
('[img=right|float|12x345]https://example.org/image.png',
'<img src="https://example.org/image.png" class="img-float-right" ' \
'style="width: 12px; height: 345px" />'),
# Videos.
('[video]"><script>alert(1)</script>[/video]',
'<p>[video]&quot;&gt;&lt;script&gt;alert(1)&lt;/script&gt;' \
'[/video]</p>'),
('[video]<script>alert(document.cookie)</script>[/video]',
'<p>[video]&lt;script&gt;alert(document.cookie)&lt;/script&gt;' \
'[/video]</p>'),
('[video]https://www.youtube.com/watch?v=6odDOOyUawY[/video]',
'<div class="video-wrapper" style="padding-bottom: 56.25%"><iframe ' \
'src="https://www.youtube.com/embed/6odDOOyUawY" ' \
'frameborder="0" allowfullscreen></iframe></div>'),
('[video]https://www.youtube.com/watch?v=<script>alert(1)</script>',
'<p><a href="https://www.youtube.com/watch?v=&lt;script&gt;alert(1)' \
'&lt;/script&gt;">' \
'https://www.youtube.com/watch?v=&lt;script&gt;alert(1)' \
'&lt;/script&gt;</a></p>'),
('[video=left|float|4:3]https://www.youtube.com/watch?v=XEjLoHdbVeE',
'<div class="video-wrapper img-float-left" ' \
'style="padding-bottom: 75%"><iframe ' \
'src="https://www.youtube.com/embed/XEjLoHdbVeE" frameborder="0" ' \
'allowfullscreen></iframe></div>'),
('lol[youtube]h4WLX8hfpJw', '<p>lol</p><div class="video-wrapper" ' \
'style="padding-bottom: 56.25%"><iframe ' \
'src="https://www.youtube.com/embed/h4WLX8hfpJw" frameborder="0" ' \
'allowfullscreen></iframe></div>'),
('[color=blue][youtube]h4WLX8hfpJw',
'<div class="video-wrapper" style="padding-bottom: 56.25%">' \
'<iframe src="https://www.youtube.com/embed/h4WLX8hfpJw" ' \
'frameborder="0" allowfullscreen></iframe></div>'),
('[color=blue]oh[youtube]h4WLX8hfpJw',
'<p><span style="color: #0000FF">oh</span></p>' \
'<div class="video-wrapper" style="padding-bottom: 56.25%"><iframe ' \
'src="https://www.youtube.com/embed/h4WLX8hfpJw" frameborder="0" ' \
'allowfullscreen></iframe></div>'),
# Progress bars.
('[progress=lol]mdr[/progress]', '<p>[progress=lol]mdr[/progress]</p>'),
# Text rotation obfuscation.
('[rot13]obawbhe[/rot13]', '<p>bonjour</p>'),
# Lists.
('[list]haha[b][*]wow[*]incredible[/b][/*]wow[*]yuy[/list]',
'<ul><li><p>wow</p></li><li><p>incredible[/b]</p></li>' \
'<li><p>yuy</p></li></ul>'),
('[list]\n[*]bonjour', '<ul><li><p>bonjour</p></li></ul>'),
# Smileys.
(':)', '<p><img src="/images/smileys/smile.gif"></p>'),
(':):)', '<p>:):)</p>'),
(':) :D', '<p><img src="/images/smileys/smile.gif"> ' \
'<img src="/images/smileys/grin.gif"></p>'),
))
def test_html(test_input, expected):
assert _tohtml(test_input) == expected
# End of file.
View File
@ -1,30 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Unit tests for the Python version of textout. """
import pytest
from textoutpc import tohtml as _tohtml
@pytest.mark.parametrize('test_input,expected', (
# Basic text.
('', ''),
('lol', 'lol'),
# Basic text styling.
('[u][b]a[/]mdr', '<u><b>a</b>mdr</u>'),
# Links.
('[url=https://thomas.touhey.fr/]',
'<a href="https://thomas.touhey.fr/">https://thomas.touhey.fr/</a>'),
))
def test_htmli(test_input, expected):
assert _tohtml(test_input, inline = True) == expected
# End of file.
126
tests/test_lexer.py Normal file
View File
@ -0,0 +1,126 @@
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
# *****************************************************************************
"""Lexer tests for textoutpc."""
from __future__ import annotations
from collections.abc import Sequence
from io import StringIO
import pytest
from textoutpc.lexer import (
CloseTagEntity,
Entity,
NewlineEntity,
OpenTagEntity,
SpecialEntity,
TextEntity,
iter_textout_entities,
)
@pytest.mark.parametrize(
"inp,expected",
(
("abc", (TextEntity(content="abc"),)),
("a" * 1500, (TextEntity(content="a" * 1500),)),
("[]", (TextEntity(content="[]"),)),
("[ /hello]", (CloseTagEntity(name="hello"),)),
(
"[b][i]hello[/b]omg",
(
OpenTagEntity(name="b"),
OpenTagEntity(name="i"),
TextEntity(content="hello"),
CloseTagEntity(name="b"),
TextEntity(content="omg"),
),
),
("[mytag]", (OpenTagEntity(name="mytag"),)),
("[mytag=value]", (OpenTagEntity(name="mytag", value="value"),)),
(
"[mytag=value=other]",
(OpenTagEntity(name="mytag", value="value=other"),),
),
("[hello[]]", (OpenTagEntity(name="hello[]"),)),
(
"[hello[][]=world[][ohno]]what",
(
OpenTagEntity(name="hello[][]", value="world[][ohno]"),
TextEntity(content="what"),
),
),
(
"[hello=]",
(OpenTagEntity(name="hello", value=""),),
),
(
"``k",
(
SpecialEntity(value="`"),
SpecialEntity(value="`"),
TextEntity(content="k"),
),
),
(
"a[mytag]b[/myothertag]",
(
TextEntity(content="a"),
OpenTagEntity(name="mytag"),
TextEntity(content="b"),
CloseTagEntity(name="myothertag"),
),
),
("\n\r\n", (NewlineEntity(), NewlineEntity())),
(
"[" + "w" * 33 + "]",
(TextEntity(content="[" + "w" * 33 + "]"),),
),
(
# Partial result cannot be of the maximum entity size.
"w" * 1000 + "[" + "w" * 512,
(TextEntity(content="w" * 1000 + "[" + "w" * 512),),
),
(
"w" * 1000 + "[" + "a" * 50,
(TextEntity(content="w" * 1000 + "[" + "a" * 50),),
),
(
"[hello=" + "w" * 256 + "]",
(OpenTagEntity(name="hello", value="w" * 256),),
),
(
"[hello=" + "w" * 257 + "]",
(TextEntity(content="[hello=" + "w" * 257 + "]"),),
),
(
"[" + "w" * 33 + "]",
(TextEntity(content="[" + "w" * 33 + "]"),),
),
(
"[/" + "w" * 33 + "]",
(TextEntity(content="[/" + "w" * 33 + "]"),),
),
(
"[" * 19 + "]" * 18,
(
TextEntity(content="[["),
OpenTagEntity(name="[" * 16 + "]" * 16),
TextEntity(content="]"),
),
),
),
)
def test_lex(inp: str, expected: Sequence[Entity]):
"""Test lexing an input string to get a sequence."""
assert tuple(iter_textout_entities(inp)) == tuple(expected)
def test_stringio_lex():
"""Test lexing from a string input string."""
stream = StringIO("[abc]")
assert tuple(iter_textout_entities(stream)) == (OpenTagEntity(name="abc"),)
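As a rough usage sketch of the lexer exercised above (only names imported in this test file are used; the entity listing in the comment is indicative, going by the cases above):

from textoutpc.lexer import iter_textout_entities

# Walk the entity stream produced for a small BBCode fragment.
for entity in iter_textout_entities("[b]hello[/b]"):
    print(entity)
# Expected entities: OpenTagEntity(name="b"), TextEntity(content="hello"),
# CloseTagEntity(name="b")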
View File
@ -1,21 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Unit tests for the Python version of textout, lightscript-related
functions. """
import pytest
from textoutpc import tolightscript as _tolightscript
@pytest.mark.parametrize('test_input,expected', (
# Basic text.
('', ''),
))
def test_lightscript(test_input, expected):
assert _tolightscript(test_input) == expected
# End of file.
195
tests/test_parser.py Normal file
View File
@ -0,0 +1,195 @@
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
# *****************************************************************************
"""Parser tests for textoutpc."""
from __future__ import annotations
from itertools import chain
from typing import Iterable, Sequence
from docutils.nodes import Element, Node, Text, container, emphasis, strong
from docutils.utils import new_document
import pytest
from textoutpc.nodes import progress, spoiler
from textoutpc.parser import TextoutParser
def represent_node(node: Node) -> str:
"""Get the representation string for a node.
:param node: The node to get the representation for.
:return: The string representation.
"""
base = repr(node)
if isinstance(node, Element):
base += (
" ["
+ ", ".join(
f"{key}={value!r}" for key, value in node.attributes.items()
)
+ "]"
)
return base
def compare_nodes(
first_node_iterable: Iterable[Node],
second_node_iterable: Iterable[Node],
/,
*,
path: str = "",
) -> bool:
"""Compare node sequences.
:param first_node_iterable: The first node iterable.
:param second_node_iterable: The second node iterable.
:param path: The path of the parent node, used when reporting differences.
:return: Whether both node iterables are equivalent.
"""
first_node_iterator = iter(first_node_iterable)
second_node_iterator = iter(second_node_iterable)
result = True
for i, (first_node, second_node) in enumerate(
zip(first_node_iterator, second_node_iterator),
):
similar_nodes = True
if type(first_node) is not type(second_node):
similar_nodes = False
if isinstance(first_node, Text) and isinstance(second_node, Text):
similar_nodes = str(first_node) == str(second_node) and result
elif isinstance(first_node, Element) and isinstance(
second_node,
Element,
):
if isinstance(first_node, progress) and isinstance(
second_node,
progress,
):
similar_nodes = (
first_node.value == second_node.value and similar_nodes
)
if first_node.attributes != second_node.attributes:
similar_nodes = False
if not similar_nodes:
result = False
result = (
compare_nodes(first_node, second_node, path=path + f"[{i}]")
and result
)
if not similar_nodes:
print(f"Different nodes at {path or 'root'}:")
print(f" in input: {represent_node(first_node)}")
print(f" in output: {represent_node(second_node)}")
result = False
try:
first_node = next(first_node_iterator)
except StopIteration:
pass
else:
result = False
print(f"Additional nodes in the input at {path or 'root'}:")
for node in chain((first_node,), first_node_iterator):
print(f" {represent_node(node)}")
try:
second_node = next(second_node_iterator)
except StopIteration:
pass
else:
result = False
print(f"Additional nodes in the output at {path or 'root'}:")
for node in chain((second_node,), second_node_iterator):
print(f" {represent_node(node)}")
return result
@pytest.mark.parametrize(
"inputstring,expected",
(
("hello\nworld", [Text("hello\nworld")]),
(
"[b][i]hello[/i]",
[strong("", emphasis("", Text("hello")))],
),
(
"[b][i]hello[/b]omg",
[strong("", emphasis("", Text("hello"))), Text("omg")],
),
(
"[c=#abc;font-size: 12pt]hello",
[
container(
"",
Text("hello"),
style="color: #AABBCC; font-size: 12pt",
),
],
),
(
"[b=unexpected]wow",
[Text("[b=unexpected]wow")],
),
(
"[center]hello",
[container("", Text("hello"), **{"class": "align-center"})],
),
(
"[progress=55]My super progress bar",
[progress("", Text("My super progress bar"), value=55)],
),
(
"[hello]world[/hello]",
[Text("[hello]world[/hello]")],
),
(
"[noeval][hello]world[/hello][/noeval]",
[Text("[hello]world[/hello]")],
),
(
"the message is: [rot=13]uryyb[/rot] - the - [rot13]jbeyq",
[Text("the message is: hello - the - world")],
),
(
"[spoiler=should open|should close]spoiler [b]content[/b]!",
[
spoiler(
"",
Text("spoiler "),
strong("", Text("content")),
Text("!"),
closed_title="should open",
opened_title="should close",
),
],
),
(
"[code=c]int main() { return 0; }",
[
container(
"",
Text("int main() { return 0; }"),
**{"class": "code"},
),
],
),
),
)
def test_parser(inputstring: str, expected: Sequence[Node]) -> None:
"""Test that the parser works correctly."""
doc = new_document("/tmp/fake-source.bbcode") # noqa: S108
parser = TextoutParser()
parser.parse(inputstring, doc)
assert compare_nodes(doc, expected)
39
tests/test_tags.py Normal file
View File
@ -0,0 +1,39 @@
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
# *****************************************************************************
"""Tags tests for textoutpc."""
from __future__ import annotations
from typing import Sequence
from docutils.nodes import Node, Text, TextElement, container
import pytest
from textoutpc.tags import Tag
@pytest.mark.parametrize(
"nodes,expected",
(
([], ""),
([TextElement("", Text("hello, world"))], "hello, world"),
),
)
def test_get_raw_text(nodes: Sequence[Node], expected: str):
"""Get raw text."""
assert Tag.get_text_from_raw_children(nodes) == expected
def test_get_raw_text_with_more_children():
"""Check that raw text cannot be obtained in some cases."""
with pytest.raises(AssertionError, match=r"More than one"):
Tag.get_text_from_raw_children([Text("hello,"), Text(" world")])
def test_get_raw_text_from_unsupported_children():
"""Check that raw text cannot be obtained with unsupported types."""
with pytest.raises(AssertionError, match=r"Unsupported child"):
Tag.get_text_from_raw_children([container("", Text("hello"))])
View File
@ -1,46 +1,8 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Functions for the user.
Really simplifies the thing.
"""
# *****************************************************************************
"""textout() equivalent from Planète Casio."""
from io import StringIO as _StringIO
from .version import version
from ._options import TextoutOptions as Options, \
TextoutBlockTag as BlockTag, TextoutInlineTag as InlineTag, \
TextoutParagraphTag as ParagraphTag, TextoutSmiley as Smiley, \
TextoutImage as Image, TextoutVideo as Video
from ._translate import Translator as _Translator
__all__ = ["version", "tohtml", "tolightscript",
"Options", "BlockTag", "ParagraphTag", "InlineTag",
"Smiley", "Image", "Video"]
# ---
# Public functions.
# ---
_default_options = Options()
def tohtml(message, options = _default_options, **tweaks):
""" Converts textout BBcode to HTML.
Receives a string, returns a string. """
t = _Translator(_StringIO(message), _StringIO(), 'html', \
tweaks, options)
return t.process().getvalue()
def tolightscript(message, options = _default_options, **tweaks):
""" Converts textout BBcode to Lightscript.
Receives a string, returns a string. """
return "" # TODO: real thing one day
return _Translator(_StringIO(message), _StringIO(), 'lightscript', \
tweaks, options).process().getvalue()
# End of file.
from __future__ import annotations
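As a rough sketch of how the new docutils-based pipeline can be driven (assembled from the imports and calls exercised in tests/test_parser.py; the stable public entry point may differ from this):

from docutils.utils import new_document
from textoutpc.parser import TextoutParser

# Parse a BBCode message into a docutils document tree.
document = new_document("message.bbcode")
TextoutParser().parse("[b][i]hello[/i][/b] world", document)

# Inspect the resulting node tree.
print(document.pformat())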
View File
@ -1,70 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Utilities for HTML conversions. """
import regex as _re
from html import escape
__all__ = ["escape", "urls", "SmileyConvertor"]
# ---
# Smileys.
# ---
class SmileyConvertor:
""" Smileys convertor. """
def __init__(self, smileys = {}):
self._html = {escape(a): b.url \
for a, b in smileys.items() if b.url != None}
self._re = _re.compile('(^|\\s)(' + '|'.join(map(_re.escape,
self._html.keys())) + ')(\\s|$)')
def convert(self, text):
cv = ""
while text:
try:
m = next(self._re.finditer(text))
except StopIteration:
break
cv += text[:m.start()] + m.group(1)
cv += '<img src="' + self._html[m.group(2)] + '">'
text = m.group(3) + text[m.end():]
return cv + text
# ---
# URLs.
# ---
_urlreg = _re.compile("""\
(?P<sp>^|\s|[[:punct:]])
(?P<url>(https?|ftp):
(?P<ucore>[^\[\]\(\)\s]* (\[(?&ucore)\]?)* (\((?&ucore)\)?)*)*
)
""", _re.VERBOSE | _re.M)
def urls(text):
""" Convert URLs. """
def _sub_html(m):
sp = m.group('sp')
url = m.group('url')
aft = ''
# Hack for the last comma.
if url[-1] == ',':
url, aft = url[:-1], ','
text = '{}<a href="{}">{}</a>{}' \
.format(sp, url, url, aft)
return text
return _urlreg.sub(_sub_html, text)
# End of file.
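For reference, the URL auto-linker removed here was used roughly as follows (the output in the comment is indicative, including the trailing-comma hack):

from textoutpc._html import urls

print(urls("see https://thomas.touhey.fr/, right?"))
# roughly: see <a href="https://thomas.touhey.fr/">https://thomas.touhey.fr/</a>, right?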
View File
@ -1,42 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Utilities for Lightscript conversions. """
import regex as _re
__all__ = ["urls"]
# ---
# URLs.
# ---
_urlreg = _re.compile("""\
(?P<sp>^|\s|[[:punct:]])
(?P<url>(https?|ftp):
(?P<ucore>[^\[\]\(\)\s]* (\[(?&ucore)\]?)* (\((?&ucore)\)?)*)*
)
""", _re.VERBOSE | _re.M)
def urls(text):
""" Convert URLs. """
def _sub_ls(m):
sp = m.group('sp')
url = m.group('url')
aft = ''
# Hack for the last comma.
if url[-1] == ',':
url, aft = url[:-1], ','
url = url.replace('<', '%3C')
url = url.replace('>', '%3E')
text = '{}<{}>{}'.format(sp, url, aft)
return text
return _urlreg.sub(_sub_ls, text)
# End of file.
View File
@ -1,355 +0,0 @@
#!/usr/bin/env python3
#**************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#**************************************************************************
""" Base classes to use with options (tags, smileys) in textoutpc, with a
manager class.
For your tag to be used as a textoutpc tag, you have to make it
inherit one of the `TextoutBlockTag` or `TextoutInlineTag` classes.
Making separate tag modules is possible through the manager class,
which allows not to hardcode the tags into the module. """
from functools import partial as _p
from inspect import ismodule as _ismod, isclass as _isclass, \
getargspec as _getargspec, getfullargspec as _getfullargspec, \
currentframe as _currentframe, getouterframes as _getouterframes
from importlib import import_module as _importmod
from ._html import SmileyConvertor as _htmlsm
__all__ = ["TextoutOptions",
"TextoutTag", "TextoutBlockTag", "TextoutInlineTag",
"TextoutParagraphTag", "TextoutSmiley", "TextoutImage", "TextoutVideo"]
def _getargscount(func):
try:
return len(_getfullargspec(func).args)
except:
return len(_getargspec(func).args)
# ---
# Tags.
# ---
# Main base tag class.
# For more about defining a tag, see `doc/tags.md`.
class TextoutTag:
""" The textout tag base class.
Is initialized with these values:
<name><content><name>
| name: "<name>" (only special chars such as `)
| value: None
[<name>]<content>[/<name>]
| name: "[<name>]"
| value: None
[<name>]<content>[/] (when possible)
| name: "[<name>]"
| value: None
[<name>=<value>]<content>[/<name>]
| name: "[<name>]"
| value: "<value>"
[<name>=<value>]<content>[/] (when possible)
| name: "[<name>]"
| value: "<value>" """
aliases = ()
def __init__(self, name, value, ot, tweaks, options):
""" Initialize the textout tag with the documented members. """
# Store internal data.
self.__name = name
self.__value = value
self.__output_type = ot
self.__tweaks = tweaks
self.__options = options
self.output_type = ot
# Call both prepare functions.
if hasattr(self, 'prepare'):
try:
assert _getargscount(self.prepare) == 4
args = (name, value, ot)
except:
args = (name, value)
self.prepare(*args)
if hasattr(self, 'prepare_' + ot):
prep = getattr(self, 'prepare_' + ot)
try:
assert len(_getargspec(prep).args) == 4
args = (name, value, ot)
except:
args = (name, value)
prep(*args)
# Prepare the preprocessing elements.
if hasattr(self, 'preprocess'):
if hasattr(self, 'preprocess_' + ot):
self.__preprocess0 = self.preprocess
self.__preprocess1 = getattr(self, 'preprocess_' + ot)
self.preprocess = self.__preprocess_double
elif hasattr(self, 'preprocess_' + ot):
self.preprocess = getattr(self, 'preprocess_' + ot)
if hasattr(self, 'preprocess'):
self.__preprocess2 = self.preprocess
self.preprocess = self.__preprocess_and_prepare
else:
self.__after_preprocess()
if hasattr(self, 'default_' + ot):
self.default = getattr(self, 'default_' + ot)
def __repr__(self):
return f"{self.__class__.__name__}(name = {repr(self.__name)}, " \
f"value = {repr(self.__value)}, " \
f"ot = {repr(self.__output_type)})"
def __preprocess_double(self, content):
""" Preprocess using the two methods. """
ct = self.__preprocess0(content)
if ct != None:
content = ct
del ct
ct = self.__preprocess1(content)
if ct != None:
content = ct
del ct
return content
def __preprocess_and_prepare(self, content):
""" Preprocess and do the things after. """
ret = self.__preprocess2(content)
self.__after_preprocess()
return ret
def __out(self, name):
""" Generic function to call two output functions of the same
type. """
getattr(self, '__' + name)()
getattr(self, name + '_' + self.__output_type)()
def __after_preprocess(self):
""" After preprocessing, check the begin, content and end that may
have been set by the preprocessing function. """
ot = self.__output_type
for otype in ('begin', 'content', 'end'):
if hasattr(self, otype):
if hasattr(self, otype + '_' + ot):
setattr(self, '__' + otype, getattr(self, otype))
setattr(self, otype, _p(self.__out, otype))
elif hasattr(self, otype + '_' + ot):
setattr(self, otype, getattr(self, otype + '_' + ot))
def tweak(self, key, default = None):
try:
return self.__tweaks[key]
except KeyError:
return default
def image(self, *args, **kwargs):
return self.__options.get_image(*args, **kwargs)
def video(self, *args, **kwargs):
return self.__options.get_video(*args, **kwargs)
# Role-specific base tag classes.
class TextoutBlockTag(TextoutTag):
pass
class TextoutInlineTag(TextoutTag):
pass
# Default tag: paragraph.
class TextoutParagraphTag(TextoutBlockTag):
""" Main tag for basic paragraphs. """
notempty = True
def begin_html(self):
return '<p>'
def end_html(self):
return '</p>'
# ---
# Smileys.
# ---
class TextoutSmiley:
""" Base class for smileys. """
aliases = ()
url = None
def __repr__(self):
return f"{self.__class__.__name__}(aliases = {repr(self.aliases)}, " \
f"url = {repr(self.url)})"
# ---
# Multimedia.
# ---
class TextoutImage:
""" Base class for images. """
def __init__(self, url):
raise ValueError("no URL supported")
class TextoutVideo:
""" Base class for videos. """
def __init__(self, url):
raise ValueError("no URL supported")
# ---
# Options extractor and manager.
# ---
_builtin_module = None
def _get_builtin_module():
""" Get the `.builtin` module. """
global _builtin_module
if _builtin_module == None:
_builtin_module = _importmod('..builtin', __name__)
return _builtin_module
class TextoutOptions:
""" Options manager.
Object responsible for getting the tags. """
def __init__(self, *modules, default = True):
self._aliases = {}
self._s_aliases = {}
self._htmlsm = None
self._videos = []
self._images = []
if default:
self.add(_get_builtin_module())
for mod in modules:
self.add(mod)
def __repr__(self):
return f"{self.__class__.__name__}()"
def add(self, element):
""" Add an option. """
if isinstance(element, str):
element = _importmod(element,
_getouterframes(_currentframe(), 1)[0].name)
if _ismod(element):
self.__extract(element)
return True
if _isclass(element) and issubclass(element, TextoutTag):
for alias in element.aliases:
self._aliases[alias] = element
return True
if _isclass(element) and issubclass(element, TextoutSmiley):
for alias in element.aliases:
self._s_aliases[alias] = element
self._htmlsm = None
return True
if _isclass(element) and issubclass(element, TextoutImage):
if not any(image is element for image in self._images):
self._images.append(element)
if _isclass(element) and issubclass(element, TextoutVideo):
if not any(video is element for video in self._videos):
self._videos.append(element)
def __extract(self, module):
""" Extract options from a module. """
tags = []
smileys = []
# Obtain the list of properties from the module.
try:
ds = module.__all__
except:
ds = dir(module)
# Get the submodules from the module (usually different files in the
# tags module folder).
for submodule in (obj for name, obj in ((nm, getattr(module, nm)) \
for nm in ds) if (name == '__init__' or name[0] != '_') \
and _ismod(obj)):
self.__extract(submodule)
# Extract the tags from the current module.
for obj in (obj for name, obj in ((nm, getattr(module, nm)) \
for nm in ds) if name[0] != '_'):
self.add(obj)
def get_smileys(self):
""" Get the smileys dictionary. """
return self._s_aliases.copy()
def htmlsmileys(self, text):
""" Get the smileys convertor for HTML. """
if not self._htmlsm:
self._htmlsm = _htmlsm(self._s_aliases)
return self._htmlsm.convert(text)
def get_video(self, url):
""" Get a video using its URL. """
for video in self._videos:
try:
v = video(url)
except:
continue
break
else:
raise ValueError("invalid video URL")
return v
def get_image(self, url):
""" Get an image using its URL. """
for image in self._images:
try:
i = image(url)
except:
continue
break
else:
raise ValueError("invalid image URL")
return i
def get_tag(self, name):
""" Get the tag class corresponding to a name. """
return self._aliases[name]
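# Usage sketch (illustrative, not from the original file): with the default
# options, the builtin module is loaded and tags can be looked up by alias.
# The extra module name below is hypothetical.
#
#     options = TextoutOptions()             # registers `.builtin` tags/smileys
#     bold_cls = options.get_tag('[b]')      # -> a TextoutTag subclass
#     options.add('myproject.extratags')     # add tags from another module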
# End of file.

View File

@ -1,199 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Layer on top of the character stream.
See the `TextoutStream` class description for more information.
"""
import io as _io
import regex as _re
__all__ = ["TextoutStream", "TextoutUnit"]
# ---
# Class definitions.
# ---
class TextoutUnit:
""" Raw textout stream unit. """
BEGIN = 1
END = 2
SPECIAL = 3
NEWLINE = 4
PARSEP = 5
def __init__(self, *args):
self.full = ''
if len(args) > 1:
self.type, self.name, self.value, *_ = args + (None,)
return
result = args[0]
gr = result.groupdict()
self.name = None
self.value = None
if gr['sname'] == '\n':
self.type = self.NEWLINE
elif gr['parsep'] != None:
self.type = self.PARSEP
elif gr['bname'] != None:
self.type = self.BEGIN
self.name = gr['bname']
self.value = gr['value']
self.full = "[{}{}]".format(self.name,
"=" + self.value if self.value != None else "")
elif gr['ename'] != None:
self.type = self.END
self.name = gr['ename']
self.full = "[/" + self.name + "]"
else:
self.type = self.SPECIAL
self.name = gr['sname']
self.full = self.name
if self.name != None:
self.name = self.name.lower()
if self.type != self.SPECIAL:
self.name = "[{}]".format(self.name)
def __repr__(self):
typetab = {self.BEGIN: "begin", self.END: "end",
self.SPECIAL: "special", self.NEWLINE: "newline",
self.PARSEP: "parsep"}
return 'TextoutUnit(type={}{}{})'.format(\
typetab[self.type],
', name=' + repr(self.name) if self.name != None else "",
', value=' + repr(self.value) if self.value != None else "")
def __eq__(self, other):
if not isinstance(other, TextoutUnit):
return False
return self.type == other.type \
and (self.type == self.NEWLINE or self.name == other.name) \
and (self.type != self.BEGIN or self.value == other.value)
class TextoutStream:
""" Textout stream, for easier stream processing.
The idea behind this stream is that it will provide more suitable
(therefore easier to process) data for the applications above,
with raw text and tags. """
# A tag can basically be one of the following things:
# - a starting tag, looking like [<name>] or [<name>=<attribute>]
# - an ending tag, looking like [/<name>]
# - a special tag (starting or ending), usually one-char (the only
# one currently available is the ` tag).
#
# A tag name is 32 chars at most (at least 1 char).
# A closing tag can have no name, which means that it will close the
# last opened tag automatically.
# A tag attribute is 256 chars at most.
#
# FIXME: check the sizes? it seems that it stopped working…
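# Illustrative matches (not from the original file), following the rules
# above:
#
#     "[b]"          -> bname="b", value=None
#     "[color=red]"  -> bname="color", value="red"
#     "[/b]"         -> ename="b"
#     "[/]"          -> ename="" (generic closing tag)
#     "`", "\n"      -> sname
#     "\n\n"         -> parsep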
_Tag = _re.compile(r"""
\[\s?
(?P<bname>
(?P<bname_e>[^\/\[\]\=][^\[\]\=]* (\[(?&bname_e)\]?)*)*
)
(\s?=\s?(?P<value>
(?P<value_e>[^\[\]]* (\[(?&value_e)\]?)*)*
))?
\s?\]
|
\[[\\\/]\s?(?P<ename>
(?P<ename_e>[^\/\[\]\=][^\[\]\=]* (\[(?&ename_e)\]?)*)*
)\s?\]
|
(?P<parsep>[\n]{2,})
|
(?P<sname>`|[\n])
""", _re.VERBOSE | _re.DOTALL | _re.MULTILINE)
# Keep this buffer size above the maximum size of a tag (387)
# for this class to work alright. Anything above 512 should work great.
BUFFER_SIZE = 1024
def __init__(self, stream):
# If the 'stream' is a string, we want to use standard stream
# functions, so we're gonna enforce them using the `StringIO` class.
if isinstance(stream, str):
stream = _io.StringIO(stream)
# Buffer management.
self.stream = stream
self.buf = ""
# Management of the last tag match.
self.result = None
self.last = None
# Error position.
self.pos = 0
self.line = 0
self.col = 0
def __iter__(self):
# This class is (obviously) iterable.
# We want to use this class as the iterator as well.
return self
def __next__(self):
# If we have a result, process it.
if self.result:
data, self.result = TextoutUnit(self.result), None
self.last = data
return data
# Make sure to have enough data to read.
self.buf += self.stream.read(self.BUFFER_SIZE - len(self.buf))
if not self.buf:
self.last = None
raise StopIteration
# Check that we have a result.
result = self._Tag.search(self.buf, partial = True)
if not result:
text = self.buf
self.buf = ''
self.last = text
return text
# If there is some text, return it.
# Eventually store the result so we can process it later.
if result.start() > 0:
ret = self.buf[:result.start()]
self.buf = self.buf[result.end():]
if not result.partial:
self.result = result
self.last = ret
return ret
# Process the result now!
self.buf = self.buf[result.end():]
data = TextoutUnit(result)
self.last = data
return data
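# Iteration sketch (illustrative, not from the original file): the stream
# yields plain strings for raw text and `TextoutUnit` objects for tags.
#
#     for unit in TextoutStream("hi [b]there[/b]"):
#         ...   # "hi ", BEGIN unit for "[b]", "there", END unit for "[b]"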
# End of file.

View File

@ -1,896 +0,0 @@
#!/usr/bin/env python3
#**************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#**************************************************************************
""" Main translation function.
See the `Translator` class documentation for more information.
"""
import string as _string
from copy import deepcopy as _deepcopy
from ._options import TextoutBlockTag as _TextoutBlockTag, \
TextoutParagraphTag as _TextoutParagraphTag
from ._stream import TextoutStream as _TextoutStream
from ._html import escape as _htmlescape, urls as _htmlurls
__all__ = ["Translator"]
# ---
# Tweaks interface.
# ---
class _TweaksDictionary:
""" Tweaks dictionary. Read-only, and makes sure to match equivalent
tweak keyword, e.g. `label_prefix`, `LABELPREFIX` and
`__LaBeL___PRE_FIX__`. """
def __init__(self, base):
self.__elts = {}
for kw in base:
self.__elts[self.__normalize(kw)] = base[kw]
def __repr__(self):
return f"{self.__class__.__name__}({repr(self.__elts)})"
def __getitem__(self, key):
return self.__elts[self.__normalize(key)]
def __getattr__(self, key):
try:
return self.__elts[self.__normalize(key)]
except:
raise AttributeError(key)
def __normalize(self, name):
return ''.join(c for c in name \
if c in _string.ascii_letters).lower()
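# Illustrative example (not from the original file): the keys below all
# normalize to "labelprefix" and therefore return the same value.
#
#     tweaks = _TweaksDictionary({'label_prefix': 'doc-'})
#     tweaks['LABELPREFIX'] == tweaks.label_prefix == 'doc-'   # True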
# ---
# Tag data utility.
# ---
class _TagData:
BLOCK = 1
INLINE = 2
def __init__(self, tag, name, full):
""" Tag data initialization.
Here, we prepare all of the attributes from the tag's
after-preparation attributes. """
# `name` is the name through which the tag has been called.
# `full` is the full tag beginning mark.
self.name = name
self.type = self.BLOCK if isinstance(tag, _TextoutBlockTag) \
else self.INLINE
self.full = full
# Tag stack behaviour.
# `nwi` is whether the tag can be in itself directly or not (see
# the `not_within_itself` property in the docs).
# `onlyin` is the list of tags that are allowed as a parent tag
# to this one.
# `allowed` is the tags that are allowed amongst the children tags.
self.nwi = bool(tag.not_within_itself) \
if hasattr(tag, 'not_within_itself') else False
self.onlyin = list(tag.only_in) \
if hasattr(tag, 'only_in') else None
self.allowed = list(tag.allowed_tags) \
if hasattr(tag, 'allowed_tags') else None
# Tag beginning displaying.
# `notempty` is the moment when (and if) to start displaying the
# tag's code and content.
# `started` is whether the tag's beginning has been processed,
# i.e. if the content is no longer processed.
# `notext` is whether text within the tag directly is printed
# or not.
self.notempty = bool(tag.notempty) if hasattr(tag, 'notempty') \
else False
self.started = False
self.notext = bool(tag.no_text) if hasattr(tag, 'no_text') \
else False
# `base` is the actual tag object returned by `get_tag()`.
self.base = tag
# Flags and properties calculated from the tag's attributes, using
# the rules given in `TAGS.md`.
# `ign` is whether the content should be read while the tag is
# opened.
# `generic` is whether the tag can be terminated by the generic
# tag ending mark [/].
# `raw` is whether the tag's content should be read as raw.
# `super` is whether the tag is a superblock or not.
# `inlined` is whether the next block on the same level is turned
# into a superblock or not.
self.ign = not hasattr(tag, 'preprocess') \
and hasattr(tag, 'content')
self.generic = False if name == None else bool(tag.generic) \
if hasattr(tag, 'generic') else True
self.raw = bool(tag.raw) if hasattr(tag, 'raw') \
else hasattr(tag, 'preprocess')
self.super = True if hasattr(tag, 'preprocess') else \
bool(tag.superblock) if hasattr(tag, 'superblock') \
else False
self.inlined = bool(tag.inlined) if self.super \
and hasattr(tag, 'inlined') and bool(tag.inlined) else False
self.noinline = bool(tag.noinline) if self.type == self.BLOCK \
and hasattr(tag, 'noinline') else False
# Content processing utilities.
# `last` is the content of the tag. A boolean indicates that we
# only want to know if the content is empty or not, and a string
# means we want to get the full content to re-use it later.
# In order not to manage a third case, even if the tag doesn't
# care if its content is empty or not, this property should be
# set to `False`.
self.last = "" if hasattr(tag, 'preprocess') else False
# Reset the tag.
self.reset()
def reset(self):
""" Reset the tag, generally because it has been closed. """
self.tag = _deepcopy(self.base)
self.started = False
if isinstance(self.last, bool):
self.last = False
else:
self.last = ""
def __repr__(self):
return f'TagData(tag = {repr(self.tag)})'
# ---
# Translator main class.
# ---
class Translator:
""" One-time usage class for translating.
Use it this way: `Translator(my_inp, my_outp).process()`.
You can even chain calls as the `process()` method returns
the output stream object. """
def __init__(self, inp, outp, output_type, tweaks, options):
""" Initializer. """
if not output_type in ('html', 'lightscript'):
raise Exception("Invalid output type")
self.output_type = output_type
self.tweaks = _TweaksDictionary(tweaks)
self.options = options
self.inp = inp
self.outp = outp
# `queue` is the queue of tag containers, with the actual tag
# objects, calculated tag properties, variables for content
# processing, and other stuff.
# `cign` is the number of tags requiring the content to be ignored.
self.queue = []
self.cign = 0
# Text group management.
# In the following example text:
#
# some [incredible] text [align=center] you know
#
# There are two input groups, what's before and what's after the
# valid `[align=center]` tag. We want to flush the text in two
# steps only, in order to detect things such as URLs and smileys.
#
# The text group also manages the invalid tags, to manage URLs with
# brackets in it,
# e.g. https://example.org/[some-incredible-thing]-yea
self.text_group = ""
# `raw_mode` is whether the no evaluating mode is on or not.
# `raw_deg` is the number of times the raw tag has to be closed
# to exit.
self.raw_mode = False
self.raw_deg = 0
# `inline_mode` is whether the inline mode is on or not.
# Actually, for now, this mode is only global and cannot be
# enabled by tags.
self.inline_mode = bool(self.tweak("inline", False))
def __repr__(self):
p = []
p.append(f"inp = {repr(self.inp)}")
p.append(f"outp = {repr(self.outp)}")
p.append(f"output_type = {repr(self.output_type)}")
p.append(f"tweaks = {repr(self.tweaks)}")
p.append(f"options = {repr(self.options)}")
return f"{self.__class__.__name__}({', '.join(p)})"
def tweak(self, key, default = None):
""" Get a tweak from the tweaks dictionary. """
try:
return self.tweaks[key]
except KeyError:
return default
# ---
# Text outputting utilities.
# ---
def process_text(self, text):
""" Process text groups for naked URLs and stuff. """
# In all cases, we want to escape HTML special characters, so
# that the user cannot insert raw HTML tags (which would be a
# security flaw!).
if self.output_type == 'html':
text = _htmlescape(text)
# For non-raw HTML, we also want to convert smileys and naked
# URLs, because it's nicer!
if not self.raw_mode and self.output_type == 'html':
text = _htmlurls(text)
text = self.options.htmlsmileys(text)
return text
def put_text(self, text):
""" Output some text. """
# If we want to ignore the content (because it is not used
# nor output or the current tag doesn't allow text), let the
# text fall into the void.
if self.cign > 0 or (self.queue and self.queue[0].notext):
return
# Add to the text group, which will be processed when
# `flush_text()` is used.
self.text_group += text
def flush_text(self, superblocks_only = False,
next_block_is_super = False):
""" Flush the text that has been output. """
# First of all, check if the text group is empty or if we want to
# ignore it.
if not self.text_group or self.cign > 0:
return
# Pop the text group and put the code, with the process function
# in case it is given to a non-raw processing tag or given to the
# output.
text = self.text_group
self.text_group = ""
self.add_text(text, process_func = lambda x: self.process_text(x),
superblocks_only = superblocks_only,
next_block_is_super = next_block_is_super)
# ---
# Code outputting utilities.
# ---
def add_text(self, text, process_func = lambda x: x, start_tags = True,
superblocks_only = False, next_block_is_super = False,
skip_first = False):
""" Add text to the higher blocks if available. """
# The last queue is composed of booleans (does the group contain
# something or not) and texts for content processing.
# We want to set all of the booleans to True until the first text
# group, to which we want to add the current text.
# If there is no content preprocessing and we have to output it,
# we want to start the tags first: `dat == None` will be our
# signal!
blockfound = False
for dat in self.queue:
# Check if it is a tag we want to contribute to.
if dat.type == dat.BLOCK:
if dat.super or next_block_is_super:
blockfound = True
next_block_is_super = dat.inlined
elif not superblocks_only and not blockfound:
blockfound = True
next_block_is_super = dat.inlined
else:
continue
# Check if it is the first tag we want to skip.
if skip_first:
skip_first = False
continue
# Contribute to it, either by or-ing the content if it is
# a boolean (but anything or True == True), or by contributing
# to the buffer otherwise.
if isinstance(dat.last, bool):
dat.last = True
continue
# Start the tags if we're about to give this content to
# preprocessing.
if start_tags:
self.start_tags()
# Add the content to the preprocess buffer.
if not dat.raw:
text = process_func(text)
dat.last += text
break
else:
# No `break` has been encountered, which means the content has
# not been added to any preprocessing tag. Please process it!
if start_tags:
self.start_tags()
self.outp.write(process_func(text))
return False
# The content has been given for preprocessing.
return True
def put_debug(self, message):
""" Put a debug message directly into the output. """
self.outp.write(message)
def put_code(self, code, start_tags = True, flush_text = True,
superblocks_only = True, next_block_is_super = False,
skip_first = False):
""" Put some code. """
# We don't want to mix text and code, so we'll flush to be sure
# that the order doesn't get mixed up.
if flush_text:
self.flush_text()
# First of all, check if the text is empty or if we want to
# ignore it.
if not code or self.cign > 0:
return
# Add the code.
self.add_text(code, start_tags = start_tags,
superblocks_only = superblocks_only,
next_block_is_super = next_block_is_super,
skip_first = skip_first)
def put_newline(self):
""" Put a newline. """
# If we want to ignore the content (because it is not used
# nor output or the current tag doesn't allow text), let the
# text fall into the void.
if self.cign > 0 or (self.queue and self.queue[0].notext):
return
# The newline depends on the output type and the context,
# of course.
if self.output_type == 'html' and not self.raw_mode:
newline = '<br />\n'
else:
newline = '\n'
# Then put this as one puts code.
self.put_code(newline)
# ---
# Tag queue management.
# ---
def push_tag(self, dat):
""" Push a tag onto the tag stack. """
# If the tag does not process its content but replaces the content,
# that means the content is ignored.
if dat.ign:
self.cign += 1
# If we're about to put a tag or anything, empty the text block
# here.
self.flush_text()
# Insert the tag into the queue.
self.queue.insert(0, dat)
# Start the tag (and parent tags) if required.
self.start_tags()
# Don't forget to add the tag to the queue, and to enable raw
# mode if the tag expects a raw content (e.g. `[code]`).
if dat.raw:
self.raw_mode = True
self.raw_deg = 0
def pop_tag(self, end = ""):
""" Pop a tag from the tag stack.
`end` represents the full version of the ending tag marker,
for displaying if the tag is invalid. """
if not self.queue:
return
# Even if we had no beginning, no content and no end, what is
# here has to be distinguished from what was right before!
# So we need to flush the text group for this.
# (this will probably be useless for tags with preprocessing
# enabled, but that's okay, flushing doesn't modify the content
# processing queue)
self.flush_text()
# Pop the tag out of the queue.
dat = self.queue.pop(0)
tag = dat.tag
pcattrs = {'superblocks_only': dat.type == dat.BLOCK,
'next_block_is_super': dat.inlined}
# If preprocessing has been enabled, we ought to process the
# content, check if the tag is valid, and do everything we would
# have done while pushing the tag if it didn't do content
# processing.
if hasattr(tag, 'preprocess'):
# Take out the content of the content preprocessing queue.
# If there is no content and the tag proposes a default
# content, let's use it instead.
content = dat.last
if not content and hasattr(tag, 'default'):
try:
content = tag.default()
except:
# The tag is not supposed to have empty content,
# so we ought to output it as an invalid tag and go on.
self.put_text(dat.full)
self.put_text(end)
return
# Send the content to the tag while checking its validity (by
# checking if the `preprocess()` method returns an exception).
try:
ct = tag.preprocess(content)
except:
# The tag is invalid in the end, so we ought to send the
# raw things to the text group and forget about the tag.
self.put_text(dat.full)
self.put_text(content)
self.put_text(end)
return
# If we're here, congrats, the tag is valid! Now, if the
# `preprocess()` method returned something different, we
# want to use it instead.
if ct != None:
content = ct
# Output the beginning and the content. If there was no
# content, just put the content that we got earlier.
if hasattr(tag, 'begin'):
self.put_code(tag.begin(), **pcattrs)
dat.started = True
if hasattr(tag, 'content'):
self.put_code(tag.content(), **pcattrs)
elif dat.raw:
# XXX: I'm unsure about this. Shall raw tags return code
# or text? The text will only be escaped as raw mode is
# still enabled at this point.
self.put_text(content)
else:
self.put_code(content, **pcattrs)
elif hasattr(tag, 'content'):
# Tag replaces content without preprocessing, which means
# the content has been ignored and the tag only puts the
# things.
self.cign -= 1
self.put_code(tag.content(), **pcattrs)
elif hasattr(tag, 'default'):
# Tag defines a default content if there might be none,
# without text preprocessing. If there is no content, print it.
# Notice that the default content method can also raise
# an exception if the tag in its current configuration should
# not have an empty content.
if not dat.started:
if hasattr(dat.tag, 'begin'):
self.put_code(dat.tag.begin(), **pcattrs)
dat.started = True
if not dat.last:
try:
self.put_text(tag.default())
except:
# The tag is not supposed to have empty content!
# Let's output the raw text again, as we do when there is
# content processing.
self.put_text(dat.full)
self.put_text(end)
return
# Don't forget to end the tag!
if not dat.started:
pass
else:
if dat.type == dat.BLOCK:
self.close_inline_tags()
if hasattr(tag, 'end'):
self.put_code(tag.end(), start_tags = False, **pcattrs)
# Disable raw mode if it was a raw tag (which means that it
# enabled it, as tags into raw tags cannot be processed).
if dat.raw:
self.raw_mode = False
# ---
# Automatically start and end tags.
# ---
def start_tags(self):
""" Start the tags that haven't been started yet.
If a block has been newly opened, we ought to close the block
at the same level as it before opening it.
This is usually called when content is output, for tags that
aren't empty. """
# First, get the references to the blocks to end, the blocks to
# start, and all of the inline tags.
superblocks = []
block_to_start = None
block_to_end = None
inlines = []
next_block_is_super = False
for idx, dat in enumerate(self.queue):
# Check that the tag hasn't already been started or doesn't
# call for content processing.
if idx > 0 and type(dat.last) != bool:
break
# Then put the tag in the appropriate queue.
if dat.type == dat.BLOCK:
if block_to_start is not None and \
dat.super or next_block_is_super:
# The block is to be considered as the block to start.
# Sometimes the block to start is the latest
# superblock!
superblocks.insert(0, dat)
next_block_is_super = dat.inlined
elif dat.started:
block_to_end = dat
next_block_is_super = dat.inlined
elif block_to_end is None and block_to_start is None:
block_to_start = dat
next_block_is_super = dat.inlined
else:
inlines.insert(0, dat)
# If there is no new block to start, there's no need to end the
# current block.
if not block_to_start:
block_to_end = None
# Put the tag ends for the blocks to end.
# If there are some, we ought to close the inline tags first.
if block_to_end is not None:
for dat in inlines[::-1] + [block_to_end]:
if not dat.started:
continue
if hasattr(dat.tag, 'end'):
self.put_code(dat.tag.end(), start_tags = False,
skip_first = True)
dat.started = False
dat.reset()
# Then, put the tag beginnings.
to_begin = superblocks \
+ ([block_to_start] if block_to_start else [])
if all(not x.noinline for x in to_begin):
to_begin += inlines
for dat in to_begin:
if dat.started:
continue
if dat.notempty and not dat.last:
break
if hasattr(dat.tag, 'begin'):
self.put_code(dat.tag.begin(), start_tags = False,
flush_text = False, skip_first = dat == self.queue[0])
dat.started = True
def close_inline_tags(self):
""" We're about to close a block, so we want to close any inline
tags that could have been taken within it. """
for dat in self.queue:
# Check that the tag hasn't already been closed.
if dat.type != dat.INLINE or not dat.started:
continue
if hasattr(dat.tag, 'end'):
self.put_code(dat.tag.end(), start_tags = False)
dat.started = False
dat.reset()
# ---
# Main function.
# ---
def process(self):
""" Main function of the textout translator. """
# By default, everything is in a paragraph.
# Other blocks will supplant this by being further in the queue.
if not self.inline_mode:
self.push_tag(_TagData(_TextoutParagraphTag(None, None,
self.output_type, self.tweaks, self.options), None, ''))
# We want to get our elements out of the element stream (Lephe
# told me that the `TextoutStream` class was actually a lexer,
# but as I don't know the theory behind this...).
for element in _TextoutStream(self.inp):
# If it is a string or a newline, let's just put it.
# Otherwise, the element is some tag data or at least something
# that requires some special processing.
if isinstance(element, str):
self.put_text(element)
continue
tagdata = element
if tagdata.type == tagdata.NEWLINE:
self.put_newline()
continue
# XXX: As we don't manage paragraphs for now, ends of lines and
# paragraph separators are just output as-is for now.
if not tagdata.type in (tagdata.BEGIN, tagdata.END, \
tagdata.SPECIAL):
self.put_text(tagdata.full)
continue
# Check if it is a tag end (we do not know for special tags,
# as they usually are one-character long).
if tagdata.type in (tagdata.END, tagdata.SPECIAL):
# If raw mode is activated, that means that the queue is
# not empty and that the top tag of the queue is the tag
# that initiated raw mode. We're just going to check that
# the name corresponds, and that the tag has not been opened
# into itself (see the description of `raw_deg` in the
# initializer).
if self.raw_mode:
if tagdata.name != self.queue[0].name \
and not (tagdata.name == "[]" \
and self.queue[0].generic):
self.put_text(tagdata.full)
continue
if self.raw_deg > 0:
self.put_text(tagdata.full)
self.raw_deg -= 1
continue
# Check to which opened tag the ending tag corresponds.
pos = -1
if tagdata.name == "[]":
# Generic closing tag [/] management.
# `pos` is set to the first named tag in the queue here.
for qpos, qdat in enumerate(self.queue):
if qdat.name != None:
pos = qpos
break
else:
# Get the position corresponding to the tag.
for qpos, qdat in enumerate(self.queue):
if tagdata.name == qdat.name:
pos = qpos
break
# Then react to `pos`.
# If `pos` is 0 or above, an opening tag has been found.
# We ought to auto-close the opened tags that are not
# terminated explicitly, and close the tag that is closed
# explicitly.
if pos >= 0:
while pos > 0:
self.pop_tag()
pos -= 1
self.pop_tag(tagdata.full)
continue
if tagdata.type == tagdata.END:
self.put_text(tagdata.full)
continue
# If we are here, the tag is a special tag which hasn't
# been identified to be an ending tag. We don't want to
# stop because that means it is a beginning tag.
# From here, we know the tag is not a beginning tag.
# In raw mode, always display the tag, but if the tag
# corresponds to the raw tag opened, augment the number of
# tags required to close the raw tag.
if self.raw_mode:
if tagdata.name == self.queue[0].name:
self.raw_deg += 1
self.put_text(tagdata.full)
continue
# Get the initialized tag with the name and value.
# If the tag is unknown, output the full thing and just go on.
try:
tag = self.options.get_tag(tagdata.name)
except:
self.put_text(tagdata.full)
continue
value = tagdata.value
if value != None and hasattr(tag, 'procvalue') \
and tag.procvalue:
value = self.process_text(value)
try:
tag = tag(tagdata.name, value, self.output_type,
self.tweaks, self.options)
except:
self.put_text(tagdata.full)
continue
# Check if it is a block tag.
dat = _TagData(tag, tagdata.name, tagdata.full)
if self.inline_mode and dat.type == dat.BLOCK:
self.put_text(tagdata.full)
continue
# Check if is an allowed tag.
if dat.type == dat.BLOCK:
try:
sb = next(d for d in self.queue if d.super)
except StopIteration:
alw = None
else:
alw = sb.allowed
else:
try:
pr = self.queue[0]
except IndexError:
alw = None
else:
alw = pr.allowed
if alw is not None and not any(cls for cls in alw \
if isinstance(dat.base, cls)):
self.put_text(tagdata.full)
continue
# Check if it is within itself and it can't.
if dat.nwi and any(d for d in self.queue \
if isinstance(d.base, type(dat.base))):
while not isinstance(self.queue[0].base, type(dat.base)):
self.pop_tag()
self.pop_tag()
# Check if it is allowed in this parent.
if dat.onlyin is not None and self.queue \
and not any(cls for cls in dat.onlyin \
if isinstance(self.queue[0].base, cls)):
self.put_text(tagdata.full)
continue
# And don't forget to push the tag (through its data).
self.push_tag(dat)
# Push a paragraph tag if the block is a superblock.
if dat.type == dat.BLOCK and dat.super and not dat.raw \
and not dat.inlined and (dat.allowed is None \
or _TextoutParagraphTag in dat.allowed):
self.push_tag(_TagData(_TextoutParagraphTag(None, None,
self.output_type, self.tweaks, self.options), None,
''))
# End of file, it seems! Let's close the tags, flush the text
# and just resume our lives from there.
while self.queue:
self.pop_tag()
self.flush_text()
# And don't forget to return the output for the user to chain
# stuff easily ;)
return self.outp
def reopen(self, inp, outp):
""" Open another instance of this translator for
sub-translators. """
return Translator(inp, outp, self.output_type, self.tweaks,
self.options)
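# Usage sketch (illustrative, not from the original file; `options` is a
# TextoutOptions instance from the options module shown earlier in this
# commit):
#
#     import io
#     outp = Translator(io.StringIO("[b]hi[/b]"), io.StringIO(), 'html',
#         {}, options).process()
#     html = outp.getvalue()   # e.g. '<p><b>hi</b></p>'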
# End of file.

1054
textoutpc/builtin.py Normal file

File diff suppressed because it is too large

View File

@ -1,57 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import BlockTag as _BlockTag
__all__ = ["AlignTag"]
class AlignTag(_BlockTag):
""" Main tag for aligning paragraphs.
Example uses:
[align=center]This text is centered horizontally.[/align]
[justify]This text is justified.[/justify]
"""
aliases = ('[align]', '[center]', '[centre]', '[left]', '[right]',
'[justify]')
superblock = True
notempty = True
def prepare(self, name, value):
_align = {
'center': 'center',
'centre': 'center',
'left': 'left',
'right': 'right',
'justify': 'justify'}
if not name:
align = None
elif name == 'align' and value is not None:
align = _align[value]
else:
align = _align[name[1:-1]]
self._align = align
def begin_html(self):
if not self._align:
return ''
cl = []
if self._align:
cl.append('align-' + self._align)
return '<div{}>'.format(' class="' + ' '.join(cl) + '"' if cl else '')
def end_html(self):
if not self._align:
return ''
return '</div>'
# End of file.

View File

@ -1,77 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import BlockTag as _BlockTag, InlineTag as _InlineTag
__all__ = ["CodeTag", "InlineCodeTag", "NoEvalTag"]
class CodeTag(_BlockTag):
""" The basic code tag, for displaying code.
Example uses:
[code]int main()
{
printf("hello, world");
}[/code] """
aliases = ('[code]',)
generic = False
raw = True
notempty = True
def begin_html(self):
return '<div class="code">'
def end_html(self):
return '</div>'
def begin_lightscript(self):
return '```\n'
def end_lightscript(self):
return '```\n'
class InlineCodeTag(_InlineTag):
""" Inline code tag, doesn't display a box, simply doesn't evaluate
the content and uses monospace font.
Example uses:
`some inline code`
[inlinecode][b]The tags will be shown verbatim.[/b][/inlinecode]
[inlinecode][inlinecode][i]This also[/inlinecode] works![/inlinecode]
"""
aliases = ('`', '[inlinecode]')
generic = False
raw = True
def begin_html(self):
return '<span class="inline-code">'
def end_html(self):
return '</span>'
def begin_lightscript(self):
return '`'
def end_lightscript(self):
return '`'
class NoEvalTag(_InlineTag):
""" Inline code tag, simply doesn't evaluate the content.
Example uses:
[noeval][b]wow, and no need for monospace![/b][/noeval]
"""
aliases = ('[noeval]', '[nobbcode]')
generic = False
raw = True
# End of file.

View File

@ -1,129 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
import urllib.parse as _urlparse
from .. import BlockTag as _BlockTag
from html import escape as _htmlescape
__all__ = ["ImageTag", "AdminImageTag"]
class ImageTag(_BlockTag):
""" The main tag for displaying an image.
Example uses:
[img]picture_url[/img]
[img=center]picture_url[/img]
[img=12x24]picture_url[/img]
[img=center|12x24]picture_url[/img]
[img=x24|right]picture_url[/img]
"""
aliases = ('[img]',)
raw = True
def prepare(self, name, value):
_align = {
'center': ('center', False),
'centre': ('center', False),
'left': ('left', False),
'right': ('right', False),
'float': (None, True),
'floating': (None, True),
'float-left': ('left', True),
'float-center': ('center', True),
'float-centre': ('center', True),
'float-right': ('right', True),
}
self._width = None
self._height = None
self._align = None
self._float = False
for arg in ("", value)[value is not None].split('|'):
if not arg:
pass
elif arg[0] in '0123456789x':
self._width = None
self._height = None
dim = arg.split('x')
try:
self._width = int(dim[0])
except ValueError:
pass
try:
self._height = int(dim[1])
except ValueError:
pass
elif arg in _align:
al, fl = _align[arg]
if al is not None:
self._align = al
if fl:
self._float = True
def preprocess(self, content):
try:
self._image = self.image(content)
except:
url = _urlparse.urlparse(content)
if url.scheme not in ('http', 'https'):
raise Exception("No allowed prefix!")
self._image = content
def content_html(self):
if isinstance(self._image, str):
url = _htmlescape(self._image)
return '<p><a href="{}">{}</a></p>'.format(url, url)
style = []
cls = []
if self._width:
style.append('width: {}px'.format(self._width))
elif self._height:
style.append('width: auto')
if self._height:
style.append('height: {}px'.format(self._height))
elif self._width:
style.append('height: auto')
if self._float:
cls.append('img-float-{}'.format(self._align or 'right'))
elif self._align:
cls.append('img-{}'.format(self._align))
return '<img src="{}"{}{} />'.format(_htmlescape(self._image.embed),
' class="{}"'.format(' '.join(cls)) if cls else '',
' style="{}"'.format('; '.join(style)) if style else '')
def content_lightscript(self):
url = self._image.embed.replace('[', '%5B').replace(']', '%5D')
return '[[image:{}]]'.format(url)
class AdminImageTag(ImageTag):
""" This tag is special for Planète Casio, as it takes images from
the `ad`ministration's image folder.
It just adds this folder's prefix.
Example uses:
[adimg]some_picture.png[/img]
[adimg=center]some_picture.png[/img]
[adimg=12x24]some_picture.png[/img]
[adimg=center|12x24]some_picture.png[/img]
[adimg=x24|right]some_picture.png[/img]
"""
aliases = ('[adimg]',)
def preprocess(self, content):
self._url = 'https://www.planet-casio.com/images/ad/' + content
self._checkurl()
# End of file.

View File

@ -1,46 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
import urllib.parse as _urlparse
from .. import Image as _Image
__all__ = ["GenericImage"]
class GenericImage(_Image):
""" Get a direct image. Actually this doesn't test anything, we should
use like the Embed module again, as for videos. """
# FIXME: make that disappear one day for the OpenWebImage.
def __init__(self, content):
url = _urlparse.urlparse(content)
if url.scheme not in ('http', 'https'):
raise Exception("No allowed prefix!")
self.embed = content
# WARNING: This is only for demonstration sake. Do not use without a cache!
# This demonstration class uses the `embed-python` module.
#
#from embed import Embed as _Embed
#
#class OpenWebImage(_Image):
# """ Decentralized way to gather an image data. """
#
# def __init__(self, url):
# u = _urlparse.urlparse(url)
# if not u.scheme in ('https',):
# raise Exception
#
# embed = _Embed(url)
# embed = embed.embed
# assert embed['type'] == 'image'
#
# self.embed = embed['url']
# End of file.

View File

@ -1,60 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import InlineTag as _InlineTag
import re as _re
__all__ = ["LabelTag", "TargetTag"]
_labelexpr = _re.compile('^[a-z0-9-]{1,16}$', _re.I)
class LabelTag(_InlineTag):
""" The label tag, defines an anchor at a point of the post.
Example uses:
[label=installation]Installation de tel logiciel... (no ending req.)
[label=compilation][/label] Compilation de tel logiciel...
"""
aliases = ('[label]',)
def prepare(self, name, value):
if not _labelexpr.match(value):
raise Exception
self._label = value
def begin_html(self):
#name = 'label-{}'.format(self._label)
#if _v42compat:
# name += ' ' + self._label
name = self.tweak("label_prefix", "") + self._label
return '<a name="{}"></a>'.format(name)
class TargetTag(_InlineTag):
""" The goto tag, links to an anchor defined in the post.
Example uses:
[target=installation]Check out the installation manual[/target]!
"""
aliases = ('[target]',)
def prepare(self, name, value):
if not _labelexpr.match(value):
raise Exception
self._label = value
def begin_html(self):
#name = 'label-' + self._label
name = self.tweak("label_prefix", "") + self._label
return '<a href="#{}">'.format(name)
def end_html(self):
return '</a>'
# End of file.

View File

@ -1,182 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import InlineTag as _InlineTag
from html import escape as _htmlescape
__all__ = ["LinkTag", "ProfileTag", "TopicTag", "TutorialTag",
"ProgramTag"]
class LinkTag(_InlineTag):
""" The main link tag.
Example uses:
[url=https://example.org/hi]Go to example.org[/url]!
[url=/Fr/index.php][/url]
[url]https://random.org/randomize.php[/url] """
aliases = ('[url]',)
raw = True
def _validate(self):
for prefix in ('http://', 'https://', 'ftp://', '/', '#'):
if self._url.startswith(prefix):
break
else:
raise Exception("No allowed prefix!")
def prepare(self, name, value):
self._url = None
# If there is no value, wait until we have a content to
# decide if we are valid or not.
if value is None:
self.preprocess = self._preprocess_if_no_value
return
# Otherwise, get the URL and validate.
self._url = value
self._validate()
self.default = self._default_if_value
def _default_if_value(self):
return self._url
def _preprocess_if_no_value(self, content):
self._url = content
self._validate()
def begin_html(self):
target = self.tweak("link_target", "").casefold()
tattrs = ''
if target == 'blank':
tattrs = ' target="_blank" rel="noopener"'
return '<a href="{}"{}>'.format(_htmlescape(self._url), tattrs)
def end_html(self):
return '</a>'
def begin_lightscript(self):
return '['
def end_lightscript(self):
url = self._url.replace('(', '%28').replace(')', '%29')
return ']({})'.format(url)
class ProfileTag(LinkTag):
""" A special link tag for Planète Casio's profiles.
Adds the prefix to the content, and sets the value.
Example uses:
[profil]Cakeisalie5[/] """
aliases = ('[profil]', '[profile]')
def prepare(self, name, value):
# Override the LinkTag's prepare method.
pass
def preprocess(self, content):
# Check the username's content (see `check(…, "pseudo")` in PCv42).
username = content
allowed = "abcdefghijklmnopqrstuvwxyz0123456789_ -."
if any(car not in allowed for car in username.lower()):
raise ValueError("invalid username!")
# Prepare the tag.
self._url = 'https://www.planet-casio.com/Fr/compte/voir_profil.php' \
'?membre={}'.format(username)
self._validate()
class TopicTag(LinkTag):
""" A special link tag for Planète Casio's topics.
Adds the prefix to the content, and sets the value.
Example uses:
[topic]234[/] """
aliases = ('[topic]',)
def prepare(self, name, value):
# Override the LinkTag's prepare method.
pass
def preprocess(self, content):
# Check the topic number.
topic = int(content)
# Prepare the tag.
self._url = 'https://www.planet-casio.com/Fr/forums/' \
f'lecture_sujet.php?id={topic}'
self._validate()
class TutorialTag(LinkTag):
""" A special link tag for Planète Casio's tutorial.
Adds the prefix to the content, and sets the value.
Example uses:
[tutorial]71[/tutorial]
[tuto]71[/tuto] """
aliases = ('[tutorial]', '[tuto]')
def prepare(self, name, value):
# Override the LinkTag's prepare method.
pass
def preprocess(self, content):
# Check the topic number.
topic = int(content)
# Prepare the tag.
self._url = 'https://www.planet-casio.com/Fr/programmation/' \
f'tutoriels.php?id={topic}'
self._validate()
class ProgramTag(LinkTag):
""" A special link tag for a Planète Casio's program.
Adds the prefix to the content, and sets the value.
Example uses:
[program]3598[/program]
[prog]3598[/prog] """
aliases = ('[program]', '[prog]')
def prepare(self, name, value):
# Override the LinkTag's prepare method.
pass
def preprocess(self, content):
# Check the program number.
program = int(content)
# Prepare the tag.
self._url = 'https://www.planet-casio.com/Fr/programmes/' \
f'voir_un_programme_casio.php?showid={program}'
self._validate()
# End of file.

View File

@ -1,113 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import BlockTag as _BlockTag
__all__ = ["ListTag", "ListElementTag"]
# Bullet style names.
_ol_list_style_types = {
'disc': 'disc',
'circle': 'circle',
'square': 'square',
}
_ul_list_style_types = {
'1': 'decimal',
'a': 'lower-alpha',
'A': 'upper-alpha',
'i': 'lower-roman',
'I': 'upper-roman',
}
_list_style_types = _ol_list_style_types.copy()
_list_style_types.update(_ul_list_style_types)
_ul_lst_names = set(_ul_list_style_types.keys())
_ol_lst_names = set(_ol_list_style_types.keys())
# Tag definitions.
class _ListTagBase(_BlockTag):
pass
class ListElementTag(_BlockTag):
""" List element for basic lists (see `ListTag`). """
aliases = ('[*]', '[li]')
only_in = (_ListTagBase,)
notempty = True
superblock = True
not_within_itself = True
def begin_html(self):
return '<li>'
def end_html(self):
return '</li>'
class ListTag(_ListTagBase):
""" Main tag for making basic lists.
Example use:
[ul]
[*] Item number one.
[*] Item number [b]two[/b].
[/ul]
"""
aliases = ('[list]', '[ul]', '[ol]')
notempty = True
superblock = True
allowed_tags = (ListElementTag,)
no_text = True
def prepare(self, name, value):
us = _ul_lst_names
os = _ol_lst_names
if name == '[list]' and value == None:
self._tag = 'ul'
self._style = None
elif name == '[list]' and value in us:
self._tag = 'ul'
self._style = value
elif name == '[list]' and value in os:
self._tag = 'ol'
self._style = value
elif name == '[ul]' and value == None:
self._tag = 'ul'
self._style = None
elif name == '[ul]' and value in us:
self._tag = 'ul'
self._style = value
elif name == '[ol]' and value == None:
self._tag = 'ol'
self._style = None
elif name == '[ol]' and value in os:
self._tag = 'ol'
self._style = value
else:
raise ValueError("invalid bullet style")
# Find out the HTML style name.
if self._style != None:
self._style = _list_style_types[self._style]
def begin_html(self):
tag = f'<{self._tag}'
if self._style != None:
tag += f' style="list-style-type: {self._style}"'
tag += '>'
return tag
def end_html(self):
return f'</{self._tag}>'
# End of file.

View File

@ -1,36 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import BlockTag as _BlockTag
__all__ = ["ProgressTag"]
class ProgressTag(_BlockTag):
""" Progress tag, used to display the progress on anything.
Usage:
[progress=50]My great progress bar[/progress]
[progress=100][/progress] """
aliases = ('[progress]',)
raw = True
def prepare(self, name, value):
self._val = int(value)
if self._val < 0 or self._val > 100:
raise Exception("progress value should be between 0 and 100 incl.")
def begin_html(self):
return '<div>'
def end_html(self):
return '' \
'<div class="progress">' \
'<div class="progress-inner" style="width: {}%;">{}%' \
'</div></div></div>'.format(self._val, self._val)
# End of file.

View File

@ -1,49 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import BlockTag as _BlockTag
__all__ = ["QuoteTag"]
class QuoteTag(_BlockTag):
""" The main tag for quoting someone.
Example uses:
[quote]Hey, I said that![/quote]
[quote=Someone important]I said something important, and it's
multiline and [b]formatted[/b]!
[quote=Someone else]Heck, he's even quoting me in his quote![/quote]
[/quote]
"""
aliases = ('[quote]',)
superblock = True
notempty = True
procvalue = True
def prepare(self, name, value):
self._value = value
def begin_html(self):
f = '<div class="citation">'
if self._value:
f += '<p><b>{} a écrit:</b></p>'.format(self._value)
return f
def end_html(self):
return '</div>'
def begin_lightscript(self):
text = '<<<'
if self._value:
text += ' ' + self._value
return text + '\n'
def end_lightscript(self):
return '<<<\n'
# End of file.

View File

@ -1,44 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
import string as _string
from .. import InlineTag as _InlineTag
__all__ = ["RotTag"]
class RotTag(_InlineTag):
""" Tag which un-rot13 a content.
Demonstration tag for content processing.
Example uses:
[rot=13]obawbhe[/rot]
[rot13]Obawbhe[/rot13]
"""
aliases = ('[rot]', '[rot13]')
raw = True
def prepare(self, name, value):
if name == "[rot]":
if not value:
rot = 13
else:
rot = int(value)
assert 1 <= rot <= 25
else:
rot = int(name[4:-1])
upr0 = _string.ascii_uppercase
upr1 = upr0[rot:] + upr0[:rot]
lwr0 = _string.ascii_lowercase
lwr1 = lwr0[rot:] + lwr0[:rot]
self._trans = str.maketrans(upr0 + lwr0, upr1 + lwr1)
def preprocess(self, content):
return str.translate(content, self._trans)
# End of file.

View File

@ -1,36 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from html import escape as _htmlescape
from .. import BlockTag as _BlockTag
__all__ = ["ShowTag"]
class ShowTag(_BlockTag):
""" Tag which shows the HTML code that is produced by textout().
Example uses:
[show][b]hello world![/show]
"""
aliases = ('[show]',)
notempty = True
superblock = True
inlined = True
generic = False
raw = False
def preprocess_html(self, content):
return _htmlescape(content)
def begin_html(self):
return '<span class="inline-code">'
def end_html(self):
return '</span>'
# End of file.

View File

@ -1,171 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import Smiley as _Smiley
__all__ = ["TwistedSmiley", "EvilSmiley", "SmileSmiley", "WinkSmiley",
"SadSmiley", "GrinSmiley", "HeheSmiley", "CoolSmiley", "Cool2Smiley",
"MadSmiley", "EekSmiley", "MrGreenSmiley", "ShockedSmiley",
"ConfusedSmiley", "EyebrowsSmiley", "CrySmiley", "LolSmiley",
"SorrySmiley", "RollEyesSmiley", "WazaSmiley", "HereSmiley",
"BowSmiley", "GoodSmiley", "LoveSmiley", "OuchSmiley", "FacepalmSmiley",
"InsultsSmiley", "WhatSmiley", "ExclSmiley"]
_prefix = '/images/smileys/'
class TwistedSmiley(_Smiley):
aliases = ('>:)',)
url = _prefix + 'twisted.gif'
class EvilSmiley(_Smiley):
aliases = ('>:(', ':grr:')
url = _prefix + 'evil.gif'
class SmileSmiley(_Smiley):
aliases = (':)',)
url = _prefix + 'smile.gif'
class WinkSmiley(_Smiley):
aliases = (';)',)
url = _prefix + 'wink.gif'
class SadSmiley(_Smiley):
aliases = (':(',)
url = _prefix + 'sad.gif'
class GrinSmiley(_Smiley):
aliases = (':D', ':grin:')
url = _prefix + 'grin.gif'
class HeheSmiley(_Smiley):
aliases = (':p',)
url = _prefix + 'hehe.gif'
class CoolSmiley(_Smiley):
aliases = (':cool:',)
url = _prefix + 'cool.gif'
class Cool2Smiley(_Smiley):
aliases = ('8-)',)
url = _prefix + 'cool2.gif'
class MadSmiley(_Smiley):
aliases = (':@',)
url = _prefix + 'mad.gif'
class EekSmiley(_Smiley):
aliases = ('0_0',)
url = _prefix + 'eek.gif'
class MrGreenSmiley(_Smiley):
aliases = (':E', ':mrgreen:')
url = _prefix + 'mrgreen.gif'
class ShockedSmiley(_Smiley):
aliases = (':O',)
url = _prefix + 'shocked.gif'
class ConfusedSmiley(_Smiley):
aliases = (':s', ':oops:')
url = _prefix + 'confused2.gif'
class EyebrowsSmiley(_Smiley):
aliases = ('^^',)
url = _prefix + 'eyebrows.gif'
class CrySmiley(_Smiley):
aliases = (":'(", ":cry:")
url = _prefix + 'cry.gif'
# FIXME
#class WhistleSmiley(_Smiley):
# aliases = (":-°", ':whistle:')
# url = _prefix + 'whistle.gif'
# height = '15px'
class LolSmiley(_Smiley):
aliases = (":lol:",)
url = _prefix + 'lol.gif'
class SorrySmiley(_Smiley):
aliases = (":sry:",)
url = _prefix + 'redface.gif'
class RollEyesSmiley(_Smiley):
aliases = (":mmm:",)
url = _prefix + 'rolleyes.gif'
class WazaSmiley(_Smiley):
aliases = (":waza:",)
url = _prefix + 'waza.gif'
class HereSmiley(_Smiley):
aliases = (":here:", ":arrow:")
url = _prefix + 'pointer.gif'
class BowSmiley(_Smiley):
aliases = (":bow:",)
url = _prefix + 'bow.gif'
class GoodSmiley(_Smiley):
aliases = (":good:",)
url = _prefix + 'welldone.gif'
class LoveSmiley(_Smiley):
aliases = (":love:",)
url = _prefix + 'love.gif'
class OuchSmiley(_Smiley):
aliases = (":aie:",)
url = _prefix + 'banghead2.gif'
class FacepalmSmiley(_Smiley):
aliases = (":facepalm:",)
url = _prefix + 'facepalm.gif'
class InsultsSmiley(_Smiley):
aliases = (":argh:",)
url = _prefix + 'insults.gif'
class WhatSmiley(_Smiley):
aliases = (":?:",)
url = _prefix + 'what.gif'
class ExclSmiley(_Smiley):
aliases = (":!:",)
url = _prefix + 'excl.gif'
# End of file.

View File

@ -1,50 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import BlockTag as _BlockTag
__all__ = ["SpoilerTag"]
class SpoilerTag(_BlockTag):
""" Hide content at first glance, force people to click to read content.
These elements can contain 'secret' elements such as solutions, source
code, or various other things.
Example uses:
[spoiler]This is hidden![/spoiler]
[spoiler=Y a quelque chose de caché!|Ah, bah en fait non :)]:E
And it's multiline, [big]and formatted[/big], as usual :D[/spoiler]
"""
aliases = ('[spoiler]',)
superblock = True
notempty = True
procvalue = True
def prepare(self, name, value):
self._closed = "Cliquez pour découvrir"
self._open = "Cliquez pour recouvrir"
if value:
titles = value.split('|')
if titles[0]:
self._closed = titles[0]
if len(titles) >= 2 and (len(titles) > 2 or titles[1]):
self._open = '|'.join(titles[1:])
def begin_html(self):
return '<div class="spoiler">' \
'<div class="title on" onclick="toggleSpoiler(this.parentNode, ' \
'\'open\');"><p>{}</p></div>' \
'<div class="title off" onclick="toggleSpoiler(this.parentNode, ' \
'\'close\');"><p>{}</p></div>' \
'<div class="off">'.format(self._closed, self._open)
def end_html(self):
return '</div></div>'
# End of file.

View File

@ -1,255 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import InlineTag as _InlineTag
from thcolor import Color as _Color
__all__ = ["TextTag"]
def _get_color(text):
return _Color.from_text(text).rgba()
# ---
# Data.
# ---
_big_size = 2.00
_sml_size = 0.75
_fonts = {
"arial": "Arial",
"comic": "Comic MS",
"tahoma": "Tahoma",
"courier": "Courier",
"haettenschweiler": "Haettenschweiler",
"mono": "monospace",
"monospace": "monospace"
}
# ---
# Tag definition.
# ---
class TextTag(_InlineTag):
""" Main tag for setting text formatting.
Example uses:
[b]Bold text.[/b]
[i]Italic text.[/i]
[u]Underlined text.[/u]
[strike]Striked text.[/strike]
[striked]Text strikes again.[/striked]
[font=arial]Arial text.[/font]
[arial]Arial text again.[/arial]
[blue]This will be in blue[/blue]
[color=blue]This as well[/color]
[color=rgb(255, 255, 255, 0.4)]BLACKNESS[/color]
[color=hsl(0, 100%, 0.5)]This will be red.[/color]
Also supports a hack used on Planète Casio for a while, which
is a CSS injection, e.g.:
[color=brown; size: 16pt]Hello world![/color]
"""
aliases = ('[css]', '[b]', '[i]', '[u]', '[o]', '[s]', '[strike]',
'[monospace]', '[mono]', '[font]', '[color]', '[c]',
'[size]', '[big]', '[small]',
'[arial]', '[comic]', '[tahoma]', '[courier]',
'[haettenschweiler]', '[red]', '[green]', '[blue]',
'[yellow]', '[maroon]', '[purple]', '[gray]',
'[grey]', '[brown]')
notempty = True
def prepare(self, name, value):
self._bold = False
self._italic = False
self._underline = False
self._overline = False
self._strike = False
self._font = None
self._color = None
self._bgcolor = None
self._size = None
# Retrieve the part corresponding to the CSS injection if
# there is one.
def get_props(value):
props = ''
if value is not None:
index = value.find(';')
if index >= 0:
props = value[index + 1:]
value = value[:index]
return value, props
# Set the properties from the main tag name and value.
name = name[1:-1]
props = ""
if name == "css":
props = value
elif name == "b":
self._bold = True
elif name == "i":
self._italic = True
elif name == "u":
self._underline = True
elif name == "o":
self._overline = True
elif name in ("s", "strike", "striked"):
self._strike = True
elif name in ("color", 'c'):
value, props = get_props(value)
self._color = _get_color(value)
elif name == 'f':
value, props = get_props(value)
self._bgcolor = _get_color(value)
elif name == "font":
value, props = get_props(value)
assert value in _fonts
self._font = _fonts[value]
elif name in ('size', 'big', 'small'):
if name != 'size':
value = name
if value == 'big':
self._size = _big_size
elif value == 'small':
self._size = _sml_size
else:
self._size = round(int(value) / 100.0, 2)
assert 0 < self._size <= 3.0
if self._size == 1.0:
self._size = None
elif name in _fonts:
self._font = name
else:
self._color = _get_color(name)
# Handle the CSS properties (from injection or via the `[css]` tag).
for prop in props.split(';'):
prop = prop.strip()
if not prop:
continue
name, *value = prop.split(':')
if not value:
continue
name = name.strip()
value = ':'.join(value).strip()
if name in ('size', 'font-size'):
# Control the font size.
unit = 'pt'
if value.endswith('pt'):
value = value[:-2].rstrip()
elif value.endswith('em'):
unit = 'em'
value = value[:-2].rstrip()
if not value or \
any(c != '0' for c in value[:-3]) or \
any(c not in '0123456789' for c in value[-3:]):
continue
value = int(value[-3:])
if unit == 'pt':
value /= 12 # XXX: default em size
if 0 < value <= 3.0:
self._size = value
elif name == 'color':
# Control the text color.
self._color = _get_color(value)
elif name == 'background-color':
# Control the background color.
self._bgcolor = _get_color(value)
def _get_css(self):
""" Get the `style` CSS classes and properties for HTML output. """
classes, props = [], []
if not self.tweak('obsolete_tags', True):
if self._bold:
props.append('font-weight: bold')
if self._italic:
props.append('font-style: italic')
if self._underline or self._strike or self._overline:
props.append('text-decoration:{}{}{}'.format(' underline'
if self._underline else '', ' line-through'
if self._strike else '', ' overline'
if self._overline else ''))
else:
if self._overline:
props.append('text-decoration:{}'.format(' overline'
if self._overline else ''))
if self._font:
props.append('font-family: ' + self._font)
if self._color:
# `transparent` is at least considered as a special value,
# or at most as an alias to `rgba(0,0,0,0)`.
if self._color[3] == 0.0:
props.append('color: transparent')
else:
# always append the #rgb color: it will be read by older
# browsers if the `rgba()` function isn't supported.
props.append('color: #%02X%02X%02X' % self._color[0:3])
if self._color[3] < 1.0:
props.append('color: rgba({}, {}, {}, {})'
.format(*self._color))
if self._bgcolor and self._bgcolor[3] != 0.0:
props.append('background-color: #%02X%02X%02X' % self._bgcolor[0:3])
if self._bgcolor[3] < 1.0:
props.append('background-color: rgba({}, {}, {}, {})'
.format(*self._bgcolor))
if self._size:
props.append('font-size: {}em'.format(self._size))
return classes, props
def begin_html(self):
obsoletetags = self.tweak('obsolete_tags', True)
cls, props = self._get_css()
if cls or props:
props = '<span{}{}>'.format(' class="{}"'.format(' '.join(cls))
if cls else '', ' style="{}"'.format('; '.join(props))
if props else '')
else:
props = ''
return '' \
+ ('', '<b>')[obsoletetags and self._bold] \
+ ('', '<i>')[obsoletetags and self._italic] \
+ ('', '<u>')[obsoletetags and self._underline] \
+ ('', '<strike>')[obsoletetags and self._strike] \
+ props
def end_html(self):
obsoletetags = self.tweak('obsolete_tags', True)
return '' \
+ ('', '</span>')[any(self._get_css())] \
+ ('', '</strike>')[obsoletetags and self._strike] \
+ ('', '</u>')[obsoletetags and self._underline] \
+ ('', '</i>')[obsoletetags and self._italic] \
+ ('', '</b>')[obsoletetags and self._bold]
# End of file.

View File

@ -1,48 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
from .. import BlockTag as _BlockTag
__all__ = ["TitleTag"]
class TitleTag(_BlockTag):
""" The title tag.
Example uses:
[title]Some title[/title]
[subtitle]Some subtitle[/subtitle]
"""
aliases = ('[title]', '[subtitle]')
raw = True
def prepare(self, name, value):
level = self.tweak("title_level", "1").casefold()
if isinstance(level, str) and level[0] == "h":
level = level[1:]
level = int(level)
assert 1 <= level <= 5
# Name.
self._level = name[1:-1]
# HTML tag.
level += self._level == "subtitle"
self._tag = f"h{level}"
def begin_html(self):
return f'<{self._tag} class="{self._level}">'
def end_html(self):
return f'</{self._tag}>'
def begin_lightscript(self):
return '#' * ((self._level == "subtitle") + 1) + ' '
# End of file.

View File

@ -1,144 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
import urllib.parse as _urlparse
from html import escape as _htmlescape
from .. import BlockTag as _BlockTag
__all__ = ["VideoTag", "YoutubeTag"]
_defaultratio_w = 16
_defaultratio_h = 9
class VideoTag(_BlockTag):
""" The video tag, puts a preview of the video whose URL is given.
Only a few 'big' services are supported for now.
Example uses:
[video]video_url[/video]
[video=4:3]video_url[/video]
[video tiny]video_url[/video tiny]
[video]https://www.youtube.com/watch?v=yhXpV8hRKxQ[/video]
"""
aliases = ('[video]', '[video tiny]')
raw = True
noinline = True
def prepare(self, name, value):
""" Prepare the video tag. """
_align = {
'center': ('center', False),
'centre': ('center', False),
'left': ('left', False),
'gauche': ('left', False),
'right': ('right', False),
'droite': ('right', False),
'float': (None, True),
'floating': (None, True),
'flotte': (None, True),
'flottant': (None, True),
'float-left': ('left', True),
'float-center': ('center', True),
'float-centre': ('center', True),
'float-right': ('right', True),
}
self._sizeclass = "video-tiny" if "tiny" in name \
else None
self._align = None
self._float = False
self._ratio = None
for arg in map(str.strip, (value or "").split('|')):
if not arg:
pass
elif arg[0] in '0123456789:':
rx, ry = _defaultratio_w, _defaultratio_h
rn = 0
rat = arg.split(':')
try: rx = int(rat[0]); rn += 1
except (IndexError, ValueError): pass
try: ry = int(rat[1]); rn += 1
except (IndexError, ValueError): pass
if rn:
self._ratio = round(ry / rx, 4)
elif arg in _align:
al, fl = _align[arg]
if al is not None:
self._align = al
if fl:
self._float = True
def preprocess(self, content):
try:
self._video = self.video(content)
except Exception:
url = _urlparse.urlparse(content)
if url.scheme not in ('http', 'https'):
raise Exception("No allowed prefix!")
self._video = content
def content_html(self):
""" Produce the embed code for the given type. """
if isinstance(self._video, str):
url = _htmlescape(self._video)
target = self.tweak("link_target", "").casefold()
tattrs = ''
if target == 'blank':
tattrs = ' target="_blank" rel="noopener"'
return '<p><a href="{}"{}>{}</a></p>'.format(url, tattrs, url)
align = "float-" + (self._align or "left") if self._align \
else self._align
if self._ratio:
ratio = self._ratio * 100
elif hasattr(self._video, 'ratio'):
ratio = self._video.ratio * 100
else:
ratio = round(_defaultratio_h / _defaultratio_w, 4) * 100
iratio = int(ratio)
if ratio == iratio:
ratio = iratio
ratio = str(ratio)
code = '<div class="video-wrapper{}{}"{}>' \
.format(f" {self._sizeclass}" if self._sizeclass else "",
f' img-{align}' if align else "",
f' style="padding-bottom: {ratio}%"')
code += '<iframe src="{}" frameborder="0" allowfullscreen>' \
'</iframe>'.format(self._video.embed)
return code + '</div>'
def content_lightscript(self):
url = self._video.replace('[', '%5B').replace(']', '%5D')
return '[[image:{}]]'.format(url)
class YoutubeTag(VideoTag):
""" Alias for the video tag with only the Youtube possibility.
Example uses:
[youtube]okMK1NYRySI[/youtube] """
aliases = ('[youtube]',)
def preprocess(self, content):
super().preprocess(f'https://www.youtube.com/watch?v={content}')
# End of file.

View File

@ -1,100 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
import re as _re
import urllib.parse as _urlparse
from .. import Video as _Video
__all__ = ["YouTubeVideo", "DailymotionVideo", "VimeoVideo"]
class YouTubeVideo(_Video):
""" Get a video from Youtube. """
_hexcode = _re.compile('[a-zA-Z0-9_-]+')
def __init__(self, url):
url = _urlparse.urlparse(url)
if url.scheme not in ('http', 'https'):
raise Exception
if url.netloc == "youtu.be":
self._id = url.path[1:]
if not self._hexcode.match(self._id):
raise ValueError("invalid id")
elif url.netloc in ('youtube.com', 'www.youtube.com'):
if url.path != '/watch':
raise ValueError("invalid id")
self._id = _urlparse.parse_qs(url.query)['v'][0]
if not self._hexcode.fullmatch(self._id):
raise Exception
else:
raise ValueError("unknown URL")
self.embed = f"https://www.youtube.com/embed/{self._id}"
class DailymotionVideo(_Video):
""" Get a video from Dailymotion. """
_dailypath = _re.compile('^/video/([a-z0-9]+)$')
def __init__(self, url):
url = _urlparse.urlparse(url)
if url.scheme not in ('http', 'https'):
raise Exception
if url.netloc in ('dailymotion.com', 'www.dailymotion.com'):
self._code = self._dailypath.match(url.path).groups()[0]
else:
raise ValueError("unknown URL")
self.embed = f"https://www.dailymotion.com/embed/video/{self._code}"
class VimeoVideo(_Video):
""" Get a video from Vimeo. """
_numcode = _re.compile('^/[0-9]+$')
def __init__(self, url):
url = _urlparse.urlparse(url)
if url.scheme not in ('http', 'https'):
raise Exception
if url.netloc in ('vimeo.com', 'www.vimeo.com'):
self._code = url.path[1:]
if not self._numcode.match(self._code):
raise ValueError("invalid video code")
else:
raise ValueError("unknown URL")
self.embed = f"https://player.vimeo.com/video/{self._code}" \
"?title=0&byline=0&portrait=0"
# WARNING: This is only for demonstration's sake. Do not use without a cache!
# This demonstration class uses the `embed-python` module.
#
#from embed import Embed as _Embed
#
#class OpenWebVideo(_Video):
# """ Decentralized way to gather a video data. """
#
# def __init__(self, url):
# u = _urlparse.urlparse(url)
# if not u.scheme in ('https',):
# raise Exception
#
# embed = _Embed(url)
# embed = embed.embed
# assert embed['type'] == 'video'
#
# self.embed = embed['url']
# if 'ratio' in embed:
# self.ratio = embed['ratio'] / 100
# End of file.

View File

@ -1,35 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2018 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Built-in tags and smileys for the `textoutpc` module.
Some of these options will probably have to move to a separate,
Planète Casio-specific module, but for now they live here.
"""
# Tags.
from ._Align import *
from ._Code import *
from ._Image import *
from ._Label import *
from ._Link import *
from ._List import *
from ._Progress import *
from ._Quote import *
from ._Rot import *
from ._Show import *
from ._Spoiler import *
from ._Text import *
from ._Title import *
from ._Video import *
# Other resources (smileys, multimedia).
from ._Smileys import *
from ._Images import *
from ._Videos import *
# End of file.

32
textoutpc/exceptions.py Normal file
View File

@ -0,0 +1,32 @@
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
# *****************************************************************************
"""Exceptions for textoutpc."""
from __future__ import annotations
class TagValidationError(Exception):
"""A tag validation has failed for an unknown error."""
__slots__ = ("message", "args", "kwargs")
def __init__(self, message: str = "", *args, **kwargs):
self.message = message
self.args = args
self.kwargs = kwargs
class MissingValue(TagValidationError):
"""A value should have been provided, and wasn't."""
class UnexpectedValue(TagValidationError):
"""No value should have been provided, but one was."""
class InvalidValue(TagValidationError):
"""An invalid value was provided."""

277
textoutpc/lexer.py Normal file
View File

@ -0,0 +1,277 @@
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2018-2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
# *****************************************************************************
"""Lexer definition for textoutpc."""
from __future__ import annotations
from collections.abc import Iterator
from io import StringIO
from typing import Any, NamedTuple, TextIO, Union
import regex
from typing_extensions import TypeAlias
__all__ = [
"CloseTagEntity",
"NewlineEntity",
"OpenTagEntity",
"SpecialEntity",
"TextEntity",
"iter_textout_entities",
]
# A tag can basically be one of the following things:
# - a starting tag, looking like [<name>] or [<name>=<attribute>]
# - an ending tag, looking like [/<name>]
# - a special tag (starting or ending), usually one-char (the only
# one currently available is the ` tag).
#
# A tag name is 32 chars at most (at least 1 char).
# A closing tag can have no name, which means that it will close the
# last opened tag automatically.
# A tag attribute is 256 chars at most.
#
# FIXME: Check the sizes.
MAX_TAG_NAME_SIZE: int = 32
MAX_TAG_VALUE_SIZE: int = 256
MAX_ENTITY_SIZE: int = MAX_TAG_NAME_SIZE + MAX_TAG_VALUE_SIZE + 3
BUFFER_SIZE: int = 1024 # Must be more than MAX_ENTITY_SIZE!
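# For instance, "[quote=Someone]" is lexed as an opening entity with name
# "quote" and value "Someone", "[/quote]" as a closing entity, and a lone
# "`" as a special entity; anything else becomes text or newline entities.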
ENTITY_RE = regex.compile(
r"""
\[\s*[\\\/] (?P<ename>
(?P<ename_e>
[^\[\]\=]+ (\[(?&ename_e)*\]?)*
| [^\[\]\=]* (\[(?&ename_e)*\]?)+
)*
)
\s?\]
|
\[\s* (?P<bname>
(?P<bname_e>
[^\[\]\=]* (\[(?P&bname_e)*\]?)+
| [^\[\]\=]+ (\[(?P&bname_e)*\]?)*
)+
)
(\s* = \s* (?P<value>
(?P<value_e>
[^\[\]]* (\[(?&value_e)*\]?)+
| [^\[\]]+ (\[(?&value_e)*\]?)*
)*
))?
\s?\]
|
(?P<newline>\n|\r\n|\r)
|
(?P<sname>`)
""",
regex.VERBOSE | regex.DOTALL | regex.MULTILINE,
)
class OpenTagEntity(NamedTuple):
"""Explicit opening of a tag."""
name: str
"""Name of the tag that is being opened."""
value: str | None = None
"""Optional value transmitted with the tag."""
raw: str = ""
"""Raw entity, if need be to yield it."""
def __eq__(self, other: Any) -> bool:
return (
isinstance(other, OpenTagEntity)
and other.name == self.name
and other.value == self.value
)
class CloseTagEntity(NamedTuple):
"""Closing of a tag closing object for textout BBCode.
:param name: The name of the tag that is being closed.
:param full: The full entity, if need be to yield it.
"""
name: str
"""Name of the tag that is being closed."""
raw: str = ""
"""Raw entity, if need be to yield it."""
def __eq__(self, other: Any) -> bool:
return isinstance(other, CloseTagEntity) and other.name == self.name
class SpecialEntity(NamedTuple):
"""Special characters that could mean the opening or closing of a tag.
:param value: The special character(s) for the entity.
"""
value: str
"""Special character(s) for the entity."""
def __eq__(self, other: Any) -> bool:
return isinstance(other, SpecialEntity) and other.value == self.value
class NewlineEntity(NamedTuple):
"""Entity representing a newline."""
def __eq__(self, other: Any) -> bool:
return isinstance(other, NewlineEntity)
class TextEntity(NamedTuple):
"""Entity representing raw text."""
content: str
"""Content in the text."""
def __eq__(self, other: Any) -> bool:
return isinstance(other, TextEntity) and other.content == self.content
NEWLINE_ENTITY_INSTANCE = NewlineEntity()
def get_textout_entity_from_match(
match: regex.Match,
) -> NewlineEntity | OpenTagEntity | CloseTagEntity | SpecialEntity | None:
"""Get a textout entity from the given match.
:param match: The full (non-partial) match to yield an entity from.
:return: The obtained entity, or None if an error has occurred during
matching.
"""
parts = match.groupdict()
if parts["newline"] is not None:
return NEWLINE_ENTITY_INSTANCE
if parts["bname"] is not None:
name = parts["bname"]
value = parts["value"]
if len(name) > MAX_TAG_NAME_SIZE or (
value is not None and len(value) > MAX_TAG_VALUE_SIZE
):
return None
return OpenTagEntity(
name=name.casefold(),
value=value,
raw=match.group(0),
)
if parts["ename"] is not None:
name = parts["ename"]
if len(name) > MAX_TAG_NAME_SIZE:
return None
return CloseTagEntity(
name=name.casefold(),
raw=match.group(0),
)
if parts["sname"] is None: # pragma: no cover
raise AssertionError("sname should be filled here!")
return SpecialEntity(value=parts["sname"])
Entity: TypeAlias = Union[
OpenTagEntity,
CloseTagEntity,
SpecialEntity,
NewlineEntity,
TextEntity,
]
def iter_textout_entities(
stream_or_string: TextIO | str,
/,
) -> Iterator[Entity]:
"""Iterate over textout entities.
:param stream_or_string: The text stream or string to read from.
:return: The iterator for textout entities and raw text.
"""
stream: TextIO | None
if isinstance(stream_or_string, str):
stream = StringIO(stream_or_string)
else:
stream = stream_or_string
buf = "" # Current buffer of unprocessed input.
text = "" # Text buffer, to avoid consecutive text yields.
while True:
if not buf and stream is not None:
buf = stream.read(BUFFER_SIZE - len(buf))
if not buf:
break
# Try and match a tag.
result = ENTITY_RE.search(buf, partial=True)
if not result or not result.group(0):
text += buf
buf = ""
continue
# If there is some text, return it.
start, end = result.span()
if start > 0:
text += buf[:start]
buf = buf[start:]
if not result.partial:
# Result is actually exploitable, we can go on!
pass
elif len(buf) >= MAX_ENTITY_SIZE:
# A partial result cannot be longer than the maximum entity size!
# In such a case, maybe if we start later, we can get a full match?
text += buf[:1]
buf = buf[1:]
continue
else:
# We need to complete the buffer from here to get a full tag.
if stream is not None:
new_data = stream.read(BUFFER_SIZE - len(buf))
if new_data:
# We have full data to complete the match, we need to try!
buf += new_data
continue
# We've reached the end of our stream, we need to continue with
# what we've got. Maybe if we start later, we can get a full
# match?
text += buf[:1]
buf = buf[1:]
stream = None
continue
entity = get_textout_entity_from_match(result)
if entity is None:
text += buf[:1]
buf = buf[1:]
continue
if text:
yield TextEntity(content=text)
text = ""
buf = buf[end - start :]
yield entity
if text:
yield TextEntity(content=text)
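# NOTE: Illustration only, not part of this module: a minimal usage sketch.
# On the sample string below, the iterator yields an OpenTagEntity for "b",
# a TextEntity for "hello", a CloseTagEntity for "b" and a final TextEntity
# for " world".
#
#for entity in iter_textout_entities("[b]hello[/b] world"):
#    print(entity)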

44
textoutpc/nodes.py Normal file
View File

@ -0,0 +1,44 @@
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
# *****************************************************************************
"""Nodes specific to textoutpc.
The terminology used to categorize nodes defined here is from the `docutils
document tree <doctree_>`_ page, and as used in `nodes.py`_.
.. _doctree: https://docutils.sourceforge.io/docs/ref/doctree.html#toc-entry-1
.. _nodes.py:
https://github.com/docutils/docutils/blob/master/docutils/docutils/nodes.py
"""
from __future__ import annotations
from docutils.nodes import Body, Element, General, TextElement
class progress(General, TextElement):
"""Simple body element used to represent a progress bar, as a block."""
value: float
"""The value between 0 and 100 of the progress bar."""
def __init__(self, *args, value: float, **kwargs):
super().__init__(*args, **kwargs)
self.value = value
class spoiler(Body, Element):
"""Compound body element used to represent a spoiler, as a block."""
closed_title: str
"""Label to display as the title while the spoiler is closed."""
opened_title: str
"""Label to display as the title while the spoiler is opened."""
def __init__(self, *args, closed_title: str, opened_title: str, **kwargs):
super().__init__(*args, **kwargs)
self.closed_title = closed_title
self.opened_title = opened_title
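# NOTE: Illustration only, not part of this module: building these nodes by
# hand. The texts are arbitrary examples; the spoiler titles reuse the
# defaults of the [spoiler] BBCode tag.
#
#bar = progress("", "75%", value=75.0)
#block = spoiler(
#    "",
#    closed_title="Cliquez pour découvrir",
#    opened_title="Cliquez pour recouvrir",
#)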

320
textoutpc/parser.py Normal file
View File

@ -0,0 +1,320 @@
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2018-2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
# *****************************************************************************
"""Parser definition for textoutpc."""
from __future__ import annotations
from typing import NamedTuple, Sequence
from docutils.nodes import document as Document, Node, Text
from docutils.parsers import Parser
from .builtin import (
AdminImageTag,
AlignTag,
CodeTag,
ImageTag,
InlineCodeTag,
LabelTag,
LinkTag,
NoEvalTag,
ProfileTag,
ProgressTag,
RotTag,
SpoilerTag,
TargetTag,
TextTag,
)
from .exceptions import TagValidationError
from .lexer import (
CloseTagEntity,
Entity,
NewlineEntity,
OpenTagEntity,
TextEntity,
iter_textout_entities,
)
from .tags import Tag
BUILTIN_TAGS = {
# TODO: Add the [calc] BBCode tag.
# TODO: Add the [quote] BBCode tag.
# TODO: Add the [indent] BBCode tag.
# TODO: Add the [list] and [li] BBCode tags.
# TODO: Add the [table], [tr], [td] and [th] BBCode tags.
# TODO: Add the [video] and [video tiny] BBCode tags.
"`": InlineCodeTag,
"[adimg]": AdminImageTag,
"[arial]": TextTag,
"[b]": TextTag,
"[big]": TextTag,
"[blue]": TextTag,
"[brown]": TextTag,
"[c]": TextTag,
"[center]": AlignTag,
"[code]": CodeTag,
"[color]": TextTag,
"[comic]": TextTag,
"[courier]": TextTag,
"[css]": TextTag,
"[font]": TextTag,
"[gray]": TextTag,
"[green]": TextTag,
"[grey]": TextTag,
"[haettenschweiler]": TextTag,
"[i]": TextTag,
"[img]": ImageTag,
"[justify]": AlignTag,
"[label]": LabelTag,
"[maroon]": TextTag,
"[mono]": TextTag,
"[monospace]": TextTag,
"[noeval]": NoEvalTag,
"[o]": TextTag,
"[profile]": ProfileTag,
"[progress]": ProgressTag,
"[purple]": TextTag,
"[red]": TextTag,
"[rot]": RotTag,
"[rot13]": RotTag,
"[s]": TextTag, # Synonym for [strike].
"[size]": TextTag,
"[small]": TextTag,
"[spoiler]": SpoilerTag,
"[strike]": TextTag,
"[tahoma]": TextTag,
"[target]": TargetTag,
"[u]": TextTag,
"[url]": LinkTag,
"[yellow]": TextTag,
}
class StackElement(NamedTuple):
"""Element of the parsing stack."""
name: str
"""Name of the tag."""
tag: Tag
"""Instantiated tag."""
is_raw: bool
"""Whether the tag is raw or not."""
children: list[Node]
"""Children nodes which to add to the parent element."""
class TextoutStateMachine:
"""State machine for a "textout"-style language."""
__slots__ = ("document", "stack", "tags", "text")
document: Document
"""Document to which to add elements."""
tags: dict[str, type[Tag]]
"""Tags mapping."""
stack: list[StackElement]
"""Element stack."""
text: str
"""Text buffer.
This is mostly used to avoid producing multiple consecutive Text nodes.
"""
def __init__(
self,
/,
*,
document: Document,
tags: dict[str, type[Tag]],
) -> None:
self.document = document
self.tags = tags
self.stack = []
self.text = ""
def flush_text(self, /) -> list[Node]:
"""Flush the text.
:return: The obtained list.
"""
text, self.text = self.text, ""
if not text:
return []
return [Text(text)]
def close_multiple(self, count: int, /) -> None:
"""Close multiple tags.
:param count: Number of elements in the stack to close.
"""
if len(self.stack) < count: # pragma: no cover
raise AssertionError(
f"Could not close {count} contexts with a {len(self.stack)}-"
+ "deep stack.",
)
# We need to add the text element first if we have some text
# in the buffer.
children = self.flush_text()
# We now need to close every one of the tags.
for el in self.stack[:count]:
children = list(
el.tag.process(children=el.children + children),
)
self.stack[:count] = []
prev: Sequence[Node] | Document
if self.stack:
prev = self.stack[0].children
else:
prev = self.document
if (
len(prev) > 0
and len(children) > 0
and isinstance(prev[-1], Text)
and isinstance(children[0], Text)
):
# Merge adjacent Text nodes into a single node.
children[0] = Text(str(prev.pop(-1)) + str(children[0]))
prev.extend(children)
def process(self, entity: Entity, /) -> None:
"""Process the lexical entity.
:param entity: The entity to process.
"""
if isinstance(entity, TextEntity):
self.text += entity.content
return
if isinstance(entity, NewlineEntity):
self.text += "\n"
return
if isinstance(entity, OpenTagEntity):
if self.stack and self.stack[0].is_raw:
# We are not allowed to open tags in a raw context.
self.text += entity.raw
return
ent_name = f"[{entity.name}]"
tag_cls = self.tags.get(ent_name)
if tag_cls is None:
self.text += entity.raw
return
try:
tag = tag_cls(name=ent_name, value=entity.value)
except TagValidationError:
# TODO: Add a warning.
self.text += entity.raw
return
# Add the text currently in the buffer to the top of the stack
# before inserting the new element.
text_nodes = self.flush_text()
if text_nodes:
prev: Sequence[Node] | Document
if self.stack:
prev = self.stack[0].children
else:
prev = self.document
if len(prev) > 0 and isinstance(prev[-1], Text):
prev[-1] = Text(str(prev[-1]) + str(text_nodes[0]))
else:
prev.extend(text_nodes)
# Insert the element.
self.stack.insert(
0,
StackElement(
name=f"[{entity.name}]",
tag=tag,
children=[],
is_raw=tag.is_raw(),
),
)
return
if isinstance(entity, CloseTagEntity):
ent_name = f"[{entity.name}]"
if self.stack and self.stack[0].is_raw:
if self.stack[0].name == ent_name:
# We are indeed closing the current raw tag!
self.close_multiple(1)
else:
# We are not closing the raw tag, and cannot close any
# parent tag, so we actually just consider this as text.
self.text += entity.raw
return
for i, el in enumerate(self.stack):
# In non-raw cases, the [/] tag means that we want to close
# the first found tag.
if ent_name in ("[]", el.name):
self.close_multiple(1 + i)
return
else:
# The closing tag doesn't correspond to an existing tag,
# so we consider it as simple text.
self.text += entity.raw
return
raise NotImplementedError( # pragma: no cover
f"Unsupported element {entity!r}",
)
def close(self, /) -> None:
"""Close the existing stack."""
self.close_multiple(len(self.stack))
class TextoutParser(Parser):
"""Parser for Planète Casio "textout"-type BBCode.
:param tags: The tags to use with the parser.
"""
__slots__ = ("tags",)
tags: dict[str, type[Tag]]
"""Tag classes, bound by name."""
def __init__(self, /, *, tags: dict[str, type[Tag]] | None = None) -> None:
if tags is None:
tags = BUILTIN_TAGS
self.tags = tags
def parse(self, inputstring: str, document: Document) -> None:
"""Parse the input string in BBCode to a document.
:param inputstring: The input string to parse to obtain the document.
:param document: The document to populate.
"""
self.setup_parse(inputstring, document)
self.lexer = iter_textout_entities(self.inputstring)
state_machine = TextoutStateMachine(document=document, tags=self.tags)
for entity in self.lexer:
state_machine.process(entity)
state_machine.close()
self.finish_parse()
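# NOTE: Illustration only, not part of this module: a minimal usage sketch
# with docutils. It assumes docutils >= 0.18 for `get_default_settings`.
#
#from docutils.frontend import get_default_settings
#from docutils.utils import new_document
#
#parser = TextoutParser()
#settings = get_default_settings(TextoutParser)
#document = new_document("<bbcode>", settings)
#parser.parse("[b]Hello [i]world[/i]![/b]", document)
#print(document.pformat())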

99
textoutpc/tags.py Normal file
View File

@ -0,0 +1,99 @@
#!/usr/bin/env python
# *****************************************************************************
# Copyright (C) 2018-2023 Thomas Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
# *****************************************************************************
"""Tags definitions for textoutpc."""
from __future__ import annotations
from abc import ABC, abstractmethod
from collections.abc import Sequence
from typing import Iterator
from docutils.nodes import Node, TextElement
__all__ = ["RawTag", "Tag"]
class Tag(ABC):
"""A tag for textoutpc's BBCode.
Note that the provided name may be surrounded by brackets if the tag is
a normal tag, or not if it is a special tag such as "`".
:param name: The name of the tag.
:param value: The value provided with the tag, if any.
"""
__slots__ = ("name", "value")
@staticmethod
def get_text_from_raw_children(children: Sequence[Node], /) -> str:
"""Get text from children.
This is a function to use with raw tags only, as they are guaranteed
to be called with text elements or nodes only.
"""
if not children:
return ""
if len(children) > 1:
raise AssertionError(
"More than one children for a raw tag, this is a bug!",
)
child = children[0]
if isinstance(child, TextElement) and len(child.children) == 1:
return str(child.children[0])
elif isinstance(child, str):
return str(child)
raise AssertionError(f"Unsupported child for text: {child!r}")
def __init__(self, *, name: str, value: str | None = None):
self.name = name
self.value = value
self.validate()
def validate(self) -> None:
"""Validate the name and value for this tag.
:raises TagValidationError: The name and value combination is invalid.
"""
def is_raw(self) -> bool:
"""Return whether the content of this tag should be read as raw or not.
This is called after the tag is initialized, but before the tag is used
to populate a node, in order to determine whether what follows the tag
should be interpreted, and whether an end tag should be looked for.
This may take into account both the name and the value of the tag.
"""
return False
@abstractmethod
def process(self, *, children: Sequence[Node]) -> Iterator[Node]:
"""Process the tag with children to build document nodes.
:param children: The children to process.
:return: The produced nodes.
"""
class RawTag(Tag):
"""A tag for textoutpc's BBCode, except always raw.
This means that the content of such tags must never be interpreted,
whatever the name and value are.
"""
__slots__ = ()
def is_raw(self) -> bool:
"""Return whether the content of this tag should be read as raw or not.
Since the tag is a raw tag, this will always be true.
"""
return True
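# NOTE: Illustration only, not part of this module: a sketch of a custom tag
# built on the interface above. The [kbd] name and the docutils `literal`
# node are assumptions made for the example.
#
#from docutils.nodes import literal
#
#class KbdTag(Tag):
#    """Hypothetical inline tag rendering its children as literal text."""
#
#    __slots__ = ()
#
#    def process(self, *, children: Sequence[Node]) -> Iterator[Node]:
#        node = literal()
#        node.extend(children)
#        yield node
#
# Such a tag could then be registered through the parser's `tags` mapping,
# for instance TextoutParser(tags={**BUILTIN_TAGS, "[kbd]": KbdTag}).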

View File

@ -1,13 +0,0 @@
#!/usr/bin/env python3
#******************************************************************************
# Copyright (C) 2020 Thomas "Cakeisalie5" Touhey <thomas@touhey.fr>
# This file is part of the textoutpc project, which is MIT-licensed.
#******************************************************************************
""" Only define the version of the module.
Can be included directly, without dependencies. """
__all__ = ["version"]
version = "0.2.1"
# End of file.