diff --git a/.coveragerc b/.coveragerc
deleted file mode 100644
index 1a0c106b..00000000
--- a/.coveragerc
+++ /dev/null
@@ -1,16 +0,0 @@
-; ***********************************
-; |docname| - Configure code coverage
-; ***********************************
-[run]
-; Select code to be covered -- everything in the `pretext` subdirectory.
-source = pretext
-; This doesn't work. It works if the path is absolute, but I can't find any relative path that does work.
-;omit = pretext\core\*.py
-
-[report]
-; This is a simple workaround for the `omit` setting above: don't report coverage for the PreTeXt core.
-omit = pretext\core\*.py
-
-[html]
-; For better organization, place generated reports under the tests.
-directory = test/htmlcov
diff --git a/poetry.lock b/poetry.lock
index 75b59aa8..675f9f87 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -2,31 +2,45 @@
[[package]]
name = "black"
-version = "22.12.0"
+version = "23.3.0"
description = "The uncompromising code formatter."
optional = false
python-versions = ">=3.7"
files = [
- {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"},
- {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"},
- {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"},
- {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"},
- {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"},
- {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"},
- {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"},
- {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"},
- {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"},
- {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"},
- {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"},
- {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"},
+ {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
+ {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
+ {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
+ {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
+ {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
+ {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
+ {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
+ {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
+ {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
+ {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
+ {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
+ {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
+ {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
+ {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
+ {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
+ {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
+ {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
+ {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
+ {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
+ {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
+ {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
+ {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
+ {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
+ {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
+ {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
]
[package.dependencies]
click = ">=8.0.0"
mypy-extensions = ">=0.4.3"
+packaging = ">=22.0"
pathspec = ">=0.9.0"
platformdirs = ">=2"
-tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""}
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}
[package.extras]
@@ -191,13 +205,13 @@ test = ["black", "flake8", "myst-parser", "pytest", "sphinx (>=3)"]
[[package]]
name = "codechat-server"
-version = "0.2.9"
+version = "0.2.16"
description = "The CodeChat Server for software documentation"
optional = false
python-versions = ">=3.7"
files = [
- {file = "CodeChat_Server-0.2.9-py3-none-any.whl", hash = "sha256:a1e8ddbe23f2f6bcb39cde63683372b780892fb3843ee41ea3dc5c0e1918da1d"},
- {file = "CodeChat_Server-0.2.9.tar.gz", hash = "sha256:b00d90dc47d6cafd5bd82eabe13877f0b2ecfaa09cc720608fd3bab754c28c91"},
+ {file = "CodeChat_Server-0.2.16-py3-none-any.whl", hash = "sha256:fef2eee46e138813bb0f4a8586696d702eb2b0cfd01a7d9578960539e6172e78"},
+ {file = "CodeChat_Server-0.2.16.tar.gz", hash = "sha256:a5bd3d3cf6c9622b5c8669cf74a8a3c897345ddb84676fb82cbbab5508f39ba2"},
]
[package.dependencies]
@@ -207,7 +221,7 @@ markdown = "*"
psutil = "*"
strictyaml = "*"
thrift = "*"
-typer = "*"
+typer = {version = "*", extras = ["all"]}
watchdog = "*"
websockets = "*"
@@ -472,13 +486,13 @@ files = [
[[package]]
name = "importlib-metadata"
-version = "6.6.0"
+version = "6.7.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.7"
files = [
- {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"},
- {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"},
+ {file = "importlib_metadata-6.7.0-py3-none-any.whl", hash = "sha256:cb52082e659e97afc5dac71e79de97d8681de3aa07ff18578330904a9d18e5b5"},
+ {file = "importlib_metadata-6.7.0.tar.gz", hash = "sha256:1aaf550d4f73e5d6783e7acb77aec43d49da8017410afae93822cc9cca98c4d4"},
]
[package.dependencies]
@@ -487,7 +501,7 @@ zipp = ">=0.5"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
perf = ["ipython"]
-testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"]
+testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
[[package]]
name = "iniconfig"
@@ -592,6 +606,20 @@ html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
source = ["Cython (>=0.29.7)"]
+[[package]]
+name = "lxml-stubs"
+version = "0.4.0"
+description = "Type annotations for the lxml package"
+optional = false
+python-versions = "*"
+files = [
+ {file = "lxml-stubs-0.4.0.tar.gz", hash = "sha256:184877b42127256abc2b932ba8bd0ab5ea80bd0b0fee618d16daa40e0b71abee"},
+ {file = "lxml_stubs-0.4.0-py3-none-any.whl", hash = "sha256:3b381e9e82397c64ea3cc4d6f79d1255d015f7b114806d4826218805c10ec003"},
+]
+
+[package.extras]
+test = ["coverage[toml] (==5.2)", "pytest (>=6.0.0)", "pytest-mypy-plugins (==1.9.3)"]
+
[[package]]
name = "markdown"
version = "3.4.3"
@@ -609,6 +637,30 @@ importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
[package.extras]
testing = ["coverage", "pyyaml"]
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+description = "Python port of markdown-it. Markdown parsing, done right!"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+ {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[package.dependencies]
+mdurl = ">=0.1,<1.0"
+
+[package.extras]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
+plugins = ["mdit-py-plugins"]
+profiling = ["gprof2dot"]
+rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
+
[[package]]
name = "mccabe"
version = "0.7.0"
@@ -620,6 +672,63 @@ files = [
{file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
]
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "mypy"
+version = "1.4.0"
+description = "Optional static typing for Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "mypy-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3af348e0925a59213244f28c7c0c3a2c2088b4ba2fe9d6c8d4fbb0aba0b7d05"},
+ {file = "mypy-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0b2e0da7ff9dd8d2066d093d35a169305fc4e38db378281fce096768a3dbdbf"},
+ {file = "mypy-1.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210fe0f39ec5be45dd9d0de253cb79245f0a6f27631d62e0c9c7988be7152965"},
+ {file = "mypy-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f7a5971490fd4a5a436e143105a1f78fa8b3fe95b30fff2a77542b4f3227a01f"},
+ {file = "mypy-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:50f65f0e9985f1e50040e603baebab83efed9eb37e15a22a4246fa7cd660f981"},
+ {file = "mypy-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1b5c875fcf3e7217a3de7f708166f641ca154b589664c44a6fd6d9f17d9e7e"},
+ {file = "mypy-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4c734d947e761c7ceb1f09a98359dd5666460acbc39f7d0a6b6beec373c5840"},
+ {file = "mypy-1.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5984a8d13d35624e3b235a793c814433d810acba9eeefe665cdfed3d08bc3af"},
+ {file = "mypy-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0f98973e39e4a98709546a9afd82e1ffcc50c6ec9ce6f7870f33ebbf0bd4f26d"},
+ {file = "mypy-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:19d42b08c7532d736a7e0fb29525855e355fa51fd6aef4f9bbc80749ff64b1a2"},
+ {file = "mypy-1.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ba9a69172abaa73910643744d3848877d6aac4a20c41742027dcfd8d78f05d9"},
+ {file = "mypy-1.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a34eed094c16cad0f6b0d889811592c7a9b7acf10d10a7356349e325d8704b4f"},
+ {file = "mypy-1.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:53c2a1fed81e05ded10a4557fe12bae05b9ecf9153f162c662a71d924d504135"},
+ {file = "mypy-1.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bba57b4d2328740749f676807fcf3036e9de723530781405cc5a5e41fc6e20de"},
+ {file = "mypy-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:653863c75f0dbb687d92eb0d4bd9fe7047d096987ecac93bb7b1bc336de48ebd"},
+ {file = "mypy-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7461469e163f87a087a5e7aa224102a30f037c11a096a0ceeb721cb0dce274c8"},
+ {file = "mypy-1.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0cf0ca95e4b8adeaf07815a78b4096b65adf64ea7871b39a2116c19497fcd0dd"},
+ {file = "mypy-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:94a81b9354545123feb1a99b960faeff9e1fa204fce47e0042335b473d71530d"},
+ {file = "mypy-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:67242d5b28ed0fa88edd8f880aed24da481929467fdbca6487167cb5e3fd31ff"},
+ {file = "mypy-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f2b353eebef669529d9bd5ae3566905a685ae98b3af3aad7476d0d519714758"},
+ {file = "mypy-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62bf18d97c6b089f77f0067b4e321db089d8520cdeefc6ae3ec0f873621c22e5"},
+ {file = "mypy-1.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca33ab70a4aaa75bb01086a0b04f0ba8441e51e06fc57e28585176b08cad533b"},
+ {file = "mypy-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5a0ee54c2cb0f957f8a6f41794d68f1a7e32b9968675ade5846f538504856d42"},
+ {file = "mypy-1.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c34d43e3d54ad05024576aef28081d9d0580f6fa7f131255f54020eb12f5352"},
+ {file = "mypy-1.4.0-py3-none-any.whl", hash = "sha256:f051ca656be0c179c735a4c3193f307d34c92fdc4908d44fd4516fbe8b10567d"},
+ {file = "mypy-1.4.0.tar.gz", hash = "sha256:de1e7e68148a213036276d1f5303b3836ad9a774188961eb2684eddff593b042"},
+]
+
+[package.dependencies]
+mypy-extensions = ">=1.0.0"
+tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
+typing-extensions = ">=3.10"
+
+[package.extras]
+dmypy = ["psutil (>=4.0)"]
+install-types = ["pip"]
+python2 = ["typed-ast (>=1.4.0,<2)"]
+reports = ["lxml"]
+
[[package]]
name = "mypy-extensions"
version = "1.0.0"
@@ -753,33 +862,33 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa
[[package]]
name = "platformdirs"
-version = "3.5.1"
+version = "3.6.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
optional = false
python-versions = ">=3.7"
files = [
- {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"},
- {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"},
+ {file = "platformdirs-3.6.0-py3-none-any.whl", hash = "sha256:ffa199e3fbab8365778c4a10e1fbf1b9cd50707de826eb304b50e57ec0cc8d38"},
+ {file = "platformdirs-3.6.0.tar.gz", hash = "sha256:57e28820ca8094678b807ff529196506d7a21e17156cb1cddb3e74cebce54640"},
]
[package.extras]
-docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.2.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+docs = ["furo (>=2023.5.20)", "proselint (>=0.13)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)"]
[[package]]
name = "playwright"
-version = "1.34.0"
+version = "1.35.0"
description = "A high-level API to automate web browsers"
optional = false
python-versions = ">=3.7"
files = [
- {file = "playwright-1.34.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:69bb9b3296e366a23a99277b4c7673cb54ce71a3f5d630f114f7701b61f98f25"},
- {file = "playwright-1.34.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:402d946631c8458436e099d7731bbf54cf79c9e62e3acae0ea8421e72616926b"},
- {file = "playwright-1.34.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:462251cda0fcbb273497d357dbe14b11e43ebceb0bac9b892beda041ff209aa9"},
- {file = "playwright-1.34.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:a8ba124ea302596a03a66993cd500484fb255cbc10fe0757fa4d49f974267a80"},
- {file = "playwright-1.34.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf0cb6aac49d24335fe361868aea72b11f276a95e7809f1a5d1c69b4120c46ac"},
- {file = "playwright-1.34.0-py3-none-win32.whl", hash = "sha256:c50fef189d87243cc09ae0feb8e417fbe434359ccbcc863fb19ba06d46d31c33"},
- {file = "playwright-1.34.0-py3-none-win_amd64.whl", hash = "sha256:42e16c930e1e910461f4c551a72fc1b900f37124431bf2b6a6d9ddae70042db4"},
+ {file = "playwright-1.35.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:9eb7fdb7bb3f4e528e63641b83827531739c58a40c71d4ea0030321d3f04a742"},
+ {file = "playwright-1.35.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:444446b55bfb33ac62398f9f71a8fdb6cee1ceda3316d95db3c6419c51bca9be"},
+ {file = "playwright-1.35.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:4f486ea09940a35c08ec26f272bdcb6c1e043d400f3b9b924d541c5f4a7ed8f3"},
+ {file = "playwright-1.35.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:a29cd048b3eddaf116b154328bcb8e3f3a637753cbb926ae3ef5a5e694ed2d64"},
+ {file = "playwright-1.35.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc1d0686bdb6d7c2ce75087a1d6c7820e3de65be893f8a5ec64455613e884b39"},
+ {file = "playwright-1.35.0-py3-none-win32.whl", hash = "sha256:188481e780166eae9a2215bb3a58043ae167f7cc963282c7b7d14ba53858451c"},
+ {file = "playwright-1.35.0-py3-none-win_amd64.whl", hash = "sha256:2df7cfaaca881fd065f68e1c72a8216679a5b096f2add60c8c7c9dc909fdf7ef"},
]
[package.dependencies]
@@ -908,13 +1017,13 @@ files = [
[[package]]
name = "pytest"
-version = "7.3.1"
+version = "7.3.2"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pytest-7.3.1-py3-none-any.whl", hash = "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362"},
- {file = "pytest-7.3.1.tar.gz", hash = "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3"},
+ {file = "pytest-7.3.2-py3-none-any.whl", hash = "sha256:cdcbd012c9312258922f8cd3f1b62a6580fdced17db6014896053d47cddf9295"},
+ {file = "pytest-7.3.2.tar.gz", hash = "sha256:ee990a3cc55ba808b80795a79944756f315c67c12b56abd3ac993a7b8c17030b"},
]
[package.dependencies]
@@ -926,7 +1035,7 @@ pluggy = ">=0.12,<2.0"
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
-testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
+testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-console-scripts"
@@ -963,13 +1072,13 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale
[[package]]
name = "pytest-mock"
-version = "3.10.0"
+version = "3.11.1"
description = "Thin-wrapper around the mock package for easier use with pytest"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pytest-mock-3.10.0.tar.gz", hash = "sha256:fbbdb085ef7c252a326fd8cdcac0aa3b1333d8811f131bdcc701002e1be7ed4f"},
- {file = "pytest_mock-3.10.0-py3-none-any.whl", hash = "sha256:f4c973eeae0282963eb293eb173ce91b091a79c1334455acfac9ddee8a1c784b"},
+ {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"},
+ {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"},
]
[package.dependencies]
@@ -1036,6 +1145,36 @@ urllib3 = ">=1.21.1,<3"
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
+[[package]]
+name = "rich"
+version = "13.4.2"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+ {file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"},
+ {file = "rich-13.4.2.tar.gz", hash = "sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=2.2.0"
+pygments = ">=2.13.0,<3.0.0"
+typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
+
+[[package]]
+name = "shellingham"
+version = "1.5.0.post1"
+description = "Tool to Detect Surrounding Shell"
+optional = false
+python-versions = ">=3.7"
+files = [
+ {file = "shellingham-1.5.0.post1-py2.py3-none-any.whl", hash = "sha256:368bf8c00754fd4f55afb7bbb86e272df77e4dc76ac29dbcbb81a59e9fc15744"},
+ {file = "shellingham-1.5.0.post1.tar.gz", hash = "sha256:823bc5fb5c34d60f285b624e7264f4dda254bc803a3774a147bf99c0e3004a28"},
+]
+
[[package]]
name = "single-version"
version = "1.5.1"
@@ -1101,6 +1240,17 @@ all = ["tornado (>=4.0)", "twisted"]
tornado = ["tornado (>=4.0)"]
twisted = ["twisted"]
+[[package]]
+name = "toml"
+version = "0.10.2"
+description = "Python Library for Tom's Obvious, Minimal Language"
+optional = false
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
+ {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
+]
+
[[package]]
name = "tomli"
version = "2.0.1"
@@ -1125,6 +1275,9 @@ files = [
[package.dependencies]
click = ">=7.1.1,<9.0.0"
+colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"all\""}
+rich = {version = ">=10.11.0,<14.0.0", optional = true, markers = "extra == \"all\""}
+shellingham = {version = ">=1.3.0,<2.0.0", optional = true, markers = "extra == \"all\""}
typing-extensions = ">=3.7.4.3"
[package.extras]
@@ -1146,13 +1299,13 @@ files = [
[[package]]
name = "urllib3"
-version = "2.0.2"
+version = "2.0.3"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.7"
files = [
- {file = "urllib3-2.0.2-py3-none-any.whl", hash = "sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e"},
- {file = "urllib3-2.0.2.tar.gz", hash = "sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc"},
+ {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"},
+ {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"},
]
[package.extras]
@@ -1312,4 +1465,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more
[metadata]
lock-version = "2.0"
python-versions = "^3.8.5"
-content-hash = "7b39dc70870ef5568ce92331fa79ad3c8b5ae1951c3c6a95c2ad413472546e1c"
+content-hash = "70a73884491c7bcf5e65801eb48a9e77d381548cb73c8340d43a0a881341a9bc"
diff --git a/pretext/__init__.py b/pretext/__init__.py
index d98bc5e1..1aa327ae 100644
--- a/pretext/__init__.py
+++ b/pretext/__init__.py
@@ -46,7 +46,7 @@
]
-def activate():
+def activate() -> None:
"""
This function was provided by the original `pretext` package
deployed to PyPI by Alex Willmer. Thanks to their generosity,
diff --git a/pretext/build.py b/pretext/build.py
index d17a9c4e..6cd01845 100644
--- a/pretext/build.py
+++ b/pretext/build.py
@@ -2,7 +2,7 @@
import os
from pathlib import Path
import sys
-from typing import Optional
+from typing import Dict, Optional
from . import utils, core, codechat
@@ -14,11 +14,11 @@ def html(
ptxfile: Path,
pub_file: Path,
output: Path,
- stringparams,
+ stringparams: Dict[str, str],
custom_xsl: Optional[Path],
- xmlid_root,
- zipped=False,
-):
+ xmlid_root: Optional[str],
+ zipped: bool = False,
+) -> None:
os.makedirs(output, exist_ok=True)
log.info(f"\nNow building HTML into {output}\n")
if xmlid_root is not None:
@@ -40,9 +40,9 @@ def html(
None,
output.as_posix(),
)
- codechat.map_path_to_xml_id(
- ptxfile, utils.project_path(ptxfile), output.as_posix()
- )
+ pp = utils.project_path(ptxfile)
+ assert pp is not None, f"Invalid project path to {ptxfile}."
+ codechat.map_path_to_xml_id(ptxfile, pp, output.as_posix())
except Exception as e:
log.critical(e)
log.debug("Exception info:\n##################\n", exc_info=True)
@@ -54,9 +54,9 @@ def latex(
ptxfile: Path,
pub_file: Path,
output: Path,
- stringparams,
+ stringparams: Dict[str, str],
custom_xsl: Optional[Path],
-):
+) -> None:
os.makedirs(output, exist_ok=True)
log.info(f"\nNow building LaTeX into {output}\n")
# ensure working directory is preserved
@@ -81,10 +81,10 @@ def pdf(
ptxfile: Path,
pub_file: Path,
output: Path,
- stringparams,
+ stringparams: Dict[str, str],
custom_xsl: Optional[Path],
pdf_method: str,
-):
+) -> None:
os.makedirs(output, exist_ok=True)
log.info(f"\nNow building LaTeX into {output}\n")
# ensure working directory is preserved
@@ -110,10 +110,10 @@ def custom(
ptxfile: Path,
pub_file: Path,
output: Path,
- stringparams,
+ stringparams: Dict[str, str],
custom_xsl: Path,
output_filename: Optional[str] = None,
-):
+) -> None:
os.makedirs(output, exist_ok=True)
if output_filename is not None:
output_filepath = output / output_filename
@@ -141,7 +141,9 @@ def custom(
# build (non Kindle) ePub:
-def epub(ptxfile, pub_file: Path, output: Path, stringparams):
+def epub(
+ ptxfile: Path, pub_file: Path, output: Path, stringparams: Dict[str, str]
+) -> None:
os.makedirs(output, exist_ok=True)
try:
utils.npm_install()
@@ -151,7 +153,7 @@ def epub(ptxfile, pub_file: Path, output: Path, stringparams):
"Unable to build epub because node packages are not installed. Exiting..."
)
log.info(f"\nNow building ePub into {output}\n")
- with utils.working_directory("."):
+ with utils.working_directory(Path()):
try:
core.epub(
ptxfile,
@@ -169,7 +171,9 @@ def epub(ptxfile, pub_file: Path, output: Path, stringparams):
# build Kindle ePub:
-def kindle(ptxfile, pub_file: Path, output: Path, stringparams):
+def kindle(
+ ptxfile: Path, pub_file: Path, output: Path, stringparams: Dict[str, str]
+) -> None:
os.makedirs(output, exist_ok=True)
try:
utils.npm_install()
@@ -179,7 +183,7 @@ def kindle(ptxfile, pub_file: Path, output: Path, stringparams):
"Unable to build Kindle ePub because node packages are not installed. Exiting..."
)
log.info(f"\nNow building Kindle ePub into {output}\n")
- with utils.working_directory("."):
+ with utils.working_directory(Path()):
try:
core.epub(
ptxfile,
@@ -197,7 +201,13 @@ def kindle(ptxfile, pub_file: Path, output: Path, stringparams):
# build Braille:
-def braille(ptxfile, pub_file: Path, output: Path, stringparams, page_format="emboss"):
+def braille(
+ ptxfile: Path,
+ pub_file: Path,
+ output: Path,
+ stringparams: Dict[str, str],
+ page_format: str = "emboss",
+) -> None:
os.makedirs(output, exist_ok=True)
log.warning(
"Braille output is still experimental, and requires additional libraries from liblouis (specifically the file2brl software)."
@@ -210,7 +220,7 @@ def braille(ptxfile, pub_file: Path, output: Path, stringparams, page_format="em
"Unable to build braille because node packages could not be installed. Exiting..."
)
log.info(f"\nNow building braille into {output}\n")
- with utils.working_directory("."):
+ with utils.working_directory(Path()):
try:
core.braille(
xml_source=ptxfile,
@@ -232,9 +242,9 @@ def webwork_sets(
ptxfile: Path,
pub_file: Path,
output: Path,
- stringparams,
- zipped=False,
-):
+ stringparams: Dict[str, str],
+ zipped: bool = False,
+) -> None:
os.makedirs(output, exist_ok=True)
log.info(f"\nNow building WeBWorK Sets into {output}\n")
# ensure working directory is preserved
diff --git a/pretext/cli.py b/pretext/cli.py
index d783c7fa..f227e7c8 100644
--- a/pretext/cli.py
+++ b/pretext/cli.py
@@ -12,9 +12,9 @@
import tempfile
import platform
from pathlib import Path
-import typing as t
import atexit
import subprocess
+from typing import List, Optional, Tuple
from .config import xml_overlay
from . import (
@@ -62,7 +62,7 @@
is_flag=True,
help='Display list of build/view "targets" available in the project manifest.',
)
-def main(ctx, targets):
+def main(ctx: click.Context, targets: bool) -> None:
"""
Command line tools for quickly creating, authoring, and building PreTeXt projects.
@@ -77,12 +77,12 @@ def main(ctx, targets):
Use the `--help` option on any CLI command to learn more, for example,
`pretext build --help`.
"""
- if utils.project_path() is not None:
+ if (pp := utils.project_path()) is not None:
if targets:
Project().print_target_names()
return
# create file handler which logs even debug messages
- fh = logging.FileHandler(utils.project_path() / "cli.log", mode="w")
+ fh = logging.FileHandler(pp / "cli.log", mode="w")
fh.setLevel(logging.DEBUG)
file_log_format = logging.Formatter("{levelname:<8}: {message}", style="{")
fh.setFormatter(file_log_format)
@@ -90,7 +90,7 @@ def main(ctx, targets):
# output info
log.info(f"PreTeXt project found in `{utils.project_path()}`.")
# permanently change working directory for rest of process
- os.chdir(utils.project_path())
+ os.chdir(pp)
if utils.requirements_version() is None:
log.warning(
"Project's CLI version could not be detected from `requirements.txt`."
@@ -119,7 +119,7 @@ def main(ctx, targets):
short_help="Use when communicating with PreTeXt support.",
context_settings=CONTEXT_SETTINGS,
)
-def support():
+def support() -> None:
"""
Outputs useful information about your installation needed by
PreTeXt volunteers when requesting help on the pretext-support
@@ -160,12 +160,14 @@ def support():
context_settings={"help_option_names": [], "ignore_unknown_options": True},
)
@click.argument("args", nargs=-1)
-def devscript(args):
+def devscript(args: List[str]) -> None:
"""
Aliases the core pretext script.
"""
PY_CMD = sys.executable
- subprocess.run([PY_CMD, core.resources.path("pretext", "pretext")] + list(args))
+ subprocess.run(
+ [PY_CMD, str(core.resources.path("pretext", "pretext"))] + list(args)
+ )
# pretext new
@@ -191,7 +193,7 @@ def devscript(args):
type=click.STRING,
help="Download a zipped template from its URL.",
)
-def new(template, directory, url_template):
+def new(template: str, directory: Path, url_template: str) -> None:
"""
Generates the necessary files for a new PreTeXt project.
Supports `pretext new book` (default) and `pretext new article`,
@@ -249,7 +251,7 @@ def new(template, directory, url_template):
is_flag=True,
help="Refresh initialization of project even if project.ptx exists.",
)
-def init(refresh):
+def init(refresh: bool) -> None:
"""
Generates the project manifest for a PreTeXt project in the current directory. This feature
is mainly intended for updating existing projects to use this CLI.
@@ -273,7 +275,7 @@ def init(refresh):
}
for resource in resource_to_dest:
with templates.resource_path(resource) as resource_path:
- project_resource_path = Path(resource_to_dest.get(resource)).resolve()
+ project_resource_path = Path(resource_to_dest[resource]).resolve()
if project_resource_path.exists():
new_resource_name = (
project_resource_path.stem
@@ -311,7 +313,7 @@ def init(refresh):
# pretext build
@main.command(short_help="Build specified target", context_settings=CONTEXT_SETTINGS)
-@click.argument("target", required=False)
+@click.argument("target_name", required=False, metavar="target")
@click.option(
"--clean",
is_flag=True,
@@ -344,13 +346,13 @@ def init(refresh):
help=xml_overlay.USAGE_DESCRIPTION.format("-p"),
)
def build(
- target,
- clean,
- generate,
- no_generate,
- xmlid: t.Optional[str],
- project_ptx_override: t.Tuple[str, str],
-):
+ target_name: str,
+ clean: bool,
+ generate: str,
+ no_generate: bool,
+ xmlid: Optional[str],
+ project_ptx_override: Tuple[Tuple[str, str], ...],
+) -> None:
"""
Build [TARGET] according to settings specified by project.ptx.
@@ -369,7 +371,6 @@ def build(
for path, value in project_ptx_override:
overlay.upsert_node_or_attribute(path, value)
- target_name = target
if utils.no_project(task="build"):
return
project = Project()
@@ -453,6 +454,7 @@ def build(
@click.option(
"-t",
"--target",
+ "target_name",
type=click.STRING,
help="Name of target to generate assets for (if not specified, first target from manifest is used).",
)
@@ -474,11 +476,11 @@ def build(
)
def generate(
assets: str,
- target: t.Optional[str],
+ target_name: Optional[str],
all_formats: bool,
- xmlid: t.Optional[str],
- project_ptx_override: t.Tuple[str, str],
-):
+ xmlid: Optional[str],
+ project_ptx_override: Tuple[Tuple[str, str], ...],
+) -> None:
"""
Generate specified (or all) assets for the default target (first target in "project.ptx"). Asset "generation" is typically
slower and performed less frequently than "building" a project, but is
@@ -501,16 +503,15 @@ def generate(
messages = project.apply_overlay(overlay)
for message in messages:
log.info("project.ptx overlay " + message)
- target_name = target
target = project.target(name=target_name)
- if target_name is None:
- log.info(
- f"Since no target was specified with the -t flag, we will generate assets for the first target in the manifest ({target.name()})."
- )
if target is None:
utils.show_target_hints(target_name, project, task="generating assets for")
log.critical("Exiting without generating any assets.")
return
+ if target_name is None:
+ log.info(
+ f"Since no target was specified with the -t flag, we will generate assets for the first target in the manifest ({target.name()})."
+ )
if all_formats and assets == "ALL":
log.info(
f'Generating all assets in all asset formats for the target "{target.name()}".'
@@ -542,7 +543,7 @@ def generate(
short_help="Preview specified target based on its format.",
context_settings=CONTEXT_SETTINGS,
)
-@click.argument("target", required=False)
+@click.argument("target_name", metavar="target", required=False)
@click.option(
"-a",
"--access",
@@ -611,15 +612,15 @@ def generate(
help="By default, pretext view tries to launch the default application to view the specified target. Setting this suppresses this behavior.",
)
def view(
- target: str,
+ target_name: str,
access: str,
- port: t.Optional[int],
- directory: str,
+ port: Optional[int],
+ directory: Optional[str],
watch: bool,
build: bool,
- generate: t.Optional[str],
+ generate: Optional[str],
no_launch: bool,
-):
+) -> None:
"""
Starts a local server to preview built PreTeXt documents in your browser.
TARGET is the name of the defined in `project.ptx`.
@@ -627,9 +628,9 @@ def view(
if directory is not None:
if utils.cocalc_project_id() is not None:
try:
- subdir = directory.relative_to(Path.home())
+ subdir = Path(directory).relative_to(Path.home())
except ValueError:
- subdir = ""
+ subdir = Path()
log.info("Directory can be previewed at the following link at any time:")
log.info(f" https://cocalc.com/{utils.cocalc_project_id()}/raw/{subdir}")
return
@@ -638,7 +639,6 @@ def view(
return
if utils.no_project(task="view the output for"):
return
- target_name = target
project = Project()
target = project.target(name=target_name)
if target is None:
@@ -650,7 +650,7 @@ def view(
try:
subdir = target.output_dir().relative_to(Path.home())
except ValueError:
- subdir = ""
+ subdir = Path()
log.info("Built project can be previewed at the following link at any time:")
log.info(f" https://cocalc.com/{utils.cocalc_project_id()}/raw/{subdir}")
return
@@ -672,9 +672,9 @@ def view(
short_help="Deploys Git-managed project to GitHub Pages.",
context_settings=CONTEXT_SETTINGS,
)
-@click.argument("target", required=False)
+@click.argument("target_name", metavar="target", required=False)
@click.option("-u", "--update_source", is_flag=True, required=False)
-def deploy(target, update_source):
+def deploy(target_name: str, update_source: bool) -> None:
"""
Automatically deploys most recent build of [TARGET] to GitHub Pages,
making it available to the general public.
@@ -684,7 +684,6 @@ def deploy(target, update_source):
"""
if utils.no_project(task="deploy"):
return
- target_name = target
project = Project()
target = project.target(name=target_name)
if target is None or target.format() != "html":
diff --git a/pretext/codechat.py b/pretext/codechat.py
index 71b9c0ed..2493b8a8 100644
--- a/pretext/codechat.py
+++ b/pretext/codechat.py
@@ -14,7 +14,7 @@
import collections # defaultdict
import glob # glob
import json # dumps
-import pathlib # Path
+from pathlib import Path
import sys # platform
import urllib.parse # urlparse
import urllib.request # pathname2url
@@ -47,9 +47,9 @@
# This allows a single source file to produce multiple HTML files, as well as supporting a one-to-one relationship. The list captures the order of appearance of the XML IDs in the tree -- element 0 is the first XML ID, etc.
def map_path_to_xml_id(
# A path to the root XML file in the pretext book being processed.
- xml: str,
+ xml: Path,
# A path to the project directory, which (should) contain ``codechat_config.yaml``.
- project_path: pathlib.Path,
+ project_path: Path,
# A path to the destination or output directory. The resulting JSON file will be stored there.
dest_dir: str,
) -> None:
@@ -57,14 +57,14 @@ def map_path_to_xml_id(
path_to_xml_id = collections.defaultdict(list)
# Normalize path separators to current OS.
- xml = str(pathlib.Path(xml).resolve())
+ _xml = str(xml.resolve())
# This follows the `Python recommendations `_.
is_win = sys.platform == "win32"
# Look at all HTML files in the output directory. Store only their stem, since this is what an XML ID specifies. Note that all output files will have the same path prefix (the ``dest_dir`` and the same suffix (``.html``); the stem is the only unique part.
html_files = set(
- pathlib.Path(html_file).stem for html_file in glob.glob(dest_dir + "/*.html")
+ Path(html_file).stem for html_file in glob.glob(dest_dir + "/*.html")
)
# lxml turns ``xml:id`` into the string below.
@@ -73,7 +73,9 @@ def map_path_to_xml_id(
xml_id_attrib = f"{xml_ns}id"
# Define a loader which sets the ``xml:base`` of an xincluded element. While lxml `evidently used to do this in 2013 `_, a change eliminated this ability per some `dicussion `_, which included a rejected patch fixing this problem. `Current source `_ lacks this patch.
- def my_loader(href, parse, encoding=None, parser=None):
+ #
+ # Since there's few docs on this function, ignore the lack of types.
+ def my_loader(href, parse, encoding=None, parser=None): # type: ignore
ret = lxml.ElementInclude._lxml_default_loader(href, parse, encoding, parser)
# The return value may not be an element.
if isinstance(ret, ET._Element):
@@ -82,11 +84,11 @@ def my_loader(href, parse, encoding=None, parser=None):
# Load the XML, performing xincludes using this loader.
huge_parser = ET.XMLParser(huge_tree=True)
- src_tree = ET.parse(xml, parser=huge_parser)
+ src_tree = ET.parse(_xml, parser=huge_parser)
lxml.ElementInclude.include(src_tree, loader=my_loader)
- # Walk though every element with an xml ID.
- for elem in src_tree.iterfind(f"//*[@{xml_id_attrib}]"):
+ # Walk though every element with an xml ID. Note: the type stubs don't have the ``iterfind`` method, hence the ignore in the next line.
+ for elem in src_tree.iterfind(f"//*[@{xml_id_attrib}]"): # type: ignore
# Consider only elemets whose ID produced an HTML file. TODO: use a walrus operator after Python 3.7 is EOL.
xml_id = elem.get(xml_id_attrib)
if xml_id in html_files:
@@ -103,11 +105,11 @@ def my_loader(href, parse, encoding=None, parser=None):
# Decode the URL-encoded filename.
path = urllib.parse.unquote(path)
# Use ``resolve()`` to standardize capitalization on Windows.
- stdpath = pathlib.Path(path).resolve()
+ stdpath = Path(path).resolve()
# Make this path relative to the project directory, to avoid writing potentially confidential information (username / local filesystem paths) to the mapping file, which might be published to the web.
relpath = stdpath.relative_to(project_path)
# Add this XML ID to others for this path.
path_to_xml_id[str(relpath)].append(xml_id)
# Save the result as a JSON file in the ``dest_dir``.
- (pathlib.Path(dest_dir) / ".mapping.json").write_text(json.dumps(path_to_xml_id))
+ (Path(dest_dir) / ".mapping.json").write_text(json.dumps(path_to_xml_id))
diff --git a/pretext/config/xml_overlay.py b/pretext/config/xml_overlay.py
index 7d392fcc..9d93e1dd 100644
--- a/pretext/config/xml_overlay.py
+++ b/pretext/config/xml_overlay.py
@@ -28,16 +28,27 @@ class ShadowXmlNodeType(t.TypedDict):
class XmlOverlayType(click.ParamType):
name = "xml_overlay"
- def convert(self, value, param, ctx):
- print("got", value, param, ctx.args, ctx.obj)
+ def convert(
+ self,
+ value: t.Any,
+ param: t.Optional[click.Parameter],
+ ctx: t.Optional[click.Context],
+ ) -> t.Any:
+ print(
+ "got",
+ value,
+ param,
+ None if ctx is None else ctx.args,
+ None if ctx is None else ctx.obj,
+ )
return value
class ShadowXmlDocument:
- def __init__(self):
+ def __init__(self) -> None:
self._nodes_dict: t.Dict[str, ShadowXmlNodeType] = {}
- def upsert_node_or_attribute(self, path: str, value: t.Union[str, None]):
+ def upsert_node_or_attribute(self, path: str, value: str) -> "ShadowXmlDocument":
"""
Upserts a node into the shadow document.
@@ -68,7 +79,7 @@ def upsert_node_or_attribute(self, path: str, value: t.Union[str, None]):
self._nodes_dict[path] = node
return self
- def overlay_tree(self, root: ET.Element = ET.Element("root")) -> t.List[str]:
+ def overlay_tree(self, root: ET._Element = ET.Element("root")) -> t.List[str]:
"""
Overlay `root` with the current ShadowXmlDocument's nodes and attributes.
A list of string messages are returned about what elements were changed.
@@ -80,9 +91,9 @@ def overlay_tree(self, root: ET.Element = ET.Element("root")) -> t.List[str]:
def upsert_node(
path: t.List[str],
- current: ET.Element = root,
+ current: ET._Element = root,
current_path: t.List[str] = [],
- ) -> t.List[ET.Element]:
+ ) -> t.List[ET._Element]:
if len(path) == 0:
return [current]
needed_tag = path[0]
diff --git a/pretext/core/resources.py b/pretext/core/resources.py
index 0e561309..abcf9b03 100644
--- a/pretext/core/resources.py
+++ b/pretext/core/resources.py
@@ -4,7 +4,7 @@
from .. import CORE_COMMIT
-def path(*args) -> Path:
+def path(*args: str) -> Path:
# Checks that the local static path ~/.ptx/ contains the static files needed for core, and installs them if they are missing (or if the version is different from the installed version of pretext). Then returns the absolute path to the static files (appending arguments)
local_base_path = Path.home() / ".ptx"
local_commit_file = Path(local_base_path) / ".commit"
@@ -20,7 +20,7 @@ def path(*args) -> Path:
return local_base_path.joinpath(*args)
-def install(local_base_path):
+def install(local_base_path: Path) -> None:
with importlib.resources.path("pretext.core", "resources.zip") as static_zip:
with zipfile.ZipFile(static_zip, "r") as zip:
zip.extractall(local_base_path)
diff --git a/pretext/generate.py b/pretext/generate.py
index 3b979979..7cc24b07 100644
--- a/pretext/generate.py
+++ b/pretext/generate.py
@@ -3,6 +3,7 @@
import logging
from . import utils, core
from pathlib import Path
+from typing import Dict, List, Optional
# Get access to logger
log = logging.getLogger("ptxlogger")
@@ -14,23 +15,23 @@ def latex_image(
ptxfile: Path,
pub_file: Path,
output: Path,
- params,
- target_format,
- xmlid_root,
- pdf_method,
- all_formats=False,
-):
+ params: Dict[str, str],
+ target_format: str,
+ xmlid_root: Optional[str],
+ pdf_method: str,
+ all_formats: bool = False,
+) -> None:
# Dictionary of formats for images based on target
formats = {
- "pdf": None,
- "latex": None,
+ "pdf": [],
+ "latex": [],
"html": ["svg"],
"epub": ["svg"],
"kindle": ["png"],
}
# set overwrite formats to all when appropriate
if all_formats:
- formats[target_format] = {key: ["all"] for key in formats[target_format]}
+ formats[target_format] = ["all"]
# We assume passed paths are absolute.
# set images directory
# parse source so we can check for latex-image.
@@ -38,7 +39,10 @@ def latex_image(
for _ in range(20):
source_xml.xinclude()
if (
- len(source_xml.xpath("/pretext/*[not(docinfo)]//latex-image")) > 0
+ isinstance(
+ li := source_xml.xpath("/pretext/*[not(docinfo)]//latex-image"), List
+ )
+ and len(li) > 0
and formats[target_format] is not None
):
image_output = (output / "latex-image").resolve()
@@ -78,11 +82,11 @@ def sageplot(
ptxfile: Path,
pub_file: Path,
output: Path,
- params,
- target_format,
- xmlid_root,
- all_formats=False,
-):
+ params: Dict[str, str],
+ target_format: str,
+ xmlid_root: Optional[str],
+ all_formats: bool = False,
+) -> None:
# Dictionary of formats for images based on target
formats = {
"pdf": ["pdf", "png"],
@@ -93,7 +97,7 @@ def sageplot(
}
# set overwrite formats to all when appropriate
if all_formats:
- formats[target_format] = {key: ["all"] for key in formats[target_format]}
+ formats[target_format] = ["all"]
# We assume passed paths are absolute.
# set images directory
# parse source so we can check for sageplot.
@@ -101,7 +105,8 @@ def sageplot(
for _ in range(20):
source_xml.xinclude()
if (
- len(source_xml.xpath("/pretext/*[not(docinfo)]//sageplot")) > 0
+ isinstance(li := source_xml.xpath("/pretext/*[not(docinfo)]//sageplot"), List)
+ and len(li) > 0
and formats[target_format] is not None
):
image_output = (output / "sageplot").resolve()
@@ -139,11 +144,11 @@ def asymptote(
ptxfile: Path,
pub_file: Path,
output: Path,
- params,
- target_format,
- xmlid_root,
- all_formats=False,
-):
+ params: Dict[str, str],
+ target_format: str,
+ xmlid_root: Optional[str],
+ all_formats: bool = False,
+) -> None:
# Dictionary of formats for images based on target
formats = {
"pdf": ["pdf"],
@@ -161,7 +166,8 @@ def asymptote(
for _ in range(20):
source_xml.xinclude()
if (
- len(source_xml.xpath("/pretext/*[not(docinfo)]//asymptote")) > 0
+ isinstance(li := source_xml.xpath("/pretext/*[not(docinfo)]//asymptote"), List)
+ and len(li) > 0
and formats[target_format] is not None
):
image_output = (output / "asymptote").resolve()
@@ -194,13 +200,24 @@ def asymptote(
# generate interactive preview assets
-def interactive(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root):
+def interactive(
+ ptxfile: Path,
+ pub_file: Path,
+ output: Path,
+ params: Dict[str, str],
+ xmlid_root: Optional[str],
+) -> None:
# We assume passed paths are absolute.
# parse source so we can check for interactives.
source_xml = ET.parse(ptxfile)
for _ in range(20):
source_xml.xinclude()
- if len(source_xml.xpath("/pretext/*[not(docinfo)]//interactive")) > 0:
+ if (
+ isinstance(
+ li := source_xml.xpath("/pretext/*[not(docinfo)]//interactive"), List
+ )
+ and len(li) > 0
+ ):
# First verify that playwright has dependencies installed:
utils.playwright_install()
image_output = (output / "preview").resolve()
@@ -230,13 +247,24 @@ def interactive(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root)
# generate youtube thumbnail assets
-def youtube(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root):
+def youtube(
+ ptxfile: Path,
+ pub_file: Path,
+ output: Path,
+ params: Dict[str, str],
+ xmlid_root: Optional[str],
+) -> None:
# We assume passed paths are absolute.
# parse source so we can check for videos.
source_xml = ET.parse(ptxfile)
for _ in range(20):
source_xml.xinclude()
- if len(source_xml.xpath("/pretext/*[not(docinfo)]//video[@youtube]")) > 0:
+ if (
+ isinstance(
+ li := source_xml.xpath("/pretext/*[not(docinfo)]//video[@youtube]"), List
+ )
+ and len(li) > 0
+ ):
image_output = (output / "youtube").resolve()
os.makedirs(image_output, exist_ok=True)
log.info("Now generating youtube previews\n\n")
@@ -264,13 +292,19 @@ def youtube(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root):
# generate webwork assets
-def webwork(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root=None):
+def webwork(
+ ptxfile: Path,
+ pub_file: Path,
+ output: Path,
+ params: Dict[str, str],
+ xmlid_root: Optional[str] = None,
+) -> None:
# We assume passed paths are absolute.
# parse source so we can check for webwork.
source_xml = ET.parse(ptxfile)
for _ in range(20):
source_xml.xinclude()
- if len(source_xml.xpath("//webwork[node()|@*]")) > 0:
+ if isinstance(li := source_xml.xpath("//webwork[node()|@*]"), List) and len(li) > 0:
ww_output = (output / "webwork").resolve()
os.makedirs(ww_output, exist_ok=True)
log.info("Now generating webwork representation\n\n")
@@ -300,13 +334,22 @@ def webwork(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root=None
# generate codelens trace assets
-def codelens(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root):
+def codelens(
+ ptxfile: Path,
+ pub_file: Path,
+ output: Path,
+ params: Dict[str, str],
+ xmlid_root: Optional[str],
+) -> None:
# We assume passed paths are absolute.
# parse source so we can check for webwork.
source_xml = ET.parse(ptxfile)
for _ in range(20):
source_xml.xinclude()
- if len(source_xml.xpath("//program[@interactive = 'codelens']")) > 0:
+ if (
+ isinstance(li := source_xml.xpath("//program[@interactive = 'codelens']"), List)
+ and len(li) > 0
+ ):
trace_output = (output / "trace").resolve()
os.makedirs(trace_output, exist_ok=True)
log.info("Now generating codelens trace\n\n")
@@ -332,15 +375,26 @@ def codelens(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root):
# generate qr code assets
-def qrcodes(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root):
+def qrcodes(
+ ptxfile: Path,
+ pub_file: Path,
+ output: Path,
+ params: Dict[str, str],
+ xmlid_root: Optional[str],
+) -> None:
# We assume passed paths are absolute.
# parse source so we can check for videos.
source_xml = ET.parse(ptxfile)
for _ in range(20):
source_xml.xinclude()
if (
- len(source_xml.xpath("/pretext/*[not(docinfo)]//video")) > 0
- or len(source_xml.xpath("/pretext/*[not(docinfo)]//interactive")) > 0
+ isinstance(li1 := source_xml.xpath("/pretext/*[not(docinfo)]//video"), List)
+ and len(li1) > 0
+ ) or (
+ isinstance(
+ li2 := source_xml.xpath("/pretext/*[not(docinfo)]//interactive"), List
+ )
+ and len(li2) > 0
):
image_output = (output / "qrcode").resolve()
os.makedirs(image_output, exist_ok=True)
@@ -365,7 +419,7 @@ def qrcodes(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root):
# No else clause needed, since this isn't called specifically.
-def play_button(output: Path):
+def play_button(output: Path) -> None:
# Currently we do not parse source to look for videos, as this can run regardless of the source.
image_output = (output / "play-button").resolve()
os.makedirs(image_output, exist_ok=True)
@@ -385,13 +439,22 @@ def play_button(output: Path):
# generate datafile assets
-def datafiles(ptxfile: Path, pub_file: Path, output: Path, params, xmlid_root):
+def datafiles(
+ ptxfile: Path,
+ pub_file: Path,
+ output: Path,
+ params: Dict[str, str],
+ xmlid_root: Optional[str],
+) -> None:
# We assume passed paths are absolute.
# parse source so we can check for datafile elements.
source_xml = ET.parse(ptxfile)
for _ in range(20):
source_xml.xinclude()
- if len(source_xml.xpath("/pretext/*[not(docinfo)]//datafile")) > 0:
+ if (
+ isinstance(li := source_xml.xpath("/pretext/*[not(docinfo)]//datafile"), List)
+ and len(li) > 0
+ ):
datafile_output = (output / "datafile").resolve()
os.makedirs(datafile_output, exist_ok=True)
log.info("Now generating base64 versions of datafiles\n\n")
diff --git a/pretext/project.py b/pretext/project.py
index aef87062..f2dbcd89 100644
--- a/pretext/project.py
+++ b/pretext/project.py
@@ -1,6 +1,6 @@
import pickle
from lxml import etree as ET
-from lxml.etree import Element
+from lxml.etree import _Element
import os
import shutil
import logging
@@ -11,31 +11,46 @@
from pathlib import Path
import sys
from .config.xml_overlay import ShadowXmlDocument
-import typing as t
+from typing import Dict, List, Optional, Tuple
import hashlib
log = logging.getLogger("ptxlogger")
+asset_table_type = Dict[Tuple[str, str], bytes]
+
class Target:
- def __init__(self, xml_element, project_path):
+ def __init__(self, xml_element: _Element, project_path: Path):
# construction is done!
self.__xml_element = xml_element
self.__project_path = Path(project_path).resolve()
# ensure assets directories exist as assumed by core PreTeXt
- if self.external_dir() is not None:
- os.makedirs(self.external_dir(), exist_ok=True)
- if self.generated_dir() is not None:
- os.makedirs(self.generated_dir(), exist_ok=True)
+ if (ex_dir := self.external_dir()) is not None:
+ os.makedirs(ex_dir, exist_ok=True)
+ if (gen_dir := self.generated_dir()) is not None:
+ os.makedirs(gen_dir, exist_ok=True)
- def xml_element(self) -> Element:
+ def xml_element(self) -> _Element:
return self.__xml_element
- def project_path(self):
+ def project_path(self) -> Path:
return self.__project_path
+ # Perform basic schema checking of the project file: the given attribute must be present.
+ def require_str_value(self, value: Optional[str], err: str) -> str:
+ assert value is not None, f"Invalid project file: missing {err}."
+ return value
+
+ def require_tag_text(self, tag_name: str) -> str:
+ element = self.xml_element().find(tag_name)
+ assert element is not None, f"Invalid project file: missing {tag_name} tag."
+ return self.require_str_value(element.text, f"{tag_name} contents")
+
def name(self) -> str:
- return self.xml_element().get("name").strip()
+ # Targets should have a name attribute.
+ return self.require_str_value(
+ self.xml_element().get("name"), "name attribute"
+ ).strip()
def pdf_method(self) -> str:
pdf_method = self.xml_element().get("pdf-method")
@@ -45,21 +60,21 @@ def pdf_method(self) -> str:
return "xelatex" # default
def format(self) -> str:
- return self.xml_element().find("format").text.strip()
+ return self.require_tag_text("format").strip()
def source(self) -> Path:
- return self.project_path() / self.xml_element().find("source").text.strip()
+ return self.project_path() / self.require_tag_text("source").strip()
def source_dir(self) -> Path:
return Path(self.source()).parent
- def source_xml(self):
+ def source_xml(self) -> _Element:
ele_tree = ET.parse(self.source())
ele_tree.xinclude()
return ele_tree.getroot()
def publication(self) -> Path:
- return self.project_path() / self.xml_element().find("publication").text.strip()
+ return self.project_path() / self.require_tag_text("publication").strip()
def publication_dir(self) -> Path:
return self.publication().parent
@@ -67,90 +82,127 @@ def publication_dir(self) -> Path:
def publication_rel_from_source(self) -> Path:
return self.publication().relative_to(self.source_dir())
- def publication_xml(self):
+ def publication_xml(self) -> _Element:
ele_tree = ET.parse(self.publication())
ele_tree.xinclude()
return ele_tree.getroot()
- def external_dir(self) -> t.Optional[Path]:
+ def external_dir(self) -> Optional[Path]:
dir_ele = self.publication_xml().find("source/directories")
if dir_ele is None:
log.error("Publication file does not specify asset directories.")
return None
rel_dir = dir_ele.get("external")
+ assert (
+ rel_dir is not None
+ ), "Invalid project file: missing value in source/directories/external tag."
return self.source_dir() / rel_dir
- def generated_dir(self) -> t.Optional[Path]:
+ # Like the above function, but asserts if the external directory wasn't found.
+ def external_dir_found(self) -> Path:
+ ed = self.external_dir()
+ assert ed is not None, "Internal error: external directory not found."
+ return ed
+
+ def generated_dir(self) -> Optional[Path]:
dir_ele = self.publication_xml().find("source/directories")
if dir_ele is None:
log.error("Publication file does not specify asset directories.")
return None
rel_dir = dir_ele.get("generated")
+ assert (
+ rel_dir is not None
+ ), "Invalid project file: missing value in source/directories/generated tag."
return self.source_dir() / rel_dir
+ # Like the above function, but asserts if the external directory wasn't found.
+ def generated_dir_found(self) -> Path:
+ gd = self.generated_dir()
+ assert gd is not None, "Internal error: generated directory not found."
+ return gd
+
def output_dir(self) -> Path:
return (
- Path(self.__project_path)
- / self.xml_element().find("output-dir").text.strip()
+ Path(self.__project_path) / self.require_tag_text("output-dir").strip()
).resolve()
- def output_filename(self) -> t.Optional[str]:
+ def output_filename(self) -> Optional[str]:
if self.xml_element().find("output-filename") is None:
return None
else:
- return self.xml_element().find("output-filename").text.strip()
+ return self.require_tag_text("output-filename").strip()
def port(self) -> int:
view_ele = self.xml_element().find("view")
- if view_ele is not None and view_ele.get("port") is not None:
- return int(view_ele.get("port"))
+ if view_ele is not None and (port := view_ele.get("port")) is not None:
+ return int(port)
else:
return 8000
- def stringparams(self):
- return {
- sp_ele.get("key").strip(): sp_ele.get("value").strip()
- for sp_ele in self.xml_element().xpath("stringparam")
- }
+ def stringparams(self) -> Dict[str, str]:
+ sp = self.xml_element().xpath("stringparam")
+ assert isinstance(sp, List), "Project file error: stringparam is empty."
+ ret = {}
+ for sp_ele in sp:
+ assert isinstance(
+ sp_ele, _Element
+ ), "Project file error: stringparam contents must be key/value pairs."
+ key = self.require_str_value(
+ sp_ele.get("key"), "Project file error: stringparam missing key."
+ )
+ value = self.require_str_value(
+ sp_ele.get("value"), "Project file error: stringparam missing value."
+ )
+ ret[key.strip()] = value.strip()
+ return ret
- def xsl_path(self) -> t.Optional[Path]:
+ def xsl_path(self) -> Optional[Path]:
if self.xml_element().find("xsl") is not None:
return (
- Path(self.__project_path) / self.xml_element().find("xsl").text.strip()
+ Path(self.__project_path) / self.require_tag_text("xsl").strip()
).resolve()
else:
return None
- def xmlid_root(self):
+ def xmlid_root(self) -> Optional[str]:
ele = self.xml_element().find("xmlid-root")
if ele is None:
return None
else:
- return ele.text.strip()
+ return self.require_str_value(ele.text, "xmlid-root").strip()
- def asset_hash(self):
+ def asset_hash(self) -> asset_table_type:
asset_hash_dict = {}
for asset in ASSETS:
if asset == "webwork":
+ ww = self.source_xml().xpath(".//webwork[@*|*]")
+ assert isinstance(ww, List)
# WeBWorK must be regenerated every time *any* of the ww exercises change.
- if len(self.source_xml().xpath(".//webwork[@*|*]")) == 0:
+ if len(ww) == 0:
# Only generate a hash if there are actually ww exercises in the source
continue
h = hashlib.sha256()
- for node in self.source_xml().xpath(".//webwork[@*|*]"):
+ for node in ww:
+ assert isinstance(node, _Element)
h.update(ET.tostring(node))
asset_hash_dict[(asset, "")] = h.digest()
elif asset != "ALL":
# everything else can be updated individually, if it has an xml:id
- if len(self.source_xml().xpath(f".//{asset}")) == 0:
+ source_assets = self.source_xml().xpath(f".//{asset}")
+ assert isinstance(source_assets, List)
+ if len(source_assets) == 0:
# Only generate a hash if there are actually assets of this type in the source
continue
h_no_id = hashlib.sha256()
- for node in self.source_xml().xpath(f".//{asset}"):
+ for node in source_assets:
+ assert isinstance(node, _Element)
# First see if the node has an xml:id, or if it is a child of a node with an xml:id (but we haven't already made this key)
if (
- id := node.xpath("@xml:id") or node.xpath("parent::*/@xml:id")
- ) and (asset, id[0]) not in asset_hash_dict:
+ (id := node.xpath("@xml:id") or node.xpath("parent::*/@xml:id"))
+ and isinstance(id, List)
+ and (asset, id[0]) not in asset_hash_dict
+ ):
+ assert isinstance(id, _Element)
asset_hash_dict[(asset, id[0])] = hashlib.sha256(
ET.tostring(node)
).digest()
@@ -160,62 +212,70 @@ def asset_hash(self):
asset_hash_dict[(asset, "")] = h_no_id.digest()
return asset_hash_dict
- def save_asset_table(self, asset_table: dict):
+ def save_asset_table(self, asset_table: asset_table_type) -> None:
"""
Saves the asset_table to a pickle file in the generated assets directory based on the target name.
"""
with open(
- self.generated_dir().joinpath(f".{self.name()}_assets.pkl"), "wb"
+ self.generated_dir_found().joinpath(f".{self.name()}_assets.pkl"), "wb"
) as f:
pickle.dump(asset_table, f)
- def load_asset_table(self) -> dict:
+ def load_asset_table(self) -> asset_table_type:
"""
Loads the asset_table from a pickle file in the generated assets directory based on the target name.
"""
try:
with open(
- self.generated_dir().joinpath(f".{self.name()}_assets.pkl"), "rb"
+ self.generated_dir_found().joinpath(f".{self.name()}_assets.pkl"), "rb"
) as f:
return pickle.load(f)
except Exception:
return {}
- def needs_ww_reps(self):
+ def needs_ww_reps(self) -> bool:
return self.source_xml().find(".//webwork/statement") is not None
- def has_ww_reps(self):
+ def has_ww_reps(self) -> bool:
return Path.exists(
- self.generated_dir() / "webwork" / "webwork-representations.xml"
+ self.generated_dir_found() / "webwork" / "webwork-representations.xml"
)
class Project:
- def __init__(self, project_path=None):
+ def __init__(self, project_path: Optional[Path] = None):
project_path = project_path or utils.project_path()
+ assert project_path is not None, "Unable to find project path."
xml_element = ET.parse(project_path / "project.ptx").getroot()
self.__xml_element = xml_element
self.__project_path = project_path
# prepre core PreTeXt python scripts
self.init_ptxcore()
- def apply_overlay(self, overlay: ShadowXmlDocument):
+ def apply_overlay(self, overlay: ShadowXmlDocument) -> List[str]:
"""
Modify the internal data structure of the `project.ptx` XML tree by applying the supplied overlay.
This modification happens in-memory only.
"""
return overlay.overlay_tree(self.__xml_element)
- def xml_element(self) -> Element:
+ def xml_element(self) -> _Element:
return self.__xml_element
- def targets(self):
- return [
- Target(xml_element=target_element, project_path=self.__project_path)
- for target_element in self.xml_element().xpath("targets/target")
- ]
+ def targets(self) -> List[Target]:
+ t = self.xml_element().xpath("targets/target")
+ assert isinstance(
+ t, List
+ ), "Project file error: expected list of targets in targets/target tags."
+ ret: List[Target] = []
+ for target_element in t:
+ assert isinstance(t, _Element), "Project file error: target must be a tag."
+ t.append(
+ Target(xml_element=target_element, project_path=self.__project_path)
+ )
+ return ret
- def target_names(self, *args):
+ def target_names(self, *args: str) -> List[str]:
# Optional arguments are formats: returns list of targets that have that format.
names = []
for target in self.targets():
@@ -223,11 +283,11 @@ def target_names(self, *args):
names.append(target.name())
return names
- def print_target_names(self):
+ def print_target_names(self) -> None:
for target in self.targets():
print(target.name())
- def target(self, name=None) -> Target:
+ def target(self, name: Optional[str] = None) -> Optional[Target]:
if name is None:
target_element = self.xml_element().find("targets/target")
else:
@@ -235,6 +295,7 @@ def target(self, name=None) -> Target:
if target_element is not None:
return Target(xml_element=target_element, project_path=self.__project_path)
else:
+ log.error("Unable to find target.")
return None
def view(
@@ -244,8 +305,11 @@ def view(
port: int,
watch: bool = False,
no_launch: bool = False,
- ):
+ ) -> None:
target = self.target(target_name)
+ if target is None:
+ log.error("Unable to find target.")
+ return
directory = target.output_dir()
if watch:
@@ -259,15 +323,18 @@ def view(
)
return
- def watch_callback():
- return self.build(target_name)
+ def watch_callback() -> None:
+ self.build(target_name)
utils.run_server(
directory, access, port, watch_directory, watch_callback, no_launch
)
- def build(self, target_name, clean=False):
+ def build(self, target_name: str, clean: bool = False) -> None:
target = self.target(target_name)
+ if target is None:
+ log.error(f"Target `{target_name}` not found.")
+ return
# Check for xml syntax errors and quit if xml invalid:
if not self.xml_source_is_valid(target_name):
return
@@ -300,13 +367,11 @@ def build(self, target_name, clean=False):
shutil.rmtree(target.output_dir())
# if custom xsl, copy it into a temporary directory (different from the building temporary directory)
custom_xsl = None
- if target.xsl_path() is not None:
+ if (txp := target.xsl_path()) is not None:
temp_xsl_path = Path(tempfile.mkdtemp())
- log.info(
- f"Building with custom xsl {target.xsl_path()} specified in project.ptx"
- )
- utils.copy_custom_xsl(target.xsl_path(), temp_xsl_path)
- custom_xsl = temp_xsl_path / target.xsl_path().name
+ log.info(f"Building with custom xsl {txp} specified in project.ptx")
+ utils.copy_custom_xsl(txp, temp_xsl_path)
+ custom_xsl = temp_xsl_path / txp.name
# warn if "publisher" is one of the string-param keys:
if "publisher" in target.stringparams():
log.warning(
@@ -335,12 +400,12 @@ def build(self, target_name, clean=False):
)
# core script doesn't put a copy of images in output for latex builds, so we do it instead here
shutil.copytree(
- target.external_dir(),
+ target.external_dir_found(),
target.output_dir() / "external",
dirs_exist_ok=True,
)
shutil.copytree(
- target.generated_dir(),
+ target.generated_dir_found(),
target.output_dir() / "generated",
dirs_exist_ok=True,
)
@@ -421,20 +486,26 @@ def build(self, target_name, clean=False):
# errors may occur in Windows so we do the best we can
shutil.rmtree(custom_xsl.parent, ignore_errors=True)
- def generate(self, target_name, asset_list=None, all_formats=False, xmlid=None):
+ def generate(
+ self,
+ target_name: str,
+ asset_list: Optional[List[str]] = None,
+ all_formats: bool = False,
+ xmlid: Optional[str] = None,
+ ) -> None:
if asset_list is None:
asset_list = []
gen_all = True
else:
gen_all = False
target = self.target(target_name)
- xmlid = xmlid or target.xmlid_root()
if target is None:
log.error(f"Target `{target_name}` not found.")
return
+ xmlid = xmlid or target.xmlid_root()
# build targets:
if gen_all or "webwork" in asset_list:
- webwork_output = target.generated_dir() / "webwork"
+ webwork_output = target.generated_dir_found() / "webwork"
generate.webwork(
target.source(),
target.publication(),
@@ -446,7 +517,7 @@ def generate(self, target_name, asset_list=None, all_formats=False, xmlid=None):
generate.latex_image(
target.source(),
target.publication(),
- target.generated_dir(),
+ target.generated_dir_found(),
target.stringparams(),
target.format(),
xmlid,
@@ -457,7 +528,7 @@ def generate(self, target_name, asset_list=None, all_formats=False, xmlid=None):
generate.asymptote(
target.source(),
target.publication(),
- target.generated_dir(),
+ target.generated_dir_found(),
target.stringparams(),
target.format(),
xmlid,
@@ -467,7 +538,7 @@ def generate(self, target_name, asset_list=None, all_formats=False, xmlid=None):
generate.sageplot(
target.source(),
target.publication(),
- target.generated_dir(),
+ target.generated_dir_found(),
target.stringparams(),
target.format(),
xmlid,
@@ -477,7 +548,7 @@ def generate(self, target_name, asset_list=None, all_formats=False, xmlid=None):
generate.interactive(
target.source(),
target.publication(),
- target.generated_dir(),
+ target.generated_dir_found(),
target.stringparams(),
xmlid,
)
@@ -485,18 +556,18 @@ def generate(self, target_name, asset_list=None, all_formats=False, xmlid=None):
generate.youtube(
target.source(),
target.publication(),
- target.generated_dir(),
+ target.generated_dir_found(),
target.stringparams(),
xmlid,
)
generate.play_button(
- target.generated_dir(),
+ target.generated_dir_found(),
)
if gen_all or "codelens" in asset_list:
generate.codelens(
target.source(),
target.publication(),
- target.generated_dir(),
+ target.generated_dir_found(),
target.stringparams(),
xmlid,
)
@@ -504,7 +575,7 @@ def generate(self, target_name, asset_list=None, all_formats=False, xmlid=None):
generate.datafiles(
target.source(),
target.publication(),
- target.generated_dir(),
+ target.generated_dir_found(),
target.stringparams(),
xmlid,
)
@@ -512,14 +583,14 @@ def generate(self, target_name, asset_list=None, all_formats=False, xmlid=None):
generate.qrcodes(
target.source(),
target.publication(),
- target.generated_dir(),
+ target.generated_dir_found(),
target.stringparams(),
xmlid,
)
# Delete temporary directories left behind by core:
core.release_temporary_directories()
- def deploy(self, target_name, update_source):
+ def deploy(self, target_name: str, update_source: bool) -> None:
try:
import git
import ghp_import
@@ -530,12 +601,15 @@ def deploy(self, target_name, update_source):
log.error("Visit https://github.com/git-guides/install-git for assistance.")
return
target = self.target(target_name)
+ if target is None:
+ log.error(f"Target `{target_name}` not found.")
+ return
if target.format() != "html": # redundant for CLI
log.error("Only HTML format targets are supported.")
return
try:
repo = git.Repo(self.__project_path)
- except git.exc.InvalidGitRepositoryError:
+ except git.exc.InvalidGitRepositoryError: # type: ignore
log.info("Initializing project with Git.")
repo = git.Repo.init(self.__project_path)
try:
@@ -625,7 +699,7 @@ def deploy(self, target_name, update_source):
try:
origin.push(refspec=f"{repo.active_branch.name}:{repo.active_branch.name}")
origin.push(refspec="gh-pages:gh-pages")
- except git.exc.GitCommandError:
+ except git.exc.GitCommandError: # type: ignore
log.warning(
f"There was an issue connecting to GitHub repository located at {repo_url}"
)
@@ -659,16 +733,23 @@ def deploy(self, target_name, update_source):
log.info("Your built project will soon be available to the public at:")
log.info(f" {pages_url}")
- def xml_source_is_valid(self, target_name):
+ def xml_source_is_valid(self, target_name: str) -> bool:
target = self.target(target_name)
+ if target is None:
+ return False
return utils.xml_syntax_is_valid(target.source())
- def xml_schema_validate(self, target_name):
+ def xml_schema_validate(self, target_name: str) -> bool:
target = self.target(target_name)
+ if target is None:
+ return False
return utils.xml_source_validates_against_schema(target.source())
- def xml_publication_is_valid(self, target_name):
+ def xml_publication_is_valid(self, target_name: str) -> bool:
target = self.target(target_name)
+ if target is None:
+ log.error(f"Target `{target_name}` not found.")
+ return False
try:
publication_xml = ET.parse(target.publication())
# Would we ever have a publication with xi:include? Just in case...
@@ -684,8 +765,24 @@ def xml_publication_is_valid(self, target_name):
return False
return True
- def executables(self):
- return {ele.tag: ele.text for ele in self.xml_element().xpath("executables/*")}
-
- def init_ptxcore(self):
+ def executables(self) -> Dict[str, str]:
+ ret = {}
+ exec = self.xml_element().xpath("executables/*")
+ assert isinstance(
+ exec, List
+ ), "Invalid project file: executables tag contents must be tags."
+ for ele in exec:
+ assert isinstance(
+ ele, _Element
+            ), "Invalid project file: children of <executables> must be tags."
+ key = ele.tag
+ value = ele.text
+ assert (
+ value is not None
+            ), "Invalid project file: missing value in <executables> child tag."
+ ret[key] = value
+
+ return ret
+
+ def init_ptxcore(self) -> None:
core.set_executables(self.executables())
diff --git a/pretext/templates/__init__.py b/pretext/templates/__init__.py
index a34f7599..ca9a76f5 100644
--- a/pretext/templates/__init__.py
+++ b/pretext/templates/__init__.py
@@ -1,8 +1,8 @@
-from pathlib import Path
+from contextlib import AbstractContextManager
import importlib.resources as ir
-def resource_path(filename: str) -> Path:
+def resource_path(filename: str) -> AbstractContextManager:
"""
Returns resource manager
Usage:
diff --git a/pretext/utils.py b/pretext/utils.py
index 99eaf31d..b7de49cf 100644
--- a/pretext/utils.py
+++ b/pretext/utils.py
@@ -1,6 +1,7 @@
import os
import random
import json
+from collections.abc import Generator
from contextlib import contextmanager
from http.server import SimpleHTTPRequestHandler
import shutil
@@ -12,6 +13,7 @@
import subprocess
import sys
import logging
+import logging.handlers
import threading
import watchdog.events
import watchdog.observers
@@ -19,7 +21,8 @@
import webbrowser
import typing as t
from lxml import etree as ET
-from typing import Optional
+from lxml.etree import _ElementTree, _Element
+from typing import Any, cast, Callable, List, Optional
from . import core, templates, BUILD_FORMATS
@@ -28,7 +31,7 @@
@contextmanager
-def working_directory(path: Path):
+def working_directory(path: Path) -> Generator:
"""
Temporarily change the current working directory.
@@ -48,7 +51,7 @@ def working_directory(path: Path):
# Grabs project directory based on presence of `project.ptx`
-def project_path(dirpath: Optional[Path] = None) -> Path:
+def project_path(dirpath: Optional[Path] = None) -> Optional[Path]:
if dirpath is None:
dirpath = Path().resolve() # current directory
if (dirpath / "project.ptx").is_file():
@@ -62,29 +65,40 @@ def project_path(dirpath: Optional[Path] = None) -> Path:
return project_path(dirpath=dirpath.parent)
-def project_xml(dirpath: t.Optional[Path] = None) -> Path:
+# Like above, but asserts if the project path can't be found.
+def project_path_found(dirpath: Optional[Path] = None) -> Path:
+ pp = project_path(dirpath)
+ assert pp is not None, "Invalid project path"
+ return pp
+
+
+def project_xml(dirpath: t.Optional[Path] = None) -> _ElementTree:
if dirpath is None:
dirpath = Path() # current directory
- if project_path(dirpath) is None:
+ pp = project_path(dirpath)
+ if pp is None:
with templates.resource_path("project.ptx") as project_manifest:
return ET.parse(project_manifest)
else:
- project_manifest = project_path(dirpath) / "project.ptx"
+ project_manifest = pp / "project.ptx"
return ET.parse(project_manifest)
-def requirements_version(dirpath: Optional[Path] = None) -> str:
+def requirements_version(dirpath: Optional[Path] = None) -> Optional[str]:
if dirpath is None:
dirpath = Path() # current directory
+ pp = project_path(dirpath)
+ if pp is None:
+ return None
try:
- with open(project_path(dirpath) / "requirements.txt", "r") as f:
+ with open(pp / "requirements.txt", "r") as f:
for line in f.readlines():
if "pretext" or "pretextbook" in line:
return line.split("==")[1].strip()
except Exception as e:
log.debug("Could not read `requirements.txt`:")
log.debug(e)
- return None
+ return None
def project_xml_string(dirpath: Optional[Path] = None) -> str:
@@ -93,21 +107,24 @@ def project_xml_string(dirpath: Optional[Path] = None) -> str:
return ET.tostring(project_xml(dirpath), encoding="unicode")
+# TODO: is this ever called?
def target_xml(
alias: t.Optional[str] = None, dirpath: t.Optional[Path] = None
-) -> ET.Element:
+) -> Optional[_Element]:
if dirpath is None:
dirpath = Path() # current directory
if alias is None:
return project_xml().find("targets/target") # first target
xpath = f'targets/target[@name="{alias}"]'
- matches = project_xml().xpath(xpath)
+ _matches = project_xml().xpath(xpath)
+ # Given that this is a project target, narrow the type of the match: ``xpath`` can return a wide variety of results.
+ matches = cast(List[_Element], _matches)
if len(matches) == 0:
log.info(
f"No targets with alias {alias} found in project manifest file project.ptx."
)
return None
- return project_xml().xpath(xpath)[0]
+ return matches[0]
# check xml syntax
@@ -180,15 +197,15 @@ def cocalc_project_id() -> t.Optional[str]:
# watchdog handler for watching changes to source
class HTMLRebuildHandler(watchdog.events.FileSystemEventHandler):
- def __init__(self, callback):
+ def __init__(self, callback: Callable[[], None]):
self.last_trigger_at = time.time() - 5
self.callback = callback
- def on_any_event(self, event):
+ def on_any_event(self, event: watchdog.events.FileSystemEvent) -> None:
self.last_trigger_at = time.time()
# only run callback once triggers halt for a second
- def timeout_callback(handler):
+ def timeout_callback(handler: "HTMLRebuildHandler") -> None:
time.sleep(1.5)
if time.time() > handler.last_trigger_at + 1:
handler.last_trigger_at = time.time()
@@ -200,14 +217,14 @@ def timeout_callback(handler):
# boilerplate to prevent overzealous caching by preview server, and
# avoid port issues
-def binding_for_access(access="private"):
+def binding_for_access(access: str = "private") -> str:
if access == "private":
return "localhost"
else:
return "0.0.0.0"
-def url_for_access(access="private", port=8000):
+def url_for_access(access: str = "private", port: int = 8000) -> str:
if access == "public":
return f"http://{socket.gethostbyname(socket.gethostname())}:{port}"
else:
@@ -215,8 +232,8 @@ def url_for_access(access="private", port=8000):
def serve_forever(
- directory: Path, access="private", port=8000, no_launch: bool = False
-):
+ directory: Path, access: str = "private", port: int = 8000, no_launch: bool = False
+) -> None:
log.info(f"Now preparing local server to preview directory `{directory}`.")
log.info(
" (Reminder: use `pretext deploy` to deploy your built project to a public"
@@ -229,16 +246,16 @@ def serve_forever(
binding = binding_for_access(access)
class RequestHandler(SimpleHTTPRequestHandler):
- def __init__(self, *args, **kwargs):
+ def __init__(self, *args: Any, **kwargs: Any):
super().__init__(*args, directory=directory.as_posix(), **kwargs)
"""HTTP request handler with no caching"""
- def end_headers(self):
+ def end_headers(self) -> None:
self.send_my_headers()
SimpleHTTPRequestHandler.end_headers(self)
- def send_my_headers(self):
+ def send_my_headers(self) -> None:
self.send_header("Cache-Control", "no-cache, no-store, must-revalidate")
self.send_header("Pragma", "no-cache")
self.send_header("Expires", "0")
@@ -272,9 +289,9 @@ def run_server(
access: str,
port: int,
watch_directory: t.Optional[Path] = None,
- watch_callback=lambda: None,
+ watch_callback: Callable[[], None] = lambda: None,
no_launch: bool = False,
-):
+) -> None:
threading.Thread(
target=lambda: serve_forever(directory, access, port, no_launch), daemon=True
).start()
@@ -306,7 +323,7 @@ def nstag(prefix: str, suffix: str) -> str:
return "{" + NSMAP[prefix] + "}" + suffix
-def copy_custom_xsl(xsl_path: Path, output_dir: Path):
+def copy_custom_xsl(xsl_path: Path, output_dir: Path) -> None:
"""
Copy relevant files that share a directory with `xsl_path`.
Pre-processing the `.xsl` files to point to subdirectory for graceful deprecation.
@@ -319,16 +336,17 @@ def copy_custom_xsl(xsl_path: Path, output_dir: Path):
shutil.copytree(core.resources.path("xsl"), output_dir / "core")
-def check_executable(exec_name: str):
+def check_executable(exec_name: str) -> Optional[str]:
try:
exec_cmd = core.get_executable_cmd(exec_name)[0]
log.debug(f"PTX-CLI: Executable command {exec_name} found at {exec_cmd}")
return exec_cmd
except OSError as e:
log.debug(e)
+ return None
-def check_asset_execs(element, outformats=None):
+def check_asset_execs(element: str, outformats: Optional[List[str]] = None) -> None:
# outformats is assumed to be a list of formats.
if outformats is None:
outformats = []
@@ -402,7 +420,12 @@ def no_project(task: str) -> bool:
return False
-def show_target_hints(target_format: str, project, task: str):
+def show_target_hints(
+ target_format: Optional[str],
+ # TODO: the type is ``project.Project``, but we can't ``import project`` due to circular imports.
+ project: Any,
+ task: str,
+) -> None:
"""
This will give the user hints about why they have provided a bad target and make helpful suggestions for them to fix the problem. We will only run this function when the target_name is not the name in any target in project.ptx.
"""
@@ -433,7 +456,7 @@ def show_target_hints(target_format: str, project, task: str):
)
-def npm_install():
+def npm_install() -> None:
with working_directory(core.resources.path("script", "mjsre")):
log.info("Attempting to install/update required node packages.")
try:
@@ -446,7 +469,7 @@ def npm_install():
log.debug("", exc_info=True)
-def playwright_install():
+def playwright_install() -> None:
"""
Run `playwright install` to ensure that its required browsers and tools are available to it.
"""
@@ -463,14 +486,14 @@ def playwright_install():
log.debug("", exc_info=True)
-def remove_path(path: Path):
+def remove_path(path: Path) -> None:
if path.is_file() or path.is_symlink():
path.unlink() # remove the file
elif path.is_dir():
shutil.rmtree(path) # remove dir and all it contains
-def exit_command(mh):
+def exit_command(mh: logging.handlers.MemoryHandler) -> None:
"""
Clean's up at the end of a run.
Checks to see if anything (errors etc.) is in the memory handler. If it is, reports that there are errors before the handler gets flushed. Otherwise, adds a single blank line.
diff --git a/pyproject.toml b/pyproject.toml
index 10b17069..14eb1702 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,14 @@
+# ********************************
+# |docname| - Poetry configuration
+# ********************************
+#
+# Poetry
+# ======
+# See https://python-poetry.org/docs/dependency-specification/ to get an understanding of
+# how poetry specifies dependencies.
+#
+# Project metadata
+# ----------------
[tool.poetry]
name = "pretext"
version = "1.6.1"
@@ -13,6 +24,8 @@ include = [
"pretext/templates/resources/*",
]
+# Dependencies
+# ------------
[tool.poetry.dependencies]
python = "^3.8.5"
lxml = "^4.8"
@@ -28,21 +41,74 @@ single-version = "^1"
playwright = "^1"
qrcode = "^7"
-[tool.poetry.scripts]
-pretext = 'pretext.cli:main'
-
+# Development dependencies
+# ------------------------
[tool.poetry.group.dev.dependencies]
-pytest = "^7.0.0"
-pytest-console-scripts = "^1.3.1"
+black = "^23"
+codechat-server = "^0"
+flake8 = "^6"
+lxml-stubs = "^0"
+mypy = "^1"
+pytest = "^7"
+pytest-console-scripts = "^1"
pytest-cov = "^4"
-pytest-mock = "^3.8.2"
-black = "^22.12.0"
-codechat-server = "^0.2.9"
-flake8 = "^6.0.0"
+pytest-mock = "^3"
+# Needed by coverage, so it can read its configuration from this file. See the [Coverage docs](https://coverage.readthedocs.io/en/7.2.7/config.html).
+toml = "^0"
-[tool.pytest.ini_options]
-script_launch_mode = "subprocess"
+# Misc
+# ----
+[tool.poetry.scripts]
+pretext = 'pretext.cli:main'
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
+
+
+# Pytest configuration
+# ====================
+[tool.pytest.ini_options]
+script_launch_mode = "subprocess"
+
+
+# mypy configuration
+# ==================
+# To run, execute ``mypy`` from the directory containing this file.
+#
+# This section `must <https://mypy.readthedocs.io/en/stable/config_file.html>`_ be present.
+[tool.mypy]
+# See `files <https://mypy.readthedocs.io/en/stable/config_file.html#confval-files>`_.
+files = "pretext"
+exclude = "^pretext/core/pretext.py"
+check_untyped_defs = true
+disallow_untyped_defs = true
+
+# Ignores for libraries with no type hints
+# ----------------------------------------
+[[tool.mypy.overrides]]
+module = [
+ "click_log",
+ "single_version",
+ "ghp_import",
+ # We're installing ``lxml-stubs``, but it doesn't have stubs for this yet.
+ "lxml.ElementInclude",
+]
+ignore_missing_imports = true
+
+
+# Coverage configuration
+# ======================
+[tool.coverage.run]
+# Select code to be covered -- everything in the `pretext` subdirectory.
+source = [ "pretext" ]
+# This doesn't work. It works if the path is absolute, but I can't find any relative path that does work.
+#omit = [ "pretext/core/pretext.py" ]
+
+[tool.coverage.report]
+# This is a simple workaround for the `omit` setting above: don't report coverage for the PreTeXt core.
+omit = [ "pretext/core/pretext.py" ]
+
+[tool.coverage.html]
+# For better organization, place generated reports under the tests.
+directory = "tests/htmlcov"