diff --git a/.github/workflows/downstream.yml b/.github/workflows/downstream.yml index 635c404e32..8763635490 100644 --- a/.github/workflows/downstream.yml +++ b/.github/workflows/downstream.yml @@ -107,6 +107,23 @@ jobs: test_command: pip install pytest-jupyter[server] && pytest -vv -raXxs -W default --durations 10 --color=yes package_name: jupyter_server_terminals + jupytext: + runs-on: ubuntu-latest + timeout-minutes: 10 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Base Setup + uses: jupyterlab/maintainer-tools/.github/actions/base-setup@v1 + + - name: Test jupytext + uses: jupyterlab/maintainer-tools/.github/actions/downstream-test@v1 + with: + package_name: jupytext + test_command: pip install pytest-jupyter[server] gitpython pre-commit && python -m ipykernel install --name jupytext-dev --user && pytest -vv -raXxs -W default --durations 10 --color=yes --ignore=tests/test_doc_files_are_notebooks.py --ignore=tests/test_changelog.py + downstream_check: # This job does nothing and is only used for the branch protection if: always() needs: @@ -115,6 +132,7 @@ jobs: - jupyterlab_server - notebook - nbclassic + - jupytext runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6ea5ed59ba..1fce7856ec 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,7 +21,7 @@ repos: - id: trailing-whitespace - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.27.1 + rev: 0.27.2 hooks: - id: check-github-workflows @@ -33,7 +33,7 @@ repos: [mdformat-gfm, mdformat-frontmatter, mdformat-footnote] - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v3.0.3" + rev: "v3.1.0" hooks: - id: prettier types_or: [yaml, html, json] @@ -58,7 +58,7 @@ repos: - id: rst-inline-touching-normal - repo: https://github.com/pre-commit/mirrors-mypy - rev: "v1.6.1" + rev: "v1.7.1" hooks: - id: mypy files: jupyter_server @@ -67,7 +67,7 @@ repos: ["traitlets>=5.13", "jupyter_core>=5.5", "jupyter_client>=8.5"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.1.5 + rev: v0.1.6 hooks: - id: ruff types_or: [python, jupyter] @@ -76,7 +76,7 @@ repos: types_or: [python, jupyter] - repo: https://github.com/scientific-python/cookie - rev: "2023.10.27" + rev: "2023.11.17" hooks: - id: sp-repo-review additional_dependencies: ["repo-review[cli]"] diff --git a/CHANGELOG.md b/CHANGELOG.md index 01dbdbb593..fd6d970440 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,108 @@ All notable changes to this project will be documented in this file. 
+## 2.12.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.12.0...a59beb9b7bf3decc00af782821561435f47bbb16)) + +### Enhancements made + +- log extension import time at debug level unless it's actually slow [#1375](https://github.com/jupyter-server/jupyter_server/pull/1375) ([@minrk](https://github.com/minrk)) +- Add support for async Authorizers (part 2) [#1374](https://github.com/jupyter-server/jupyter_server/pull/1374) ([@Zsailer](https://github.com/Zsailer)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-12-05&to=2023-12-06&type=c)) + +[@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-12-05..2023-12-06&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2023-12-05..2023-12-06&type=Issues) + + + +## 2.12.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.11.2...3bd347b6f2ead5897a18c6171db1174eaaf6176d)) + +### Enhancements made + +- Support async Authorizers [#1373](https://github.com/jupyter-server/jupyter_server/pull/1373) ([@Zsailer](https://github.com/Zsailer)) + +### Maintenance and upkeep improvements + +- Update for tornado 6.4 [#1372](https://github.com/jupyter-server/jupyter_server/pull/1372) ([@blink1073](https://github.com/blink1073)) +- chore: update pre-commit hooks [#1370](https://github.com/jupyter-server/jupyter_server/pull/1370) ([@pre-commit-ci](https://github.com/pre-commit-ci)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-12-04&to=2023-12-05&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-12-04..2023-12-05&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2023-12-04..2023-12-05&type=Issues) | [@Zsailer](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AZsailer+updated%3A2023-12-04..2023-12-05&type=Issues) + +## 2.11.2 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.11.1)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-11-27&to=2023-12-04&type=c)) + +## 2.11.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.11.0...40a95e5f39d3f167bebf9232da9fab64818ba97d)) + +### Bugs fixed + +- avoid unhandled error on some invalid paths [#1369](https://github.com/jupyter-server/jupyter_server/pull/1369) ([@minrk](https://github.com/minrk)) +- Change md5 to hash and hash_algorithm, fix incompatibility [#1367](https://github.com/jupyter-server/jupyter_server/pull/1367) ([@Wh1isper](https://github.com/Wh1isper)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-11-21&to=2023-11-27&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-11-21..2023-11-27&type=Issues) | [@fcollonval](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Afcollonval+updated%3A2023-11-21..2023-11-27&type=Issues) | 
[@minrk](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Aminrk+updated%3A2023-11-21..2023-11-27&type=Issues) | [@Wh1isper](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AWh1isper+updated%3A2023-11-21..2023-11-27&type=Issues) + +## 2.11.0 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.10.1...e7c0f331d4cbf82eb1a9e9bc6c260faabda0255a)) + +### Enhancements made + +- Support get file(notebook) md5 [#1363](https://github.com/jupyter-server/jupyter_server/pull/1363) ([@Wh1isper](https://github.com/Wh1isper)) + +### Maintenance and upkeep improvements + +- Update ruff and typings [#1365](https://github.com/jupyter-server/jupyter_server/pull/1365) ([@blink1073](https://github.com/blink1073)) + +### Documentation improvements + +- Update api docs with md5 param [#1364](https://github.com/jupyter-server/jupyter_server/pull/1364) ([@Wh1isper](https://github.com/Wh1isper)) +- typo: ServerApp [#1361](https://github.com/jupyter-server/jupyter_server/pull/1361) ([@IITII](https://github.com/IITII)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-11-15&to=2023-11-21&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-11-15..2023-11-21&type=Issues) | [@IITII](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AIITII+updated%3A2023-11-15..2023-11-21&type=Issues) | [@welcome](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Awelcome+updated%3A2023-11-15..2023-11-21&type=Issues) | [@Wh1isper](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3AWh1isper+updated%3A2023-11-15..2023-11-21&type=Issues) + +## 2.10.1 + +([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.10.0...9f8ff2886903a6744c5eb483f9e5bd7e63d5d015)) + +### Bugs fixed + +- ContentsHandler return 404 rather than raise exc [#1357](https://github.com/jupyter-server/jupyter_server/pull/1357) ([@bloomsa](https://github.com/bloomsa)) + +### Maintenance and upkeep improvements + +- Clean up ruff config [#1358](https://github.com/jupyter-server/jupyter_server/pull/1358) ([@blink1073](https://github.com/blink1073)) +- Add more typings [#1356](https://github.com/jupyter-server/jupyter_server/pull/1356) ([@blink1073](https://github.com/blink1073)) +- chore: update pre-commit hooks [#1355](https://github.com/jupyter-server/jupyter_server/pull/1355) ([@pre-commit-ci](https://github.com/pre-commit-ci)) + +### Contributors to this release + +([GitHub contributors page for this release](https://github.com/jupyter-server/jupyter_server/graphs/contributors?from=2023-11-06&to=2023-11-15&type=c)) + +[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-11-06..2023-11-15&type=Issues) | [@bloomsa](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Abloomsa+updated%3A2023-11-06..2023-11-15&type=Issues) | [@pre-commit-ci](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Apre-commit-ci+updated%3A2023-11-06..2023-11-15&type=Issues) + ## 2.10.0 ([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.9.1...e71e95884483c7ce2d9fd5ee83059a0269741aa1)) @@ -25,8 +127,6 @@ All notable changes to this project will be documented in this file. 
[@blink1073](https://github.com/search?q=repo%3Ajupyter-server%2Fjupyter_server+involves%3Ablink1073+updated%3A2023-10-25..2023-11-06&type=Issues) - - ## 2.9.1 ([Full Changelog](https://github.com/jupyter-server/jupyter_server/compare/v2.9.0...bb293ec5cac5b277259f27e458da60fa8a926f46)) diff --git a/docs/source/conf.py b/docs/source/conf.py index 4cb0a01b01..7f59cb956b 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python3 -# # Jupyter Server documentation build configuration file, created by # sphinx-quickstart on Mon Apr 13 09:51:11 2015. # @@ -44,7 +42,7 @@ ] try: - import enchant # type:ignore[import-not-found] # noqa + import enchant # type:ignore[import-not-found] extensions += ["sphinxcontrib.spelling"] except ImportError: @@ -338,7 +336,7 @@ spelling_word_list_filename = "spelling_wordlist.txt" # import before any doc is built, so _ is guaranteed to be injected -import jupyter_server.transutils # noqa: F401 +import jupyter_server.transutils CONFIG_HEADER = """\ .. _other-full-config: diff --git a/docs/source/developers/architecture.rst b/docs/source/developers/architecture.rst index 37d676bff5..0a32a2774c 100644 --- a/docs/source/developers/architecture.rst +++ b/docs/source/developers/architecture.rst @@ -28,7 +28,7 @@ Jupyter Server contains the following components: - **Config Manager** initializes configuration for the ServerApp. You can define custom classes for the Jupyter Server managers using this config and change - SererApp settings. Follow :ref:`the Config File Guide ` to + ServerApp settings. Follow :ref:`the Config File Guide ` to learn about configuration settings and how to build custom config. - **Custom Extensions** allow you to create the custom Server's REST API endpoints. diff --git a/docs/source/developers/contents.rst b/docs/source/developers/contents.rst index f20378314a..6910535f30 100644 --- a/docs/source/developers/contents.rst +++ b/docs/source/developers/contents.rst @@ -33,36 +33,48 @@ which we refer to as **models**. Models may contain the following entries: -+--------------------+-----------+------------------------------+ -| Key | Type |Info | -+====================+===========+==============================+ -|**name** |unicode |Basename of the entity. | -+--------------------+-----------+------------------------------+ -|**path** |unicode |Full | -| | |(:ref:`API-style`) | -| | |path to the entity. | -+--------------------+-----------+------------------------------+ -|**type** |unicode |The entity type. One of | -| | |``"notebook"``, ``"file"`` or | -| | |``"directory"``. | -+--------------------+-----------+------------------------------+ -|**created** |datetime |Creation date of the entity. | -+--------------------+-----------+------------------------------+ -|**last_modified** |datetime |Last modified date of the | -| | |entity. | -+--------------------+-----------+------------------------------+ -|**content** |variable |The "content" of the entity. | -| | |(:ref:`See | -| | |Below`) | -+--------------------+-----------+------------------------------+ -|**mimetype** |unicode or |The mimetype of ``content``, | -| |``None`` |if any. (:ref:`See | -| | |Below`) | -+--------------------+-----------+------------------------------+ -|**format** |unicode or |The format of ``content``, | -| |``None`` |if any. 
(:ref:`See | -| | |Below`) | -+--------------------+-----------+------------------------------+ ++--------------------+------------+-------------------------------+ +| Key | Type | Info | ++====================+============+===============================+ +| **name** | unicode | Basename of the entity. | ++--------------------+------------+-------------------------------+ +| **path** | unicode | Full | +| | | (:ref:`API-style`) | +| | | path to the entity. | ++--------------------+------------+-------------------------------+ +| **type** | unicode | The entity type. One of | +| | | ``"notebook"``, ``"file"`` or | +| | | ``"directory"``. | ++--------------------+------------+-------------------------------+ +| **created** | datetime | Creation date of the entity. | ++--------------------+------------+-------------------------------+ +| **last_modified** | datetime | Last modified date of the | +| | | entity. | ++--------------------+------------+-------------------------------+ +| **content** | variable | The "content" of the entity. | +| | | (:ref:`See | +| | | Below`) | ++--------------------+------------+-------------------------------+ +| **mimetype** | unicode or | The mimetype of ``content``, | +| | ``None`` | if any. (:ref:`See | +| | | Below`) | ++--------------------+------------+-------------------------------+ +| **format** | unicode or | The format of ``content``, | +| | ``None`` | if any. (:ref:`See | +| | | Below`) | ++--------------------+------------+-------------------------------+ +| [optional] | | | +| **hash** | unicode or | The hash of the contents. | +| | ``None`` | It cannot be null if | +| | | ``hash_algorithm`` is | +| | | defined. | ++--------------------+------------+-------------------------------+ +| [optional] | | | +| **hash_algorithm** | unicode or | The algorithm used to compute | +| | ``None`` | hash value. | +| | | It cannot be null | +| | | if ``hash`` is defined. | ++--------------------+------------+-------------------------------+ .. _modelcontent: @@ -76,6 +88,9 @@ model. There are three model types: **notebook**, **file**, and **directory**. :class:`nbformat.notebooknode.NotebookNode` representing the .ipynb file represented by the model. See the `NBFormat`_ documentation for a full description. + - The ``hash`` field a hexdigest string of the hash value of the file. + If ``ContentManager.get`` not support hash, it should always be ``None``. + - ``hash_algorithm`` is the algorithm used to compute the hash value. - ``file`` models - The ``format`` field is either ``"text"`` or ``"base64"``. @@ -85,12 +100,16 @@ model. There are three model types: **notebook**, **file**, and **directory**. file models, ``content`` simply contains the file's bytes after decoding as UTF-8. Non-text (``base64``) files are read as bytes, base64 encoded, and then decoded as UTF-8. + - The ``hash`` field a hexdigest string of the hash value of the file. + If ``ContentManager.get`` not support hash, it should always be ``None``. + - ``hash_algorithm`` is the algorithm used to compute the hash value. - ``directory`` models - The ``format`` field is always ``"json"``. - The ``mimetype`` field is always ``None``. - The ``content`` field contains a list of :ref:`content-free` models representing the entities in the directory. + - The ``hash`` field is always ``None``. .. note:: @@ -107,7 +126,7 @@ model. There are three model types: **notebook**, **file**, and **directory**. .. 
code-block:: python - # Notebook Model with Content + # Notebook Model with Content and Hash { "content": { "metadata": {}, @@ -129,6 +148,8 @@ model. There are three model types: **notebook**, **file**, and **directory**. "path": "foo/a.ipynb", "type": "notebook", "writable": True, + "hash": "f5e43a0b1c2e7836ab3b4d6b1c35c19e2558688de15a6a14e137a59e4715d34b", + "hash_algorithm": "sha256", } # Notebook Model without Content diff --git a/examples/simple/simple_ext1/application.py b/examples/simple/simple_ext1/application.py index b28d8f8781..b77e57e4a8 100644 --- a/examples/simple/simple_ext1/application.py +++ b/examples/simple/simple_ext1/application.py @@ -36,11 +36,11 @@ class SimpleApp1(ExtensionAppJinjaMixin, ExtensionApp): # Local path to templates directory. template_paths = [DEFAULT_TEMPLATE_FILES_PATH] # type:ignore[assignment] - configA = Unicode("", config=True, help="Config A example.") # noqa + configA = Unicode("", config=True, help="Config A example.") - configB = Unicode("", config=True, help="Config B example.") # noqa + configB = Unicode("", config=True, help="Config B example.") - configC = Unicode("", config=True, help="Config C example.") # noqa + configC = Unicode("", config=True, help="Config C example.") def initialize_handlers(self): """Initialize handlers.""" diff --git a/examples/simple/simple_ext1/handlers.py b/examples/simple/simple_ext1/handlers.py index fefbdf610b..9d25057bc3 100644 --- a/examples/simple/simple_ext1/handlers.py +++ b/examples/simple/simple_ext1/handlers.py @@ -47,8 +47,6 @@ def get(self, matched_part=None, *args, **kwargs): class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): """The base template handler.""" - pass - class TypescriptHandler(BaseTemplateHandler): """A typescript handler.""" diff --git a/examples/simple/simple_ext2/application.py b/examples/simple/simple_ext2/application.py index 6f8498407d..b9da358131 100644 --- a/examples/simple/simple_ext2/application.py +++ b/examples/simple/simple_ext2/application.py @@ -29,7 +29,7 @@ class SimpleApp2(ExtensionAppJinjaMixin, ExtensionApp): # Local path to templates directory. 
template_paths = [DEFAULT_TEMPLATE_FILES_PATH] # type:ignore[assignment] - configD = Unicode("", config=True, help="Config D example.") # noqa + configD = Unicode("", config=True, help="Config D example.") def initialize_handlers(self): """Initialize handlers.""" diff --git a/examples/simple/simple_ext2/handlers.py b/examples/simple/simple_ext2/handlers.py index ea649b68d2..4f52e6f061 100644 --- a/examples/simple/simple_ext2/handlers.py +++ b/examples/simple/simple_ext2/handlers.py @@ -20,8 +20,6 @@ def get(self, matched_part=None, *args, **kwargs): class BaseTemplateHandler(ExtensionHandlerJinjaMixin, ExtensionHandlerMixin, JupyterHandler): """A base template handler.""" - pass - class IndexHandler(BaseTemplateHandler): """The root API handler.""" diff --git a/examples/simple/tests/test_handlers.py b/examples/simple/tests/test_handlers.py index efa302d5d5..59b9d045ae 100644 --- a/examples/simple/tests/test_handlers.py +++ b/examples/simple/tests/test_handlers.py @@ -2,7 +2,7 @@ import pytest -@pytest.fixture +@pytest.fixture() def jp_server_auth_resources(jp_server_auth_core_resources): """The server auth resources.""" for url_regex in [ @@ -12,7 +12,7 @@ def jp_server_auth_resources(jp_server_auth_core_resources): return jp_server_auth_core_resources -@pytest.fixture +@pytest.fixture() def jp_server_config(jp_template_dir, jp_server_authorizer): """The server config.""" return { diff --git a/jupyter_server/_sysinfo.py b/jupyter_server/_sysinfo.py index 5e17215890..f167c4e92a 100644 --- a/jupyter_server/_sysinfo.py +++ b/jupyter_server/_sysinfo.py @@ -41,7 +41,7 @@ def pkg_commit_hash(pkg_path): if p.exists(p.join(cur_path, ".git")): try: proc = subprocess.Popen( - ["git", "rev-parse", "--short", "HEAD"], # noqa + ["git", "rev-parse", "--short", "HEAD"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=pkg_path, diff --git a/jupyter_server/_tz.py b/jupyter_server/_tz.py index df123ffe07..a7a495de85 100644 --- a/jupyter_server/_tz.py +++ b/jupyter_server/_tz.py @@ -13,7 +13,7 @@ ZERO = timedelta(0) -class tzUTC(tzinfo): # noqa +class tzUTC(tzinfo): """tzinfo object for UTC (zero offset)""" def utcoffset(self, d: datetime | None) -> timedelta: @@ -30,7 +30,7 @@ def utcnow() -> datetime: return datetime.now(timezone.utc) -def utcfromtimestamp(timestamp): +def utcfromtimestamp(timestamp: float) -> datetime: return datetime.fromtimestamp(timestamp, timezone.utc) diff --git a/jupyter_server/_version.py b/jupyter_server/_version.py index f9c81be5f1..45cb8501b5 100644 --- a/jupyter_server/_version.py +++ b/jupyter_server/_version.py @@ -6,7 +6,7 @@ from typing import List # Version string must appear intact for automatic versioning -__version__ = "2.11.0.dev0" +__version__ = "2.12.1" # Build up version_info tuple for backwards compatibility pattern = r"(?P\d+).(?P\d+).(?P\d+)(?P.*)" diff --git a/jupyter_server/auth/__init__.py b/jupyter_server/auth/__init__.py index bb7d345be2..36418f214b 100644 --- a/jupyter_server/auth/__init__.py +++ b/jupyter_server/auth/__init__.py @@ -1,4 +1,4 @@ -from .authorizer import * # noqa: F403 -from .decorator import authorized # noqa: F401 -from .identity import * # noqa: F403 -from .security import passwd # noqa: F401 +from .authorizer import * +from .decorator import authorized +from .identity import * +from .security import passwd diff --git a/jupyter_server/auth/__main__.py b/jupyter_server/auth/__main__.py index a564a55864..d1573f11a1 100644 --- a/jupyter_server/auth/__main__.py +++ b/jupyter_server/auth/__main__.py @@ -20,7 +20,7 @@ def 
set_password(args): password_repeat = getpass("" if args.quiet else "Repeat password: ") if password1 != password_repeat: warnings.warn("Passwords do not match, try again", stacklevel=2) - elif len(password1) < 4: # noqa + elif len(password1) < 4: warnings.warn("Please provide at least 4 characters", stacklevel=2) else: password = password1 diff --git a/jupyter_server/auth/authorizer.py b/jupyter_server/auth/authorizer.py index f22dbe5463..aaeb3a6eea 100644 --- a/jupyter_server/auth/authorizer.py +++ b/jupyter_server/auth/authorizer.py @@ -9,7 +9,7 @@ # Distributed under the terms of the Modified BSD License. from __future__ import annotations -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Awaitable from traitlets import Instance from traitlets.config import LoggingConfigurable @@ -44,7 +44,7 @@ class Authorizer(LoggingConfigurable): def is_authorized( self, handler: JupyterHandler, user: User, action: str, resource: str - ) -> bool: + ) -> Awaitable[bool] | bool: """A method to determine if ``user`` is authorized to perform ``action`` (read, write, or execute) on the ``resource`` type. diff --git a/jupyter_server/auth/decorator.py b/jupyter_server/auth/decorator.py index a5d6c0543f..a92866b4e8 100644 --- a/jupyter_server/auth/decorator.py +++ b/jupyter_server/auth/decorator.py @@ -2,9 +2,11 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +import asyncio from functools import wraps from typing import Any, Callable, Optional, TypeVar, Union, cast +from jupyter_core.utils import ensure_async from tornado.log import app_log from tornado.web import HTTPError @@ -42,7 +44,7 @@ def authorized( def wrapper(method): @wraps(method) - def inner(self, *args, **kwargs): + async def inner(self, *args, **kwargs): # default values for action, resource nonlocal action nonlocal resource @@ -61,8 +63,15 @@ def inner(self, *args, **kwargs): raise HTTPError(status_code=403, log_message=message) # If the user is allowed to do this action, # call the method. - if self.authorizer.is_authorized(self, user, action, resource): - return method(self, *args, **kwargs) + authorized = await ensure_async( + self.authorizer.is_authorized(self, user, action, resource) + ) + if authorized: + out = method(self, *args, **kwargs) + # If the method is a coroutine, await it + if asyncio.iscoroutine(out): + return await out + return out # else raise an exception. 
else: raise HTTPError(status_code=403, log_message=message) @@ -73,6 +82,6 @@ def inner(self, *args, **kwargs): method = action action = None # no-arguments `@authorized` decorator called - return wrapper(method) + return cast(FuncT, wrapper(method)) return cast(FuncT, wrapper) diff --git a/jupyter_server/auth/identity.py b/jupyter_server/auth/identity.py index 2440710186..adeb567b5b 100644 --- a/jupyter_server/auth/identity.py +++ b/jupyter_server/auth/identity.py @@ -13,10 +13,10 @@ import os import re import sys +import typing as t import uuid from dataclasses import asdict, dataclass from http.cookies import Morsel -from typing import TYPE_CHECKING, Any, Awaitable from tornado import escape, httputil, web from traitlets import Bool, Dict, Type, Unicode, default @@ -27,11 +27,6 @@ from .security import passwd_check, set_password from .utils import get_anonymous_username -# circular imports for type checking -if TYPE_CHECKING: - from jupyter_server.base.handlers import AuthenticatedHandler, JupyterHandler - from jupyter_server.serverapp import ServerApp - _non_alphanum = re.compile(r"[^A-Za-z0-9]") @@ -82,7 +77,7 @@ def fill_defaults(self): self.display_name = self.name -def _backward_compat_user(got_user: Any) -> User: +def _backward_compat_user(got_user: t.Any) -> User: """Backward-compatibility for LoginHandler.get_user Prior to 2.0, LoginHandler.get_user could return anything truthy. @@ -128,7 +123,7 @@ class IdentityProvider(LoggingConfigurable): .. versionadded:: 2.0 """ - cookie_name: str | Unicode = Unicode( + cookie_name: str | Unicode[str, str | bytes] = Unicode( "", config=True, help=_i18n("Name of the cookie to set for persisting login. Default: username-${Host}."), @@ -142,7 +137,7 @@ class IdentityProvider(LoggingConfigurable): ), ) - secure_cookie: bool | Bool = Bool( + secure_cookie: bool | Bool[bool | None, bool | int | None] = Bool( None, allow_none=True, config=True, @@ -160,7 +155,7 @@ class IdentityProvider(LoggingConfigurable): ), ) - token: str | Unicode = Unicode( + token: str | Unicode[str, str | bytes] = Unicode( "", help=_i18n( """Token used for authenticating first-time connections to the server. 
@@ -211,9 +206,9 @@ def _token_default(self): self.token_generated = True return binascii.hexlify(os.urandom(24)).decode("ascii") - need_token: bool | Bool = Bool(True) + need_token: bool | Bool[bool, t.Union[bool, int]] = Bool(True) - def get_user(self, handler: JupyterHandler) -> User | None | Awaitable[User | None]: + def get_user(self, handler: web.RequestHandler) -> User | None | t.Awaitable[User | None]: """Get the authenticated user for a request Must return a :class:`jupyter_server.auth.User`, @@ -228,17 +223,17 @@ def get_user(self, handler: JupyterHandler) -> User | None | Awaitable[User | No # not sure how to have optional-async type signature # on base class with `async def` without splitting it into two methods - async def _get_user(self, handler: JupyterHandler) -> User | None: + async def _get_user(self, handler: web.RequestHandler) -> User | None: """Get the user.""" if getattr(handler, "_jupyter_current_user", None): # already authenticated - return handler._jupyter_current_user - _token_user: User | None | Awaitable[User | None] = self.get_user_token(handler) - if isinstance(_token_user, Awaitable): + return t.cast(User, handler._jupyter_current_user) # type:ignore[attr-defined] + _token_user: User | None | t.Awaitable[User | None] = self.get_user_token(handler) + if isinstance(_token_user, t.Awaitable): _token_user = await _token_user token_user: User | None = _token_user # need second variable name to collapse type _cookie_user = self.get_user_cookie(handler) - if isinstance(_cookie_user, Awaitable): + if isinstance(_cookie_user, t.Awaitable): _cookie_user = await _cookie_user cookie_user: User | None = _cookie_user # prefer token to cookie if both given, @@ -273,12 +268,12 @@ async def _get_user(self, handler: JupyterHandler) -> User | None: return user - def identity_model(self, user: User) -> dict: + def identity_model(self, user: User) -> dict[str, t.Any]: """Return a User as an Identity model""" # TODO: validate? return asdict(user) - def get_handlers(self) -> list: + def get_handlers(self) -> list[tuple[str, object]]: """Return list of additional handlers for this identity provider For example, an OAuth callback handler. @@ -321,7 +316,7 @@ def user_from_cookie(self, cookie_value: str) -> User | None: user["color"], ) - def get_cookie_name(self, handler: AuthenticatedHandler) -> str: + def get_cookie_name(self, handler: web.RequestHandler) -> str: """Return the login cookie name Uses IdentityProvider.cookie_name, if defined. 
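The `_get_user` hunk above keeps the provider API optionally async: `get_user_token` and `get_user_cookie` may return either a value or an awaitable, and the caller checks `isinstance(..., t.Awaitable)` before awaiting instead of going through `ensure_async`. A standalone sketch of that pattern, with an illustrative helper name that is not part of this PR:

```python
import typing as t


async def resolve(value):
    """Collapse a possibly-awaitable result, mirroring _get_user above."""
    if isinstance(value, t.Awaitable):
        return await value
    return value


# Works the same whether the provider's get_user_cookie is sync or async:
#     user = await resolve(identity_provider.get_user_cookie(handler))
```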
@@ -333,7 +328,7 @@ def get_cookie_name(self, handler: AuthenticatedHandler) -> str: else: return _non_alphanum.sub("-", f"username-{handler.request.host}") - def set_login_cookie(self, handler: AuthenticatedHandler, user: User) -> None: + def set_login_cookie(self, handler: web.RequestHandler, user: User) -> None: """Call this on handlers to set the login cookie for success""" cookie_options = {} cookie_options.update(self.cookie_options) @@ -345,12 +340,12 @@ def set_login_cookie(self, handler: AuthenticatedHandler, user: User) -> None: secure_cookie = handler.request.protocol == "https" if secure_cookie: cookie_options.setdefault("secure", True) - cookie_options.setdefault("path", handler.base_url) + cookie_options.setdefault("path", handler.base_url) # type:ignore[attr-defined] cookie_name = self.get_cookie_name(handler) handler.set_secure_cookie(cookie_name, self.user_to_cookie(user), **cookie_options) def _force_clear_cookie( - self, handler: AuthenticatedHandler, name: str, path: str = "/", domain: str | None = None + self, handler: web.RequestHandler, name: str, path: str = "/", domain: str | None = None ) -> None: """Deletes the cookie with the given name. @@ -368,7 +363,7 @@ def _force_clear_cookie( name = escape.native_str(name) expires = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=365) - morsel: Morsel = Morsel() + morsel: Morsel[t.Any] = Morsel() morsel.set(name, "", '""') morsel["expires"] = httputil.format_timestamp(expires) morsel["path"] = path @@ -376,11 +371,11 @@ def _force_clear_cookie( morsel["domain"] = domain handler.add_header("Set-Cookie", morsel.OutputString()) - def clear_login_cookie(self, handler: AuthenticatedHandler) -> None: + def clear_login_cookie(self, handler: web.RequestHandler) -> None: """Clear the login cookie, effectively logging out the session.""" cookie_options = {} cookie_options.update(self.cookie_options) - path = cookie_options.setdefault("path", handler.base_url) + path = cookie_options.setdefault("path", handler.base_url) # type:ignore[attr-defined] cookie_name = self.get_cookie_name(handler) handler.clear_cookie(cookie_name, path=path) if path and path != "/": @@ -390,7 +385,9 @@ def clear_login_cookie(self, handler: AuthenticatedHandler) -> None: # two cookies with the same name. See the method above. 
self._force_clear_cookie(handler, cookie_name) - def get_user_cookie(self, handler: JupyterHandler) -> User | None | Awaitable[User | None]: + def get_user_cookie( + self, handler: web.RequestHandler + ) -> User | None | t.Awaitable[User | None]: """Get user from a cookie Calls user_from_cookie to deserialize cookie value @@ -413,7 +410,7 @@ def get_user_cookie(self, handler: JupyterHandler) -> User | None | Awaitable[Us auth_header_pat = re.compile(r"(token|bearer)\s+(.+)", re.IGNORECASE) - def get_token(self, handler: JupyterHandler) -> str | None: + def get_token(self, handler: web.RequestHandler) -> str | None: """Get the user token from a request Default: @@ -429,14 +426,14 @@ def get_token(self, handler: JupyterHandler) -> str | None: user_token = m.group(2) return user_token - async def get_user_token(self, handler: JupyterHandler) -> User | None: + async def get_user_token(self, handler: web.RequestHandler) -> User | None: """Identify the user based on a token in the URL or Authorization header Returns: - uuid if authenticated - None if not """ - token = handler.token + token = t.cast("str | None", handler.token) # type:ignore[attr-defined] if not token: return None # check login token from URL argument or Authorization header @@ -455,7 +452,7 @@ async def get_user_token(self, handler: JupyterHandler) -> User | None: # which is stored in a cookie. # still check the cookie for the user id _user = self.get_user_cookie(handler) - if isinstance(_user, Awaitable): + if isinstance(_user, t.Awaitable): _user = await _user user: User | None = _user if user is None: @@ -464,7 +461,7 @@ async def get_user_token(self, handler: JupyterHandler) -> User | None: else: return None - def generate_anonymous_user(self, handler: JupyterHandler) -> User: + def generate_anonymous_user(self, handler: web.RequestHandler) -> User: """Generate a random anonymous user. For use when a single shared token is used, @@ -475,10 +472,10 @@ def generate_anonymous_user(self, handler: JupyterHandler) -> User: name = display_name = f"Anonymous {moon}" initials = f"A{moon[0]}" color = None - handler.log.debug(f"Generating new user for token-authenticated request: {user_id}") + handler.log.debug(f"Generating new user for token-authenticated request: {user_id}") # type:ignore[attr-defined] return User(user_id, name, display_name, initials, None, color) - def should_check_origin(self, handler: AuthenticatedHandler) -> bool: + def should_check_origin(self, handler: web.RequestHandler) -> bool: """Should the Handler check for CORS origin validation? Origin check should be skipped for token-authenticated requests. @@ -489,7 +486,7 @@ def should_check_origin(self, handler: AuthenticatedHandler) -> bool: """ return not self.is_token_authenticated(handler) - def is_token_authenticated(self, handler: AuthenticatedHandler) -> bool: + def is_token_authenticated(self, handler: web.RequestHandler) -> bool: """Returns True if handler has been token authenticated. Otherwise, False. 
Login with a token is used to signal certain things, such as: @@ -499,13 +496,13 @@ def is_token_authenticated(self, handler: AuthenticatedHandler) -> bool: - skip origin-checks for scripts """ # ensure get_user has been called, so we know if we're token-authenticated - handler.current_user # noqa + handler.current_user # noqa: B018 return getattr(handler, "_token_authenticated", False) def validate_security( self, - app: ServerApp, - ssl_options: dict | None = None, + app: t.Any, + ssl_options: dict[str, t.Any] | None = None, ) -> None: """Check the application's security. @@ -526,7 +523,7 @@ def validate_security( " Anyone who can connect to this server will be able to run code." ) - def process_login_form(self, handler: JupyterHandler) -> User | None: + def process_login_form(self, handler: web.RequestHandler) -> User | None: """Process login form data Return authenticated User if successful, None if not. @@ -538,7 +535,7 @@ def process_login_form(self, handler: JupyterHandler) -> User | None: return self.generate_anonymous_user(handler) if self.token and self.token == typed_password: - return self.user_for_token(typed_password) # type:ignore[attr-defined] + return t.cast(User, self.user_for_token(typed_password)) # type:ignore[attr-defined] return user @@ -633,7 +630,7 @@ def passwd_check(self, password): """Check password against our stored hashed password""" return passwd_check(self.hashed_password, password) - def process_login_form(self, handler: JupyterHandler) -> User | None: + def process_login_form(self, handler: web.RequestHandler) -> User | None: """Process login form data Return authenticated User if successful, None if not. @@ -659,8 +656,8 @@ def process_login_form(self, handler: JupyterHandler) -> User | None: def validate_security( self, - app: ServerApp, - ssl_options: dict | None = None, + app: t.Any, + ssl_options: dict[str, t.Any] | None = None, ) -> None: """Handle security validation.""" super().validate_security(app, ssl_options) @@ -700,7 +697,7 @@ def _default_login_handler_class(self): def auth_enabled(self): return self.login_available - def get_user(self, handler: JupyterHandler) -> User | None: + def get_user(self, handler: web.RequestHandler) -> User | None: """Get the user.""" user = self.login_handler_class.get_user(handler) # type:ignore[attr-defined] if user is None: @@ -708,23 +705,25 @@ def get_user(self, handler: JupyterHandler) -> User | None: return _backward_compat_user(user) @property - def login_available(self): - return self.login_handler_class.get_login_available( # type:ignore[attr-defined] - self.settings + def login_available(self) -> bool: + return bool( + self.login_handler_class.get_login_available( # type:ignore[attr-defined] + self.settings + ) ) - def should_check_origin(self, handler: AuthenticatedHandler) -> bool: + def should_check_origin(self, handler: web.RequestHandler) -> bool: """Whether we should check origin.""" - return self.login_handler_class.should_check_origin(handler) # type:ignore[attr-defined] + return bool(self.login_handler_class.should_check_origin(handler)) # type:ignore[attr-defined] - def is_token_authenticated(self, handler: AuthenticatedHandler) -> bool: + def is_token_authenticated(self, handler: web.RequestHandler) -> bool: """Whether we are token authenticated.""" - return self.login_handler_class.is_token_authenticated(handler) # type:ignore[attr-defined] + return bool(self.login_handler_class.is_token_authenticated(handler)) # type:ignore[attr-defined] def validate_security( self, - app: ServerApp, - 
ssl_options: dict | None = None, + app: t.Any, + ssl_options: dict[str, t.Any] | None = None, ) -> None: """Validate security.""" if self.password_required and (not self.hashed_password): @@ -734,6 +733,6 @@ def validate_security( self.log.critical(_i18n("Hint: run the following command to set a password")) self.log.critical(_i18n("\t$ python -m jupyter_server.auth password")) sys.exit(1) - return self.login_handler_class.validate_security( # type:ignore[attr-defined] + self.login_handler_class.validate_security( # type:ignore[attr-defined] app, ssl_options ) diff --git a/jupyter_server/auth/login.py b/jupyter_server/auth/login.py index b9eda58e08..22832df341 100644 --- a/jupyter_server/auth/login.py +++ b/jupyter_server/auth/login.py @@ -179,7 +179,7 @@ def is_token_authenticated(cls, handler): """DEPRECATED in 2.0, use IdentityProvider API""" if getattr(handler, "_user_id", None) is None: # ensure get_user has been called, so we know if we're token-authenticated - handler.current_user # noqa + handler.current_user # noqa: B018 return getattr(handler, "_token_authenticated", False) @classmethod @@ -233,7 +233,7 @@ def get_user_token(cls, handler): """DEPRECATED in 2.0, use IdentityProvider API""" token = handler.token if not token: - return + return None # check login token from URL argument or Authorization header user_token = cls.get_token(handler) authenticated = False diff --git a/jupyter_server/auth/security.py b/jupyter_server/auth/security.py index 1a7107eb11..a5ae185f1e 100644 --- a/jupyter_server/auth/security.py +++ b/jupyter_server/auth/security.py @@ -52,8 +52,7 @@ def passwd(passphrase=None, algorithm="argon2"): if p0 == p1: passphrase = p0 break - else: - warnings.warn("Passwords do not match.", stacklevel=2) + warnings.warn("Passwords do not match.", stacklevel=2) else: msg = "No matching passwords found. Giving up." raise ValueError(msg) diff --git a/jupyter_server/auth/utils.py b/jupyter_server/auth/utils.py index 3c73f6f794..b0f790be1f 100644 --- a/jupyter_server/auth/utils.py +++ b/jupyter_server/auth/utils.py @@ -166,4 +166,4 @@ def get_anonymous_username() -> str: Get a random user-name based on the moons of Jupyter. This function returns names like "Anonymous Io" or "Anonymous Metis". 
""" - return moons_of_jupyter[random.randint(0, len(moons_of_jupyter) - 1)] # noqa + return moons_of_jupyter[random.randint(0, len(moons_of_jupyter) - 1)] diff --git a/jupyter_server/base/handlers.py b/jupyter_server/base/handlers.py index c4d080cf18..b1b783cca9 100644 --- a/jupyter_server/base/handlers.py +++ b/jupyter_server/base/handlers.py @@ -10,7 +10,6 @@ import mimetypes import os import re -import traceback import types import warnings from http.client import responses @@ -64,7 +63,7 @@ def json_sys_info(): """Get sys info as json.""" - global _sys_info_cache # noqa + global _sys_info_cache # noqa: PLW0603 if _sys_info_cache is None: _sys_info_cache = json.dumps(get_sys_info()) return _sys_info_cache @@ -73,7 +72,7 @@ def json_sys_info(): def log() -> Logger: """Get the application log.""" if Application.initialized(): - return Application.instance().log + return cast(Logger, Application.instance().log) else: return app_log @@ -83,7 +82,7 @@ class AuthenticatedHandler(web.RequestHandler): @property def base_url(self) -> str: - return self.settings.get("base_url", "/") + return cast(str, self.settings.get("base_url", "/")) @property def content_security_policy(self) -> str: @@ -93,7 +92,7 @@ def content_security_policy(self) -> str: """ if "Content-Security-Policy" in self.settings.get("headers", {}): # user-specified, don't override - return self.settings["headers"]["Content-Security-Policy"] + return cast(str, self.settings["headers"]["Content-Security-Policy"]) return "; ".join( [ @@ -171,7 +170,7 @@ def get_current_user(self) -> str: DeprecationWarning, stacklevel=2, ) - return self._jupyter_current_user + return cast(str, self._jupyter_current_user) # haven't called get_user in prepare, raise raise RuntimeError(msg) @@ -195,7 +194,7 @@ def token_authenticated(self) -> bool: def logged_in(self) -> bool: """Is a user currently logged in?""" user = self.current_user - return user and user != "anonymous" + return bool(user and user != "anonymous") @property def login_handler(self) -> Any: @@ -222,7 +221,7 @@ def login_available(self) -> bool: whether the user is already logged in or not. 
""" - return self.identity_provider.login_available + return cast(bool, self.identity_provider.login_available) @property def authorizer(self) -> Authorizer: @@ -266,7 +265,7 @@ def identity_provider(self) -> IdentityProvider: self.settings["identity_provider"] = IdentityProvider( config=self.settings.get("config", None) ) - return self.settings["identity_provider"] + return cast("IdentityProvider", self.settings["identity_provider"]) class JupyterHandler(AuthenticatedHandler): @@ -277,7 +276,7 @@ class JupyterHandler(AuthenticatedHandler): @property def config(self) -> dict[str, Any] | None: - return self.settings.get("config", None) + return cast("dict[str, Any] | None", self.settings.get("config", None)) @property def log(self) -> Logger: @@ -287,11 +286,11 @@ def log(self) -> Logger: @property def jinja_template_vars(self) -> dict[str, Any]: """User-supplied values to supply to jinja templates.""" - return self.settings.get("jinja_template_vars", {}) + return cast("dict[str, Any]", self.settings.get("jinja_template_vars", {})) @property def serverapp(self) -> ServerApp | None: - return self.settings["serverapp"] + return cast("ServerApp | None", self.settings["serverapp"]) # --------------------------------------------------------------- # URLs @@ -300,26 +299,26 @@ def serverapp(self) -> ServerApp | None: @property def version_hash(self) -> str: """The version hash to use for cache hints for static files""" - return self.settings.get("version_hash", "") + return cast(str, self.settings.get("version_hash", "")) @property def mathjax_url(self) -> str: - url = self.settings.get("mathjax_url", "") + url = cast(str, self.settings.get("mathjax_url", "")) if not url or url_is_absolute(url): return url return url_path_join(self.base_url, url) @property def mathjax_config(self) -> str: - return self.settings.get("mathjax_config", "TeX-AMS-MML_HTMLorMML-full,Safe") + return cast(str, self.settings.get("mathjax_config", "TeX-AMS-MML_HTMLorMML-full,Safe")) @property def default_url(self) -> str: - return self.settings.get("default_url", "") + return cast(str, self.settings.get("default_url", "")) @property def ws_url(self) -> str: - return self.settings.get("websocket_url", "") + return cast(str, self.settings.get("websocket_url", "")) @property def contents_js_source(self) -> str: @@ -327,7 +326,7 @@ def contents_js_source(self) -> str: "Using contents: %s", self.settings.get("contents_js_source", "services/contents"), ) - return self.settings.get("contents_js_source", "services/contents") + return cast(str, self.settings.get("contents_js_source", "services/contents")) # --------------------------------------------------------------- # Manager objects @@ -335,31 +334,31 @@ def contents_js_source(self) -> str: @property def kernel_manager(self) -> AsyncMappingKernelManager: - return self.settings["kernel_manager"] + return cast("AsyncMappingKernelManager", self.settings["kernel_manager"]) @property def contents_manager(self) -> ContentsManager: - return self.settings["contents_manager"] + return cast("ContentsManager", self.settings["contents_manager"]) @property def session_manager(self) -> SessionManager: - return self.settings["session_manager"] + return cast("SessionManager", self.settings["session_manager"]) @property def terminal_manager(self) -> TerminalManager: - return self.settings["terminal_manager"] + return cast("TerminalManager", self.settings["terminal_manager"]) @property def kernel_spec_manager(self) -> KernelSpecManager: - return self.settings["kernel_spec_manager"] + return 
cast("KernelSpecManager", self.settings["kernel_spec_manager"]) @property def config_manager(self) -> ConfigManager: - return self.settings["config_manager"] + return cast("ConfigManager", self.settings["config_manager"]) @property def event_logger(self) -> EventLogger: - return self.settings["event_logger"] + return cast("EventLogger", self.settings["event_logger"]) # --------------------------------------------------------------- # CORS @@ -368,17 +367,17 @@ def event_logger(self) -> EventLogger: @property def allow_origin(self) -> str: """Normal Access-Control-Allow-Origin""" - return self.settings.get("allow_origin", "") + return cast(str, self.settings.get("allow_origin", "")) @property - def allow_origin_pat(self) -> str: + def allow_origin_pat(self) -> str | None: """Regular expression version of allow_origin""" - return self.settings.get("allow_origin_pat", None) + return cast("str | None", self.settings.get("allow_origin_pat", None)) @property def allow_credentials(self) -> bool: """Whether to set Access-Control-Allow-Credentials""" - return self.settings.get("allow_credentials", False) + return cast(bool, self.settings.get("allow_credentials", False)) def set_default_headers(self) -> None: """Add CORS headers, if defined""" @@ -462,7 +461,7 @@ def check_origin(self, origin_to_satisfy_tornado: str = "") -> bool: # Check CORS headers if self.allow_origin: - allow = self.allow_origin == origin + allow = bool(self.allow_origin == origin) elif self.allow_origin_pat: allow = bool(re.match(self.allow_origin_pat, origin)) else: @@ -527,11 +526,11 @@ def check_xsrf_cookie(self) -> None: """Bypass xsrf cookie checks when token-authenticated""" if not hasattr(self, "_jupyter_current_user"): # Called too early, will be checked later - return + return None if self.token_authenticated or self.settings.get("disable_check_xsrf", False): # Token-authenticated requests do not need additional XSRF-check # Servers without authentication are vulnerable to XSRF - return + return None try: return super().check_xsrf_cookie() except web.HTTPError as e: @@ -608,11 +607,11 @@ async def prepare(self) -> Awaitable[None] | None: # type:ignore[override] # check for overridden get_current_user + default IdentityProvider # deprecated way to override auth (e.g. JupyterHub < 3.0) # allow deprecated, overridden get_current_user - warnings.warn( # noqa + warnings.warn( "Overriding JupyterHandler.get_current_user is deprecated in jupyter-server 2.0." 
" Use an IdentityProvider class.", DeprecationWarning, - # stacklevel not useful here + stacklevel=1, ) user = User(self.get_current_user()) else: @@ -682,7 +681,7 @@ def get_json_body(self) -> dict[str, Any] | None: self.log.debug("Bad JSON: %r", body) self.log.error("Couldn't parse JSON", exc_info=True) raise web.HTTPError(400, "Invalid JSON in body of request") from e - return model + return cast("dict[str, Any]", model) def write_error(self, status_code: int, **kwargs: Any) -> None: """render custom error pages""" @@ -695,7 +694,7 @@ def write_error(self, status_code: int, **kwargs: Any) -> None: # get the custom message, if defined try: message = exception.log_message % exception.args - except Exception: # noqa + except Exception: pass # construct the custom reason, if defined @@ -736,7 +735,7 @@ def write_error(self, status_code: int, **kwargs: Any) -> None: """APIHandler errors are JSON, not human pages""" self.set_header("Content-Type", "application/json") message = responses.get(status_code, "Unknown HTTP Error") - reply: dict = { + reply: dict[str, Any] = { "message": message, } exc_info = kwargs.get("exc_info") @@ -748,7 +747,9 @@ def write_error(self, status_code: int, **kwargs: Any) -> None: else: reply["message"] = "Unhandled error" reply["reason"] = None - reply["traceback"] = "".join(traceback.format_exception(*exc_info)) + # backward-compatibility: traceback field is present, + # but always empty + reply["traceback"] = "" self.log.warning("wrote error: %r", reply["message"], exc_info=True) self.finish(json.dumps(reply)) @@ -962,7 +963,7 @@ class FileFindHandler(JupyterHandler, web.StaticFileHandler): """ # cache search results, don't search for files more than once - _static_paths: dict[str, Any] = {} + _static_paths: dict[str, str] = {} root: tuple[str] # type:ignore[assignment] def set_headers(self) -> None: @@ -1102,8 +1103,8 @@ async def redirect_to_files(self: Any, path: str) -> None: self.log.debug("Redirecting %s to %s", self.request.path, url) self.redirect(url) - def get(self, path: str = "") -> Awaitable: - return self.redirect_to_files(self, path) + async def get(self, path: str = "") -> None: + return await self.redirect_to_files(self, path) class RedirectWithParams(web.RequestHandler): diff --git a/jupyter_server/base/websocket.py b/jupyter_server/base/websocket.py index 82082299e6..a27b7a72a7 100644 --- a/jupyter_server/base/websocket.py +++ b/jupyter_server/base/websocket.py @@ -81,7 +81,6 @@ def check_origin(self, origin: Optional[str] = None) -> bool: def clear_cookie(self, *args, **kwargs): """meaningless for websockets""" - pass @no_type_check def open(self, *args, **kwargs): diff --git a/jupyter_server/config_manager.py b/jupyter_server/config_manager.py index 76268d8a23..87480d7609 100644 --- a/jupyter_server/config_manager.py +++ b/jupyter_server/config_manager.py @@ -76,7 +76,7 @@ def directory(self, section_name: str) -> str: """Returns the directory name for the section name: {config_dir}/{section_name}.d""" return os.path.join(self.config_dir, section_name + ".d") - def get(self, section_name: str, include_root: bool = True) -> t.Any: + def get(self, section_name: str, include_root: bool = True) -> dict[str, t.Any]: """Retrieve the config data for the specified section. 
Returns the data as a dictionary, or an empty dictionary if the file @@ -99,7 +99,7 @@ def get(self, section_name: str, include_root: bool = True) -> t.Any: section_name, "\n\t".join(paths), ) - data: dict = {} + data: dict[str, t.Any] = {} for path in paths: if os.path.isfile(path): with open(path, encoding="utf-8") as f: @@ -123,7 +123,7 @@ def set(self, section_name: str, data: t.Any) -> None: with open(filename, "w", encoding="utf-8") as f: f.write(json_content) - def update(self, section_name: str, new_data: t.Any) -> None: + def update(self, section_name: str, new_data: t.Any) -> dict[str, t.Any]: """Modify the config section by recursively updating it with new_data. Returns the modified config data as a dictionary. diff --git a/jupyter_server/extension/application.py b/jupyter_server/extension/application.py index f0e47f9dd7..aeeab5a94d 100644 --- a/jupyter_server/extension/application.py +++ b/jupyter_server/extension/application.py @@ -28,7 +28,7 @@ def _preparse_for_subcommand(application_klass, argv): """Preparse command line to look for subcommands.""" # Read in arguments from command line. if len(argv) == 0: - return + return None # Find any subcommands. if application_klass.subcommands and len(argv) > 0: @@ -144,7 +144,7 @@ class method. This method can be set as a entry_point in # A useful class property that subclasses can override to # configure the underlying Jupyter Server when this extension # is launched directly (using its `launch_instance` method). - serverapp_config: dict = {} + serverapp_config: dict[str, t.Any] = {} # Some subclasses will likely override this trait to flip # the default value to False if they don't offer a browser @@ -174,7 +174,7 @@ def config_file_paths(self): # file, jupyter_{name}_config. # This should also match the jupyter subcommand used to launch # this extension from the CLI, e.g. `jupyter {name}`. - name: str | Unicode = "ExtensionApp" # type:ignore[assignment] + name: str | Unicode[str, str] = "ExtensionApp" # type:ignore[assignment] @classmethod def get_extension_package(cls): @@ -218,7 +218,7 @@ def _default_serverapp(self): if ServerApp.initialized(): try: return ServerApp.instance() - except Exception: # noqa + except Exception: # error retrieving instance, e.g. MultipleInstanceError pass @@ -271,7 +271,7 @@ def _default_static_url_prefix(self): handlers: List[tuple[t.Any, ...]] = List( help=_i18n("""Handlers appended to the server.""") - ).tag(config=True) # type:ignore[assignment] + ).tag(config=True) def _config_file_name_default(self): """The default config file name.""" @@ -281,15 +281,12 @@ def _config_file_name_default(self): def initialize_settings(self): """Override this method to add handling of settings.""" - pass def initialize_handlers(self): """Override this method to append handlers to a Jupyter Server.""" - pass def initialize_templates(self): """Override this method to add handling of template files.""" - pass def _prepare_config(self): """Builds a Config object from the extension's traits and passes @@ -336,7 +333,7 @@ def _prepare_handlers(self): handler = handler_items[1] # Get handler kwargs, if given - kwargs: dict = {} + kwargs: dict[str, t.Any] = {} if issubclass(handler, ExtensionHandlerMixin): kwargs["name"] = self.name @@ -599,7 +596,7 @@ def launch_instance(cls, argv=None, **kwargs): extension's landing page. """ # Handle arguments. - if argv is None: # noqa + if argv is None: # noqa: SIM108 args = sys.argv[1:] # slice out extension config. 
else: args = argv diff --git a/jupyter_server/extension/handler.py b/jupyter_server/extension/handler.py index 3018aae1c2..55f5aff2c3 100644 --- a/jupyter_server/extension/handler.py +++ b/jupyter_server/extension/handler.py @@ -1,15 +1,14 @@ """An extension handler.""" from __future__ import annotations -from typing import TYPE_CHECKING, Any +from logging import Logger +from typing import TYPE_CHECKING, Any, cast from jinja2.exceptions import TemplateNotFound from jupyter_server.base.handlers import FileFindHandler if TYPE_CHECKING: - from logging import Logger - from traitlets.config import Config from jupyter_server.extension.application import ExtensionApp @@ -25,9 +24,9 @@ def get_template(self, name: str) -> str: """Return the jinja template object for a given name""" try: env = f"{self.name}_jinja2_env" # type:ignore[attr-defined] - return self.settings[env].get_template(name) # type:ignore[attr-defined] + return cast(str, self.settings[env].get_template(name)) # type:ignore[attr-defined] except TemplateNotFound: - return super().get_template(name) # type:ignore[misc] + return cast(str, super().get_template(name)) # type:ignore[misc] class ExtensionHandlerMixin: @@ -41,6 +40,8 @@ class ExtensionHandlerMixin: other extensions. """ + settings: dict[str, Any] + def initialize(self, name: str, *args: Any, **kwargs: Any) -> None: self.name = name try: @@ -50,34 +51,34 @@ def initialize(self, name: str, *args: Any, **kwargs: Any) -> None: @property def extensionapp(self) -> ExtensionApp: - return self.settings[self.name] # type:ignore[attr-defined] + return cast("ExtensionApp", self.settings[self.name]) @property def serverapp(self) -> ServerApp: key = "serverapp" - return self.settings[key] # type:ignore[attr-defined] + return cast("ServerApp", self.settings[key]) @property def log(self) -> Logger: if not hasattr(self, "name"): - return super().log # type:ignore[misc] + return cast(Logger, super().log) # type:ignore[misc] # Attempt to pull the ExtensionApp's log, otherwise fall back to ServerApp. try: - return self.extensionapp.log + return cast(Logger, self.extensionapp.log) except AttributeError: - return self.serverapp.log + return cast(Logger, self.serverapp.log) @property def config(self) -> Config: - return self.settings[f"{self.name}_config"] # type:ignore[attr-defined] + return cast("Config", self.settings[f"{self.name}_config"]) @property def server_config(self) -> Config: - return self.settings["config"] # type:ignore[attr-defined] + return cast("Config", self.settings["config"]) @property def base_url(self) -> str: - return self.settings.get("base_url", "/") # type:ignore[attr-defined] + return cast(str, self.settings.get("base_url", "/")) @property def static_url_prefix(self) -> str: @@ -85,7 +86,7 @@ def static_url_prefix(self) -> str: @property def static_path(self) -> str: - return self.settings[f"{self.name}_static_paths"] # type:ignore[attr-defined] + return cast(str, self.settings[f"{self.name}_static_paths"]) def static_url(self, path: str, include_host: bool | None = None, **kwargs: Any) -> str: """Returns a static URL for the given relative static file path. @@ -108,7 +109,7 @@ def static_url(self, path: str, include_host: bool | None = None, **kwargs: Any) try: self.require_setting(key, "static_url") # type:ignore[attr-defined] except Exception as e: - if key in self.settings: # type:ignore[attr-defined] + if key in self.settings: msg = ( "This extension doesn't have any static paths listed. Check that the " "extension's `static_paths` trait is set." 
@@ -117,17 +118,14 @@ def static_url(self, path: str, include_host: bool | None = None, **kwargs: Any) else: raise e - get_url = self.settings.get( # type:ignore[attr-defined] - "static_handler_class", FileFindHandler - ).make_static_url + get_url = self.settings.get("static_handler_class", FileFindHandler).make_static_url if include_host is None: include_host = getattr(self, "include_host", False) - if include_host: # noqa + base = "" + if include_host: base = self.request.protocol + "://" + self.request.host # type:ignore[attr-defined] - else: - base = "" # Hijack settings dict to send extension templates to extension # static directory. @@ -136,4 +134,4 @@ def static_url(self, path: str, include_host: bool | None = None, **kwargs: Any) "static_url_prefix": self.static_url_prefix, } - return base + get_url(settings, path, **kwargs) + return base + cast(str, get_url(settings, path, **kwargs)) diff --git a/jupyter_server/extension/manager.py b/jupyter_server/extension/manager.py index 156eba825a..3509e2e9f6 100644 --- a/jupyter_server/extension/manager.py +++ b/jupyter_server/extension/manager.py @@ -2,6 +2,7 @@ from __future__ import annotations import importlib +from itertools import starmap from tornado.gen import multi from traitlets import Any, Bool, Dict, HasTraits, Instance, List, Unicode, default, observe @@ -392,12 +393,7 @@ def load_all_extensions(self): async def stop_all_extensions(self): """Call the shutdown hooks in all extensions.""" - await multi( - [ - self.stop_extension(name, apps) - for name, apps in sorted(dict(self.extension_apps).items()) - ] - ) + await multi(list(starmap(self.stop_extension, sorted(dict(self.extension_apps).items())))) def any_activity(self): """Check for any activity currently happening across all extension applications.""" diff --git a/jupyter_server/extension/serverextension.py b/jupyter_server/extension/serverextension.py index 2d4359bd06..19f3a30709 100644 --- a/jupyter_server/extension/serverextension.py +++ b/jupyter_server/extension/serverextension.py @@ -381,7 +381,7 @@ class ServerExtensionApp(BaseExtensionApp): description: str = "Work with Jupyter server extensions" examples = _examples - subcommands: dict = { + subcommands: dict[str, t.Any] = { "enable": (EnableServerExtensionApp, "Enable a server extension"), "disable": (DisableServerExtensionApp, "Disable a server extension"), "list": (ListServerExtensionsApp, "List server extensions"), diff --git a/jupyter_server/extension/utils.py b/jupyter_server/extension/utils.py index 69af455b7c..1ba44ee0d2 100644 --- a/jupyter_server/extension/utils.py +++ b/jupyter_server/extension/utils.py @@ -7,26 +7,18 @@ class ExtensionLoadingError(Exception): """An extension loading error.""" - pass - class ExtensionMetadataError(Exception): """An extension metadata error.""" - pass - class ExtensionModuleNotFound(Exception): """An extension module not found error.""" - pass - class NotAnExtensionApp(Exception): """An error raised when a module is not an extension.""" - pass - def get_loader(obj, logger=None): """Looks for _load_jupyter_server_extension as an attribute @@ -36,12 +28,12 @@ def get_loader(obj, logger=None): underscore prefix. """ try: - return getattr(obj, "_load_jupyter_server_extension") # noqa B009 + return obj._load_jupyter_server_extension except AttributeError: pass try: - func = getattr(obj, "load_jupyter_server_extension") # noqa B009 + func = obj.load_jupyter_server_extension except AttributeError: msg = "_load_jupyter_server_extension function was not found." 
raise ExtensionLoadingError(msg) from None @@ -76,7 +68,8 @@ def get_metadata(package_name, logger=None): # each module took to import. This makes it much easier for users to report # slow loading modules upstream, as slow loading modules will block server startup if logger: - logger.info(f"Package {package_name} took {duration:.4f}s to import") + log = logger.info if duration > 0.1 else logger.debug + log(f"Extension package {package_name} took {duration:.4f}s to import") try: return module, module._jupyter_server_extension_points() diff --git a/jupyter_server/gateway/connections.py b/jupyter_server/gateway/connections.py index 401fe86a21..028a0f8f4e 100644 --- a/jupyter_server/gateway/connections.py +++ b/jupyter_server/gateway/connections.py @@ -1,6 +1,7 @@ """Gateway connection classes.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations import asyncio import logging @@ -41,11 +42,11 @@ async def connect(self): "channels", ) self.log.info(f"Connecting to {ws_url}") - kwargs: dict = {} + kwargs: dict[str, Any] = {} kwargs = GatewayClient.instance().load_connection_args(**kwargs) request = HTTPRequest(ws_url, **kwargs) - self.ws_future = cast(Future, tornado_websocket.websocket_connect(request)) + self.ws_future = cast("Future[Any]", tornado_websocket.websocket_connect(request)) self.ws_future.add_done_callback(self._connection_done) loop = IOLoop.current() @@ -103,7 +104,7 @@ async def _read_messages(self): # NOTE(esevan): if websocket is not disconnected by client, try to reconnect. if not self.disconnected and self.retry < GatewayClient.instance().gateway_retry_max: - jitter = random.randint(10, 100) * 0.01 # noqa + jitter = random.randint(10, 100) * 0.01 retry_interval = ( min( GatewayClient.instance().gateway_retry_interval * (2**self.retry), diff --git a/jupyter_server/gateway/gateway_client.py b/jupyter_server/gateway/gateway_client.py index 395906177c..437d54d227 100644 --- a/jupyter_server/gateway/gateway_client.py +++ b/jupyter_server/gateway/gateway_client.py @@ -1,6 +1,8 @@ """A kernel gateway client.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations + import asyncio import json import logging @@ -44,8 +46,6 @@ class GatewayTokenRenewerMeta(ABCMeta, type(LoggingConfigurable)): # type: ignore[misc] """The metaclass necessary for proper ABC behavior in a Configurable.""" - pass - class GatewayTokenRenewerBase( # type:ignore[misc] ABC, LoggingConfigurable, metaclass=GatewayTokenRenewerMeta @@ -69,7 +69,6 @@ def get_token( Given the current authorization header key, scheme, and token, this method returns a (potentially renewed) token for use against the Gateway server. """ - pass class NoOpTokenRenewer(GatewayTokenRenewerBase): # type:ignore[misc] @@ -538,7 +537,7 @@ def gateway_enabled(self): # Ensure KERNEL_LAUNCH_TIMEOUT has a default value. 
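For context on what `get_loader()` and `get_metadata()` above discover, a bare-bones extension package exposes the two module-level hooks sketched here; the module name is hypothetical.

```python
# my_extension/__init__.py -- hypothetical module discovered by get_metadata()
def _jupyter_server_extension_points():
    """Declare the extension point(s) this package provides."""
    return [{"module": "my_extension"}]


def _load_jupyter_server_extension(serverapp):
    """Preferred hook name; get_loader() falls back to the variant without
    the leading underscore for older extensions."""
    serverapp.log.info("my_extension loaded")
```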
KERNEL_LAUNCH_TIMEOUT = int(os.environ.get("KERNEL_LAUNCH_TIMEOUT", 40)) - _connection_args: dict # initialized on first use + _connection_args: dict[str, ty.Any] # initialized on first use gateway_token_renewer: GatewayTokenRenewerBase @@ -549,7 +548,7 @@ def __init__(self, **kwargs): self.gateway_token_renewer = self.gateway_token_renewer_class(parent=self, log=self.log) # type:ignore[abstract] # store of cookies with store time - self._cookies: ty.Dict[str, ty.Tuple[Morsel, datetime]] = {} + self._cookies: dict[str, tuple[Morsel[ty.Any], datetime]] = {} def init_connection_args(self): """Initialize arguments used on every request. Since these are primarily static values, @@ -661,7 +660,7 @@ def _clear_expired_cookies(self) -> None: for key in expired_keys: self._cookies.pop(key) - def _update_cookie_header(self, connection_args: dict) -> None: + def _update_cookie_header(self, connection_args: dict[str, ty.Any]) -> None: """Update a cookie header.""" self._clear_expired_cookies() @@ -698,9 +697,9 @@ class RetryableHTTPClient: MAX_RETRIES_CAP = 10 # The upper limit to max_retries value. max_retries: int = int(os.getenv("JUPYTER_GATEWAY_MAX_REQUEST_RETRIES", MAX_RETRIES_DEFAULT)) max_retries = max(0, min(max_retries, MAX_RETRIES_CAP)) # Enforce boundaries - retried_methods: ty.Set[str] = {"GET", "DELETE"} - retried_errors: ty.Set[int] = {502, 503, 504, 599} - retried_exceptions: ty.Set[type] = {ConnectionError} + retried_methods: set[str] = {"GET", "DELETE"} + retried_errors: set[int] = {502, 503, 504, 599} + retried_exceptions: set[type] = {ConnectionError} backoff_factor: float = 0.1 def __init__(self): diff --git a/jupyter_server/gateway/handlers.py b/jupyter_server/gateway/handlers.py index 952253ad8e..dcde4cd5ca 100644 --- a/jupyter_server/gateway/handlers.py +++ b/jupyter_server/gateway/handlers.py @@ -1,13 +1,15 @@ """Gateway API handlers.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations + import asyncio import logging import mimetypes import os import random import warnings -from typing import Optional, cast +from typing import Any, Optional, cast from jupyter_client.session import Session from tornado import web @@ -47,7 +49,6 @@ def check_origin(self, origin=None): def set_default_headers(self): """Undo the set_default_headers in JupyterHandler which doesn't make sense for websockets""" - pass def get_compression_options(self): """Get the compression options for the socket.""" @@ -159,7 +160,7 @@ def __init__(self, **kwargs): super().__init__() self.kernel_id = None self.ws = None - self.ws_future: Future = Future() + self.ws_future: Future[Any] = Future() self.disconnected = False self.retry = 0 @@ -178,11 +179,11 @@ async def _connect(self, kernel_id, message_callback): "channels", ) self.log.info(f"Connecting to {ws_url}") - kwargs: dict = {} + kwargs: dict[str, Any] = {} kwargs = client.load_connection_args(**kwargs) request = HTTPRequest(ws_url, **kwargs) - self.ws_future = cast(Future, websocket_connect(request)) + self.ws_future = cast("Future[Any]", websocket_connect(request)) self.ws_future.add_done_callback(self._connection_done) loop = IOLoop.current() @@ -238,7 +239,7 @@ async def _read_messages(self, callback): # NOTE(esevan): if websocket is not disconnected by client, try to reconnect. 
if not self.disconnected and self.retry < GatewayClient.instance().gateway_retry_max: - jitter = random.randint(10, 100) * 0.01 # noqa + jitter = random.randint(10, 100) * 0.01 retry_interval = ( min( GatewayClient.instance().gateway_retry_interval * (2**self.retry), diff --git a/jupyter_server/gateway/managers.py b/jupyter_server/gateway/managers.py index c77e0edb14..cd0b27b50d 100644 --- a/jupyter_server/gateway/managers.py +++ b/jupyter_server/gateway/managers.py @@ -1,6 +1,8 @@ """Kernel gateway managers.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations + import asyncio import datetime import json @@ -9,7 +11,7 @@ from queue import Empty, Queue from threading import Thread from time import monotonic -from typing import Any, Dict, Optional +from typing import Any, Optional, cast import websocket from jupyter_client.asynchronous.client import AsyncKernelClient @@ -36,7 +38,7 @@ class GatewayMappingKernelManager(AsyncMappingKernelManager): """Kernel manager that supports remote kernels hosted by Jupyter Kernel or Enterprise Gateway.""" # We'll maintain our own set of kernel ids - _kernels: Dict[str, "GatewayKernelManager"] = {} # type:ignore[assignment] + _kernels: dict[str, GatewayKernelManager] = {} # type:ignore[assignment] @default("kernel_manager_class") def _default_kernel_manager_class(self): @@ -311,7 +313,7 @@ async def get_kernel_spec(self, kernel_name, **kwargs): try: response = await gateway_request(kernel_spec_url, method="GET") except web.HTTPError as error: - if error.status_code == 404: # noqa: PLR2004 + if error.status_code == 404: # Convert not found to KeyError since that's what the Notebook handler expects # message is not used, but might as well make it useful for troubleshooting msg = f"kernelspec {kernel_name} not found on Gateway server at: {GatewayClient.instance().url}" @@ -340,7 +342,7 @@ async def get_kernel_spec_resource(self, kernel_name, path): try: response = await gateway_request(kernel_spec_resource_url, method="GET") except web.HTTPError as error: - if error.status_code == 404: # noqa: PLR2004 + if error.status_code == 404: kernel_spec_resource = None else: raise @@ -366,7 +368,7 @@ async def kernel_culled(self, kernel_id: str) -> bool: # typing: ignore # Note that should the redundant polling be consolidated, or replaced with an event-based # notification model, this will need to be revisited. 
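The gateway reconnect paths above (and the `RetryableHTTPClient` defaults) implement capped exponential backoff with a small random jitter. A standalone sketch of that interval computation, assuming the interval is clamped to a configured maximum:

```python
import random


def next_retry_interval(retry: int, base: float, maximum: float) -> float:
    """Capped exponential backoff plus 0.1-1.0s of jitter (illustrative only)."""
    jitter = random.randint(10, 100) * 0.01
    return min(base * (2**retry), maximum) + jitter


# e.g. retries 0..4 with base=1s, maximum=30s -> roughly 1s, 2s, 4s, 8s, 16s (+ jitter)
```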
km = self.kernel_manager.get_kernel(kernel_id) - except Exception: # noqa + except Exception: # Let exceptions here reflect culled kernel pass return km is None @@ -408,7 +410,7 @@ def has_kernel(self): def client(self, **kwargs): """Create a client configured to connect to our kernel""" - kw: dict = {} + kw: dict[str, Any] = {} kw.update(self.get_connection_info(session=True)) kw.update( { @@ -437,7 +439,7 @@ async def refresh_model(self, model=None): response = await gateway_request(self.kernel_url, method="GET") except web.HTTPError as error: - if error.status_code == 404: # noqa: PLR2004 + if error.status_code == 404: self.log.warning("Kernel not found at: %s" % self.kernel_url) model = None else: @@ -531,7 +533,7 @@ async def shutdown_kernel(self, now=False, restart=False): response = await gateway_request(self.kernel_url, method="DELETE") self.log.debug("Shutdown kernel response: %d %s", response.code, response.reason) except web.HTTPError as error: - if error.status_code == 404: # noqa: PLR2004 + if error.status_code == 404: self.log.debug("Shutdown kernel response: kernel not found (ignored)") else: raise @@ -583,13 +585,12 @@ async def is_alive(self): def cleanup_resources(self, restart=False): """Clean up resources when the kernel is shut down""" - pass KernelManagerABC.register(GatewayKernelManager) -class ChannelQueue(Queue): +class ChannelQueue(Queue): # type:ignore[type-arg] """A queue for a named channel.""" channel_name: Optional[str] = None @@ -623,7 +624,7 @@ async def _async_get(self, timeout=None): raise await asyncio.sleep(0) - async def get_msg(self, *args: Any, **kwargs: Any) -> dict: + async def get_msg(self, *args: Any, **kwargs: Any) -> dict[str, Any]: """Get a message from the queue.""" timeout = kwargs.get("timeout", 1) msg = await self._async_get(timeout=timeout) @@ -633,9 +634,9 @@ async def get_msg(self, *args: Any, **kwargs: Any) -> dict: ) ) self.task_done() - return msg + return cast("dict[str, Any]", msg) - def send(self, msg: dict) -> None: + def send(self, msg: dict[str, Any]) -> None: """Send a message to the queue.""" message = json.dumps(msg, default=ChannelQueue.serialize_datetime).replace(" None: """Start the queue.""" - pass def stop(self) -> None: """Stop the queue.""" @@ -710,7 +710,7 @@ class GatewayKernelClient(AsyncKernelClient): # flag for whether execute requests should be allowed to call raw_input: allow_stdin = False _channels_stopped: bool - _channel_queues: Optional[Dict[str, ChannelQueue]] + _channel_queues: Optional[dict[str, ChannelQueue]] _control_channel: Optional[ChannelQueue] # type:ignore[assignment] _hb_channel: Optional[ChannelQueue] # type:ignore[assignment] _stdin_channel: Optional[ChannelQueue] # type:ignore[assignment] diff --git a/jupyter_server/i18n/__init__.py b/jupyter_server/i18n/__init__.py index 2c7bb6dbea..896f41c57c 100644 --- a/jupyter_server/i18n/__init__.py +++ b/jupyter_server/i18n/__init__.py @@ -1,11 +1,14 @@ """Server functions for loading translations """ +from __future__ import annotations + import errno import json import re from collections import defaultdict from os.path import dirname from os.path import join as pjoin +from typing import Any I18N_DIR = dirname(__file__) # Cache structure: @@ -15,7 +18,7 @@ # ... 
# } # }} -TRANSLATIONS_CACHE: dict = {"nbjs": {}} +TRANSLATIONS_CACHE: dict[str, Any] = {"nbjs": {}} _accept_lang_re = re.compile( @@ -56,7 +59,7 @@ def parse_accept_lang_header(accept_lang): def load(language, domain="nbjs"): """Load translations from an nbjs.json file""" try: - f = open(pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8") # noqa + f = open(pjoin(I18N_DIR, language, "LC_MESSAGES", "nbjs.json"), encoding="utf-8") # noqa: SIM115 except OSError as e: if e.errno != errno.ENOENT: raise @@ -84,7 +87,7 @@ def combine_translations(accept_language, domain="nbjs"): Returns data re-packaged in jed1.x format. """ lang_codes = parse_accept_lang_header(accept_language) - combined: dict = {} + combined: dict[str, Any] = {} for language in lang_codes: if language == "en": # en is default, all translations are in frontend. diff --git a/jupyter_server/kernelspecs/handlers.py b/jupyter_server/kernelspecs/handlers.py index aa44cb086f..c7cb141459 100644 --- a/jupyter_server/kernelspecs/handlers.py +++ b/jupyter_server/kernelspecs/handlers.py @@ -41,7 +41,7 @@ async def get(self, kernel_name, path, include_body=True): mimetype: str = mimetypes.guess_type(path)[0] or "text/plain" self.set_header("Content-Type", mimetype) self.finish(kernel_spec_res) - return + return None else: self.log.warning( "Kernelspec resource '{}' for '{}' not found. Kernel spec manager may" diff --git a/jupyter_server/log.py b/jupyter_server/log.py index 52eadadea8..705eaaf44c 100644 --- a/jupyter_server/log.py +++ b/jupyter_server/log.py @@ -55,12 +55,12 @@ def log_request(handler): except AttributeError: logger = access_log - if status < 300 or status == 304: # noqa: PLR2004 + if status < 300 or status == 304: # Successes (or 304 FOUND) are debug-level log_method = logger.debug - elif status < 400: # noqa: PLR2004 + elif status < 400: log_method = logger.info - elif status < 500: # noqa: PLR2004 + elif status < 500: log_method = logger.warning else: log_method = logger.error @@ -84,11 +84,11 @@ def log_request(handler): ns["username"] = username msg = "{status} {method} {uri} ({username}@{ip}) {request_time:.2f}ms" - if status >= 400: # noqa: PLR2004 + if status >= 400: # log bad referrers ns["referer"] = _scrub_uri(request.headers.get("Referer", "None")) msg = msg + " referer={referer}" - if status >= 500 and status != 502: # noqa: PLR2004 + if status >= 500 and status != 502: # Log a subset of the headers if it caused an error. headers = {} for header in ["Host", "Accept", "Referer", "User-Agent"]: diff --git a/jupyter_server/nbconvert/handlers.py b/jupyter_server/nbconvert/handlers.py index 4a3f68ff2b..b7a39d0c8b 100644 --- a/jupyter_server/nbconvert/handlers.py +++ b/jupyter_server/nbconvert/handlers.py @@ -139,7 +139,7 @@ async def get(self, format, path): raise web.HTTPError(500, "nbconvert failed: %s" % e) from e if respond_zip(self, name, output, resources): - return + return None # Force download if requested if self.get_argument("download", "false").lower() == "true": diff --git a/jupyter_server/pytest_plugin.py b/jupyter_server/pytest_plugin.py index 505c46e823..f77448f866 100644 --- a/jupyter_server/pytest_plugin.py +++ b/jupyter_server/pytest_plugin.py @@ -2,6 +2,7 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
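The pytest plugin below gains explicit typing on its fixtures. As a rough illustration (not part of this diff), a downstream test could consume them along these lines, assuming the jupyter_server pytest plugin is loaded and an async-capable pytest runner is configured:

```python
async def test_roundtrip_notebook(jp_large_contents_manager):
    """Hypothetical test using the AsyncLargeFileManager fixture defined below."""
    cm = jp_large_contents_manager
    model = await cm.new_untitled(path="", type="notebook")
    fetched = await cm.get(model["path"])
    assert fetched["type"] == "notebook"
```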
import json +from pathlib import Path import pytest @@ -17,8 +18,8 @@ } -@pytest.fixture -def jp_kernelspecs(jp_data_dir): +@pytest.fixture() # type:ignore[misc] +def jp_kernelspecs(jp_data_dir: Path) -> None: # noqa: PT004 """Configures some sample kernelspecs in the Jupyter data directory.""" spec_names = ["sample", "sample2", "bad"] for name in spec_names: @@ -41,7 +42,7 @@ def jp_contents_manager(request, tmp_path): return AsyncFileContentsManager(root_dir=str(tmp_path), use_atomic_writing=request.param) -@pytest.fixture +@pytest.fixture() def jp_large_contents_manager(tmp_path): """Returns an AsyncLargeFileManager instance.""" return AsyncLargeFileManager(root_dir=str(tmp_path)) diff --git a/jupyter_server/serverapp.py b/jupyter_server/serverapp.py index 9bfdd87ecb..9e4a57375d 100644 --- a/jupyter_server/serverapp.py +++ b/jupyter_server/serverapp.py @@ -1,6 +1,8 @@ """A tornado based Jupyter server.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations + import datetime import errno import gettext @@ -201,7 +203,7 @@ def random_ports(port: int, n: int) -> t.Generator[int, None, None]: for i in range(min(5, n)): yield port + i for _ in range(n - 5): - yield max(1, port + random.randint(-2 * n, 2 * n)) # noqa + yield max(1, port + random.randint(-2 * n, 2 * n)) def load_handlers(name: str) -> t.Any: @@ -309,10 +311,10 @@ def init_settings( _template_path = (_template_path,) template_path = [os.path.expanduser(path) for path in _template_path] - jenv_opt: dict = {"autoescape": True} + jenv_opt: dict[str, t.Any] = {"autoescape": True} jenv_opt.update(jinja_env_options if jinja_env_options else {}) - env = Environment( # noqa: S701 + env = Environment( loader=FileSystemLoader(template_path), extensions=["jinja2.ext.i18n"], **jenv_opt ) sys_info = get_sys_info() @@ -795,7 +797,7 @@ class ServerApp(JupyterApp): ZMQChannelsWebsocketConnection, ] - subcommands: dict = { + subcommands: dict[str, t.Any] = { "list": ( JupyterServerListApp, JupyterServerListApp.description.splitlines()[0], @@ -929,7 +931,7 @@ def _default_ip(self) -> str: @validate("ip") def _validate_ip(self, proposal: t.Any) -> str: - value = proposal["value"] + value = t.cast(str, proposal["value"]) if value == "*": value = "" return value @@ -987,7 +989,7 @@ def _port_retries_default(self) -> int: ) @validate("sock_mode") - def _validate_sock_mode(self, proposal: t.Any) -> int: + def _validate_sock_mode(self, proposal: t.Any) -> t.Any: value = proposal["value"] try: converted_value = int(value.encode(), 8) @@ -1403,7 +1405,7 @@ def _deprecated_cookie_config(self, change: t.Any) -> None: @validate("base_url") def _update_base_url(self, proposal: t.Any) -> str: - value = proposal["value"] + value = t.cast(str, proposal["value"]) if not value.startswith("/"): value = "/" + value if not value.endswith("/"): @@ -1420,14 +1422,14 @@ def _update_base_url(self, proposal: t.Any) -> str: ) @property - def static_file_path(self) -> t.List[str]: + def static_file_path(self) -> list[str]: """return extra paths + the default location""" return [*self.extra_static_paths, DEFAULT_STATIC_FILES_PATH] static_custom_path = List(Unicode(), help=_i18n("""Path to search for custom.js, css""")) @default("static_custom_path") - def _default_static_custom_path(self) -> t.List[str]: + def _default_static_custom_path(self) -> list[str]: return [os.path.join(d, "custom") for d in (self.config_dir, DEFAULT_STATIC_FILES_PATH)] extra_template_paths = List( @@ -1441,7 
+1443,7 @@ def _default_static_custom_path(self) -> t.List[str]: ) @property - def template_file_path(self) -> t.List[str]: + def template_file_path(self) -> list[str]: """return extra paths + the default locations""" return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST @@ -1483,7 +1485,7 @@ def template_file_path(self) -> t.List[str]: ) @default("kernel_manager_class") - def _default_kernel_manager_class(self) -> t.Union[str, t.Type[AsyncMappingKernelManager]]: + def _default_kernel_manager_class(self) -> t.Union[str, type[AsyncMappingKernelManager]]: if self.gateway_config.gateway_enabled: return "jupyter_server.gateway.managers.GatewayMappingKernelManager" return AsyncMappingKernelManager @@ -1494,7 +1496,7 @@ def _default_kernel_manager_class(self) -> t.Union[str, t.Type[AsyncMappingKerne ) @default("session_manager_class") - def _default_session_manager_class(self) -> t.Union[str, t.Type[SessionManager]]: + def _default_session_manager_class(self) -> t.Union[str, type[SessionManager]]: if self.gateway_config.gateway_enabled: return "jupyter_server.gateway.managers.GatewaySessionManager" return SessionManager @@ -1508,7 +1510,7 @@ def _default_session_manager_class(self) -> t.Union[str, t.Type[SessionManager]] @default("kernel_websocket_connection_class") def _default_kernel_websocket_connection_class( self, - ) -> t.Union[str, t.Type[ZMQChannelsWebsocketConnection]]: + ) -> t.Union[str, type[ZMQChannelsWebsocketConnection]]: if self.gateway_config.gateway_enabled: return "jupyter_server.gateway.connections.GatewayWebSocketConnection" return ZMQChannelsWebsocketConnection @@ -1533,7 +1535,7 @@ def _default_kernel_websocket_connection_class( ) @default("kernel_spec_manager_class") - def _default_kernel_spec_manager_class(self) -> t.Union[str, t.Type[KernelSpecManager]]: + def _default_kernel_spec_manager_class(self) -> t.Union[str, type[KernelSpecManager]]: if self.gateway_config.gateway_enabled: return "jupyter_server.gateway.managers.GatewayKernelSpecManager" return KernelSpecManager @@ -1701,7 +1703,6 @@ def _root_dir_changed(self, change: t.Any) -> None: # record that root_dir is set, # which affects loading of deprecated notebook_dir self._root_dir_set = True - pass preferred_dir = Unicode( config=True, @@ -1856,7 +1857,7 @@ def starter_app(self) -> t.Any: """Get the Extension that started this server.""" return self._starter_app - def parse_command_line(self, argv: t.Optional[t.List[str]] = None) -> None: + def parse_command_line(self, argv: t.Optional[list[str]] = None) -> None: """Parse the command line options.""" super().parse_command_line(argv) @@ -1932,7 +1933,7 @@ def init_configurables(self) -> None: ) # Trigger a default/validation here explicitly while we still support the # deprecated trait on ServerApp (FIXME remove when deprecation finalized) - self.contents_manager.preferred_dir # noqa + self.contents_manager.preferred_dir # noqa: B018 self.session_manager = self.session_manager_class( parent=self, log=self.log, @@ -2148,7 +2149,9 @@ def init_resources(self) -> None: ) resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard)) - def _get_urlparts(self, path: t.Optional[str] = None, include_token: bool = False) -> t.Any: + def _get_urlparts( + self, path: t.Optional[str] = None, include_token: bool = False + ) -> urllib.parse.ParseResult: """Constructs a urllib named tuple, ParseResult, with default values set by server config. The returned tuple can be manipulated using the `_replace` method. 
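As a side note on the `_get_urlparts` docstring above: `ParseResult` is a named tuple, so callers can adjust individual fields with `_replace()` before rendering the URL. A small illustration with made-up values:

```python
from urllib.parse import ParseResult, urlunparse

parts = ParseResult(
    scheme="http", netloc="localhost:8888", path="/", params="", query="", fragment=""
)
lab_url = urlunparse(parts._replace(path="/lab", query="token=abc123"))
# -> "http://localhost:8888/lab?token=abc123"
```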
@@ -2160,7 +2163,7 @@ def _get_urlparts(self, path: t.Optional[str] = None, include_token: bool = Fals if not self.ip: ip = "localhost" # Handle nonexplicit hostname. - elif self.ip in ("0.0.0.0", "::"): # noqa + elif self.ip in ("0.0.0.0", "::"): ip = "%s" % socket.gethostname() else: ip = f"[{self.ip}]" if ":" in self.ip else self.ip @@ -2301,7 +2304,6 @@ def _signal_info(self, sig: t.Any, frame: t.Any) -> None: def init_components(self) -> None: """Check the components submodule, and warn if it's unclean""" # TODO: this should still check, but now we use bower, not git submodule - pass def find_server_extensions(self) -> None: """ @@ -2487,14 +2489,13 @@ def _find_http_port(self) -> None: else: self.log.info(_i18n("The port %i is already in use.") % port) continue - elif e.errno in ( + if e.errno in ( errno.EACCES, getattr(errno, "WSAEACCES", errno.EACCES), ): self.log.warning(_i18n("Permission to listen on port %i denied.") % port) continue - else: - raise + raise else: success = True self.port = port @@ -2545,7 +2546,7 @@ def _init_asyncio_patch() -> None: @catch_config_error def initialize( self, - argv: t.Optional[t.List[str]] = None, + argv: t.Optional[list[str]] = None, find_extensions: bool = True, new_httpserver: bool = True, starter_extension: t.Any = None, @@ -2636,7 +2637,7 @@ async def cleanup_extensions(self) -> None: def running_server_info(self, kernel_count: bool = True) -> str: """Return the current working directory and the server url information""" - info = self.contents_manager.info_string() + "\n" + info = t.cast(str, self.contents_manager.info_string()) + "\n" if kernel_count: n_kernels = len(self.kernel_manager.list_kernel_ids()) kernel_msg = trans.ngettext("%d active kernel", "%d active kernels", n_kernels) @@ -2651,7 +2652,7 @@ def running_server_info(self, kernel_count: bool = True) -> str: ) return info - def server_info(self) -> t.Dict[str, t.Any]: + def server_info(self) -> dict[str, t.Any]: """Return a JSONable dict of information about this server.""" return { "url": self.connection_url, @@ -2784,7 +2785,7 @@ def remove_browser_open_file(self) -> None: if e.errno != errno.ENOENT: raise - def _prepare_browser_open(self) -> t.Tuple[str, t.Optional[str]]: + def _prepare_browser_open(self) -> tuple[str, t.Optional[str]]: """Prepare to open the browser.""" if not self.use_redirect_file: uri = self.default_url[len(self.base_url) :] @@ -2792,7 +2793,7 @@ def _prepare_browser_open(self) -> t.Tuple[str, t.Optional[str]]: if self.identity_provider.token: uri = url_concat(uri, {"token": self.identity_provider.token}) - if self.file_to_run: # noqa + if self.file_to_run: # noqa: SIM108 # Create a separate, temporary open-browser-file # pointing at a specific file. open_file = self.browser_open_file_to_run diff --git a/jupyter_server/services/api/api.yaml b/jupyter_server/services/api/api.yaml index 9e2c7162af..5ee5c416bd 100644 --- a/jupyter_server/services/api/api.yaml +++ b/jupyter_server/services/api/api.yaml @@ -106,6 +106,10 @@ paths: in: query description: "Return content (0 for no content, 1 for return content)" type: integer + - name: hash + in: query + description: "May return hash hexdigest string of content and the hash algorithm (0 for no hash - default, 1 for return hash). It may be ignored by the content manager." + type: integer responses: 404: description: No item found @@ -885,7 +889,7 @@ definitions: kernel: $ref: "#/definitions/Kernel" Contents: - description: "A contents object. The content and format keys may be null if content is not contained. 
If type is 'file', then the mimetype will be null." + description: "A contents object. The content and format keys may be null if content is not contained. The hash may be null if hash is not required. If type is 'file', then the mimetype will be null." type: object required: - type @@ -934,6 +938,12 @@ definitions: format: type: string description: Format of content (one of null, 'text', 'base64', 'json') + hash: + type: string + description: "[optional] The hexdigest hash string of content, if requested (otherwise null). It cannot be null if hash_algorithm is defined." + hash_algorithm: + type: string + description: "[optional] The algorithm used to produce the hash, if requested (otherwise null). It cannot be null if hash is defined." Checkpoints: description: A checkpoint object. type: object diff --git a/jupyter_server/services/api/handlers.py b/jupyter_server/services/api/handlers.py index 9583732289..8b9e44f9cf 100644 --- a/jupyter_server/services/api/handlers.py +++ b/jupyter_server/services/api/handlers.py @@ -3,7 +3,7 @@ # Distributed under the terms of the Modified BSD License. import json import os -from typing import Dict, List +from typing import Any, Dict, List from jupyter_core.utils import ensure_async from tornado import web @@ -67,7 +67,7 @@ class IdentityHandler(APIHandler): """Get the current user's identity model""" @web.authenticated - def get(self): + async def get(self): """Get the identity model.""" permissions_json: str = self.get_argument("permissions", "") bad_permissions_msg = f'permissions should be a JSON dict of {{"resource": ["action",]}}, got {permissions_json!r}' @@ -94,10 +94,13 @@ def get(self): allowed = permissions[resource] = [] for action in actions: - if self.authorizer.is_authorized(self, user=user, resource=resource, action=action): + authorized = await ensure_async( + self.authorizer.is_authorized(self, user, action, resource) + ) + if authorized: allowed.append(action) - identity: Dict = self.identity_provider.identity_model(user) + identity: Dict[str, Any] = self.identity_provider.identity_model(user) model = { "identity": identity, "permissions": permissions, diff --git a/jupyter_server/services/config/manager.py b/jupyter_server/services/config/manager.py index bc42deb645..720c8e7bd7 100644 --- a/jupyter_server/services/config/manager.py +++ b/jupyter_server/services/config/manager.py @@ -3,6 +3,7 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License.
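The identity handler above now awaits `ensure_async(self.authorizer.is_authorized(...))`, which is what enables the async authorizers mentioned in the changelog. A hedged sketch of such an authorizer; the class name and policy lookup are invented for illustration:

```python
from jupyter_server.auth import Authorizer


class ExternalPolicyAuthorizer(Authorizer):
    """Hypothetical authorizer whose decisions require an awaitable lookup."""

    async def is_authorized(self, handler, user, action, resource) -> bool:
        allowed_actions = await self._fetch_policy(user.username, resource)
        return action in allowed_actions

    async def _fetch_policy(self, username, resource):
        # Placeholder for a call to an external policy service.
        return {"read", "write", "execute"}
```

Such a class would be wired in with something like `c.ServerApp.authorizer_class = "mymodule.ExternalPolicyAuthorizer"` in a server config file (module path hypothetical).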
import os.path +import typing as t from jupyter_core.paths import jupyter_config_dir, jupyter_config_path from traitlets import Instance, List, Unicode, default, observe @@ -22,7 +23,7 @@ class ConfigManager(LoggingConfigurable): def get(self, section_name): """Get the config from all config sections.""" - config: dict = {} + config: t.Dict[str, t.Any] = {} # step through back to front, to ensure front of the list is top priority for p in self.read_config_path[::-1]: cm = BaseJSONConfigManager(config_dir=p) diff --git a/jupyter_server/services/contents/filecheckpoints.py b/jupyter_server/services/contents/filecheckpoints.py index f6d1ef44e7..522b3bbd01 100644 --- a/jupyter_server/services/contents/filecheckpoints.py +++ b/jupyter_server/services/contents/filecheckpoints.py @@ -252,7 +252,7 @@ def get_file_checkpoint(self, checkpoint_id, path): if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) - content, format = self._read_file(os_checkpoint_path, format=None) + content, format = self._read_file(os_checkpoint_path, format=None) # type: ignore[misc] return { "type": "file", "content": content, @@ -318,7 +318,7 @@ async def get_file_checkpoint(self, checkpoint_id, path): if not os.path.isfile(os_checkpoint_path): self.no_such_checkpoint(path, checkpoint_id) - content, format = await self._read_file(os_checkpoint_path, format=None) + content, format = await self._read_file(os_checkpoint_path, format=None) # type: ignore[misc] return { "type": "file", "content": content, diff --git a/jupyter_server/services/contents/fileio.py b/jupyter_server/services/contents/fileio.py index 3033ebe3fa..19f84f4653 100644 --- a/jupyter_server/services/contents/fileio.py +++ b/jupyter_server/services/contents/fileio.py @@ -3,7 +3,11 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. 
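`ConfigManager.get()` above walks `read_config_path` back to front so that entries earlier in the list win. The sketch below shows that merge order with plain dictionaries, assuming `recursive_update` from `jupyter_server.config_manager`:

```python
from jupyter_server.config_manager import recursive_update

layers = [
    {"theme": "dark", "nested": {"a": 1}},   # front of read_config_path (highest priority)
    {"theme": "light", "nested": {"b": 2}},  # further back (lower priority)
]

merged: dict = {}
for layer in reversed(layers):  # apply back to front, so later (front) updates win
    recursive_update(merged, layer)
# merged == {"theme": "dark", "nested": {"a": 1, "b": 2}}
```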
+ +from __future__ import annotations + import errno +import hashlib import os import shutil from base64 import decodebytes, encodebytes @@ -13,7 +17,7 @@ import nbformat from anyio.to_thread import run_sync from tornado.web import HTTPError -from traitlets import Bool +from traitlets import Bool, Enum from traitlets.config import Configurable from traitlets.config.configurable import LoggingConfigurable @@ -105,9 +109,9 @@ def atomic_writing(path, text=True, encoding="utf-8", log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault("newline", "\n") - fileobj = open(path, "w", encoding=encoding, **kwargs) # noqa + fileobj = open(path, "w", encoding=encoding, **kwargs) # noqa: SIM115 else: - fileobj = open(path, "wb", **kwargs) # noqa + fileobj = open(path, "wb", **kwargs) # noqa: SIM115 try: yield fileobj @@ -153,9 +157,9 @@ def _simple_writing(path, text=True, encoding="utf-8", log=None, **kwargs): if text: # Make sure that text files have Unix linefeeds by default kwargs.setdefault("newline", "\n") - fileobj = open(path, "w", encoding=encoding, **kwargs) # noqa + fileobj = open(path, "w", encoding=encoding, **kwargs) # noqa: SIM115 else: - fileobj = open(path, "wb", **kwargs) # noqa + fileobj = open(path, "wb", **kwargs) # noqa: SIM115 try: yield fileobj @@ -192,6 +196,13 @@ class FileManagerMixin(LoggingConfigurable, Configurable): If set to False, the new notebook is written directly on the old one which could fail (eg: full filesystem or quota )""", ) + hash_algorithm = Enum( # type: ignore[call-overload] + hashlib.algorithms_available, + default_value="sha256", + config=True, + help="Hash algorithm to use for file content, support by hashlib", + ) + @contextmanager def open(self, os_path, *args, **kwargs): """wrapper around io.open that turns permission errors into 403""" @@ -259,39 +270,57 @@ def _get_os_path(self, path): if os.path.splitdrive(path)[0]: raise HTTPError(404, "%s is not a relative API path" % path) os_path = to_os_path(ApiPath(path), root) + # validate os path + # e.g. "foo\0" raises ValueError: embedded null byte + try: + os.lstat(os_path) + except OSError: + # OSError could be FileNotFound, PermissionError, etc. + # those should raise (or not) elsewhere + pass + except ValueError: + raise HTTPError(404, f"{path} is not a valid path") from None + if not (os.path.abspath(os_path) + os.path.sep).startswith(root): raise HTTPError(404, "%s is outside root contents directory" % path) return os_path - def _read_notebook(self, os_path, as_version=4, capture_validation_error=None): + def _read_notebook( + self, os_path, as_version=4, capture_validation_error=None, raw: bool = False + ): """Read a notebook from an os path.""" - with self.open(os_path, "r", encoding="utf-8") as f: - try: - return nbformat.read( - f, as_version=as_version, capture_validation_error=capture_validation_error - ) - except Exception as e: - e_orig = e - - # If use_atomic_writing is enabled, we'll guess that it was also - # enabled when this notebook was written and look for a valid - # atomic intermediate. - tmp_path = path_to_intermediate(os_path) - - if not self.use_atomic_writing or not os.path.exists(tmp_path): - raise HTTPError( - 400, - f"Unreadable Notebook: {os_path} {e_orig!r}", - ) + answer = self._read_file(os_path, "text", raw=raw) - # Move the bad file aside, restore the intermediate, and try again. 
- invalid_file = path_to_invalid(os_path) - replace_file(os_path, invalid_file) - replace_file(tmp_path, os_path) - return self._read_notebook( - os_path, as_version, capture_validation_error=capture_validation_error + try: + nb = nbformat.reads( + answer[0], + as_version=as_version, + capture_validation_error=capture_validation_error, ) + return (nb, answer[2]) if raw else nb # type:ignore[misc] + except Exception as e: + e_orig = e + + # If use_atomic_writing is enabled, we'll guess that it was also + # enabled when this notebook was written and look for a valid + # atomic intermediate. + tmp_path = path_to_intermediate(os_path) + + if not self.use_atomic_writing or not os.path.exists(tmp_path): + raise HTTPError( + 400, + f"Unreadable Notebook: {os_path} {e_orig!r}", + ) + + # Move the bad file aside, restore the intermediate, and try again. + invalid_file = path_to_invalid(os_path) + replace_file(os_path, invalid_file) + replace_file(tmp_path, os_path) + return self._read_notebook( + os_path, as_version, capture_validation_error=capture_validation_error, raw=raw + ) + def _save_notebook(self, os_path, nb, capture_validation_error=None): """Save a notebook to an os_path.""" with self.atomic_writing(os_path, encoding="utf-8") as f: @@ -302,14 +331,46 @@ def _save_notebook(self, os_path, nb, capture_validation_error=None): capture_validation_error=capture_validation_error, ) - def _read_file(self, os_path, format): + def _get_hash(self, byte_content: bytes) -> dict[str, str]: + """Compute the hash hexdigest for the provided bytes. + + The hash algorithm is provided by the `hash_algorithm` attribute. + + Parameters + ---------- + byte_content : bytes + The bytes to hash + + Returns + ------- + A dictionary to be appended to a model {"hash": str, "hash_algorithm": str}. + """ + algorithm = self.hash_algorithm + h = hashlib.new(algorithm) + h.update(byte_content) + return {"hash": h.hexdigest(), "hash_algorithm": algorithm} + + def _read_file( + self, os_path: str, format: str | None, raw: bool = False + ) -> tuple[str | bytes, str] | tuple[str | bytes, str, bytes]: """Read a non-notebook file. - os_path: The path to be read. - format: - If 'text', the contents will be decoded as UTF-8. - If 'base64', the raw bytes contents will be encoded as base64. - If not specified, try to decode as UTF-8, and fall back to base64 + Parameters + ---------- + os_path: str + The path to be read. + format: str + If 'text', the contents will be decoded as UTF-8. + If 'base64', the raw bytes contents will be encoded as base64. + If 'byte', the raw bytes contents will be returned. + If not specified, try to decode as UTF-8, and fall back to base64 + raw: bool + [Optional] If True, will return as third argument the raw bytes content + + Returns + ------- + (content, format, byte_content) It returns the content in the given format + as well as the raw byte content. """ if not os.path.isfile(os_path): raise HTTPError(400, "Cannot read non-file %s" % os_path) @@ -317,11 +378,22 @@ def _read_file(self, os_path, format): with self.open(os_path, "rb") as f: bcontent = f.read() + if format == "byte": + # Not for http response but internal use + return (bcontent, "byte", bcontent) if raw else (bcontent, "byte") + if format is None or format == "text": # Try to interpret as unicode if format is unknown or if unicode # was explicitly requested. 
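`_get_hash()` above pairs a hexdigest of the raw bytes with the configured `hash_algorithm`. The standalone sketch below mirrors that contract; the helper name is illustrative only.

```python
import hashlib


def content_hash(byte_content: bytes, algorithm: str = "sha256") -> dict[str, str]:
    """Mirror of the model fields produced by _get_hash() above."""
    h = hashlib.new(algorithm)
    h.update(byte_content)
    return {"hash": h.hexdigest(), "hash_algorithm": algorithm}


content_hash(b"print('hi')")
# -> {"hash": "<64-character hexdigest>", "hash_algorithm": "sha256"}
```

Because `hash_algorithm` is a configurable trait restricted to `hashlib.algorithms_available`, a deployment could switch it with, for example, `c.FileContentsManager.hash_algorithm = "sha512"` in `jupyter_server_config.py`.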
try: - return bcontent.decode("utf8"), "text" + return ( + (bcontent.decode("utf8"), "text", bcontent) + if raw + else ( + bcontent.decode("utf8"), + "text", + ) + ) except UnicodeError as e: if format == "text": raise HTTPError( @@ -329,7 +401,14 @@ def _read_file(self, os_path, format): "%s is not UTF-8 encoded" % os_path, reason="bad format", ) from e - return encodebytes(bcontent).decode("ascii"), "base64" + return ( + (encodebytes(bcontent).decode("ascii"), "base64", bcontent) + if raw + else ( + encodebytes(bcontent).decode("ascii"), + "base64", + ) + ) def _save_file(self, os_path, content, format): """Save content of a generic file.""" @@ -363,39 +442,45 @@ async def _copy(self, src, dest): """ await async_copy2_safe(src, dest, log=self.log) - async def _read_notebook(self, os_path, as_version=4, capture_validation_error=None): + async def _read_notebook( + self, os_path, as_version=4, capture_validation_error=None, raw: bool = False + ): """Read a notebook from an os path.""" - with self.open(os_path, encoding="utf-8") as f: - try: - return await run_sync( - partial( - nbformat.read, - as_version=as_version, - capture_validation_error=capture_validation_error, - ), - f, - ) - except Exception as e: - e_orig = e - - # If use_atomic_writing is enabled, we'll guess that it was also - # enabled when this notebook was written and look for a valid - # atomic intermediate. - tmp_path = path_to_intermediate(os_path) - - if not self.use_atomic_writing or not os.path.exists(tmp_path): - raise HTTPError( - 400, - f"Unreadable Notebook: {os_path} {e_orig!r}", - ) + answer = await self._read_file(os_path, "text", raw) - # Move the bad file aside, restore the intermediate, and try again. - invalid_file = path_to_invalid(os_path) - await async_replace_file(os_path, invalid_file) - await async_replace_file(tmp_path, os_path) - return await self._read_notebook( - os_path, as_version, capture_validation_error=capture_validation_error + try: + nb = await run_sync( + partial( + nbformat.reads, + as_version=as_version, + capture_validation_error=capture_validation_error, + ), + answer[0], ) + return (nb, answer[2]) if raw else nb # type:ignore[misc] + except Exception as e: + e_orig = e + + # If use_atomic_writing is enabled, we'll guess that it was also + # enabled when this notebook was written and look for a valid + # atomic intermediate. + tmp_path = path_to_intermediate(os_path) + + if not self.use_atomic_writing or not os.path.exists(tmp_path): + raise HTTPError( + 400, + f"Unreadable Notebook: {os_path} {e_orig!r}", + ) + + # Move the bad file aside, restore the intermediate, and try again. + invalid_file = path_to_invalid(os_path) + await async_replace_file(os_path, invalid_file) + await async_replace_file(tmp_path, os_path) + answer = await self._read_notebook( + os_path, as_version, capture_validation_error=capture_validation_error, raw=raw + ) + + return answer async def _save_notebook(self, os_path, nb, capture_validation_error=None): """Save a notebook to an os_path.""" @@ -410,14 +495,27 @@ async def _save_notebook(self, os_path, nb, capture_validation_error=None): f, ) - async def _read_file(self, os_path, format): + async def _read_file( # type: ignore[override] + self, os_path: str, format: str | None, raw: bool = False + ) -> tuple[str | bytes, str] | tuple[str | bytes, str, bytes]: """Read a non-notebook file. - os_path: The path to be read. - format: - If 'text', the contents will be decoded as UTF-8. - If 'base64', the raw bytes contents will be encoded as base64. 
- If not specified, try to decode as UTF-8, and fall back to base64 + Parameters + ---------- + os_path: str + The path to be read. + format: str + If 'text', the contents will be decoded as UTF-8. + If 'base64', the raw bytes contents will be encoded as base64. + If 'byte', the raw bytes contents will be returned. + If not specified, try to decode as UTF-8, and fall back to base64 + raw: bool + [Optional] If True, will return as third argument the raw bytes content + + Returns + ------- + (content, format, byte_content) It returns the content in the given format + as well as the raw byte content. """ if not os.path.isfile(os_path): raise HTTPError(400, "Cannot read non-file %s" % os_path) @@ -425,11 +523,22 @@ async def _read_file(self, os_path, format): with self.open(os_path, "rb") as f: bcontent = await run_sync(f.read) + if format == "byte": + # Not for http response but internal use + return (bcontent, "byte", bcontent) if raw else (bcontent, "byte") + if format is None or format == "text": # Try to interpret as unicode if format is unknown or if unicode # was explicitly requested. try: - return bcontent.decode("utf8"), "text" + return ( + (bcontent.decode("utf8"), "text", bcontent) + if raw + else ( + bcontent.decode("utf8"), + "text", + ) + ) except UnicodeError as e: if format == "text": raise HTTPError( @@ -437,7 +546,11 @@ async def _read_file(self, os_path, format): "%s is not UTF-8 encoded" % os_path, reason="bad format", ) from e - return encodebytes(bcontent).decode("ascii"), "base64" + return ( + (encodebytes(bcontent).decode("ascii"), "base64", bcontent) + if raw + else (encodebytes(bcontent).decode("ascii"), "base64") + ) async def _save_file(self, os_path, content, format): """Save content of a generic file.""" diff --git a/jupyter_server/services/contents/filemanager.py b/jupyter_server/services/contents/filemanager.py index fe12fb1b7a..c56a1acc70 100644 --- a/jupyter_server/services/contents/filemanager.py +++ b/jupyter_server/services/contents/filemanager.py @@ -1,6 +1,8 @@ """A contents manager that uses the local file system for storage.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from __future__ import annotations + import errno import math import mimetypes @@ -10,6 +12,7 @@ import stat import subprocess import sys +import typing as t import warnings from datetime import datetime from pathlib import Path @@ -265,6 +268,8 @@ def _base_model(self, path): model["mimetype"] = None model["size"] = size model["writable"] = self.is_writable(path) + model["hash"] = None + model["hash_algorithm"] = None return model @@ -332,7 +337,7 @@ def _dir_model(self, path, content=True): return model - def _file_model(self, path, content=True, format=None): + def _file_model(self, path, content=True, format=None, require_hash=False): """Build a model for a file if content is requested, include the file contents. @@ -341,6 +346,8 @@ def _file_model(self, path, content=True, format=None): If 'text', the contents will be decoded as UTF-8. If 'base64', the raw bytes contents will be encoded as base64. 
If not specified, try to decode as UTF-8, and fall back to base64 + + if require_hash is true, the model will include 'hash' """ model = self._base_model(path) model["type"] = "file" @@ -348,8 +355,9 @@ def _file_model(self, path, content=True, format=None): os_path = self._get_os_path(path) model["mimetype"] = mimetypes.guess_type(os_path)[0] + bytes_content = None if content: - content, format = self._read_file(os_path, format) + content, format, bytes_content = self._read_file(os_path, format, raw=True) # type: ignore[misc] if model["mimetype"] is None: default_mime = { "text": "text/plain", @@ -362,31 +370,44 @@ def _file_model(self, path, content=True, format=None): format=format, ) + if require_hash: + if bytes_content is None: + bytes_content, _ = self._read_file(os_path, "byte") # type: ignore[assignment,misc] + model.update(**self._get_hash(bytes_content)) # type: ignore[arg-type] + return model - def _notebook_model(self, path, content=True): + def _notebook_model(self, path, content=True, require_hash=False): """Build a notebook model if content is requested, the notebook content will be populated as a JSON structure (not double-serialized) + + if require_hash is true, the model will include 'hash' """ model = self._base_model(path) model["type"] = "notebook" os_path = self._get_os_path(path) + bytes_content = None if content: - validation_error: dict = {} - nb = self._read_notebook( - os_path, as_version=4, capture_validation_error=validation_error + validation_error: dict[str, t.Any] = {} + nb, bytes_content = self._read_notebook( + os_path, as_version=4, capture_validation_error=validation_error, raw=True ) self.mark_trusted_cells(nb, path) model["content"] = nb model["format"] = "json" self.validate_notebook_model(model, validation_error) + if require_hash: + if bytes_content is None: + bytes_content, _ = self._read_file(os_path, "byte") # type: ignore[misc] + model.update(**self._get_hash(bytes_content)) # type: ignore[arg-type] + return model - def get(self, path, content=True, type=None, format=None): + def get(self, path, content=True, type=None, format=None, require_hash=False): """Takes a path for an entity and returns its model Parameters @@ -401,6 +422,8 @@ def get(self, path, content=True, type=None, format=None): format : str, optional The requested format for file contents. 'text' or 'base64'. Ignored if this returns a notebook or directory model. + require_hash: bool, optional + Whether to include the hash of the file contents. 
Returns ------- @@ -428,11 +451,13 @@ def get(self, path, content=True, type=None, format=None): ) model = self._dir_model(path, content=content) elif type == "notebook" or (type is None and path.endswith(".ipynb")): - model = self._notebook_model(path, content=content) + model = self._notebook_model(path, content=content, require_hash=require_hash) else: if type == "directory": raise web.HTTPError(400, "%s is not a directory" % path, reason="bad type") - model = self._file_model(path, content=content, format=format) + model = self._file_model( + path, content=content, format=format, require_hash=require_hash + ) self.emit(data={"action": "get", "path": path}) return model @@ -465,7 +490,7 @@ def save(self, model, path=""): self.log.debug("Saving %s", os_path) - validation_error: dict = {} + validation_error: dict[str, t.Any] = {} try: if model["type"] == "notebook": nb = nbformat.from_dict(model["content"]) @@ -683,7 +708,9 @@ def _get_dir_size(self, path="."): ).stdout.split() else: result = subprocess.run( - ["du", "-s", "--block-size=1", path], capture_output=True, check=True + ["du", "-s", "--block-size=1", path], + capture_output=True, + check=True, ).stdout.split() self.log.info(f"current status of du command {result}") @@ -781,7 +808,7 @@ async def _dir_model(self, path, content=True): return model - async def _file_model(self, path, content=True, format=None): + async def _file_model(self, path, content=True, format=None, require_hash=False): """Build a model for a file if content is requested, include the file contents. @@ -790,6 +817,8 @@ async def _file_model(self, path, content=True, format=None): If 'text', the contents will be decoded as UTF-8. If 'base64', the raw bytes contents will be encoded as base64. If not specified, try to decode as UTF-8, and fall back to base64 + + if require_hash is true, the model will include 'hash' """ model = self._base_model(path) model["type"] = "file" @@ -797,8 +826,9 @@ async def _file_model(self, path, content=True, format=None): os_path = self._get_os_path(path) model["mimetype"] = mimetypes.guess_type(os_path)[0] + bytes_content = None if content: - content, format = await self._read_file(os_path, format) + content, format, bytes_content = await self._read_file(os_path, format, raw=True) # type: ignore[misc] if model["mimetype"] is None: default_mime = { "text": "text/plain", @@ -811,9 +841,14 @@ async def _file_model(self, path, content=True, format=None): format=format, ) + if require_hash: + if bytes_content is None: + bytes_content, _ = await self._read_file(os_path, "byte") # type: ignore[assignment,misc] + model.update(**self._get_hash(bytes_content)) # type: ignore[arg-type] + return model - async def _notebook_model(self, path, content=True): + async def _notebook_model(self, path, content=True, require_hash=False): """Build a notebook model if content is requested, the notebook content will be populated @@ -823,19 +858,25 @@ async def _notebook_model(self, path, content=True): model["type"] = "notebook" os_path = self._get_os_path(path) + bytes_content = None if content: - validation_error: dict = {} - nb = await self._read_notebook( - os_path, as_version=4, capture_validation_error=validation_error + validation_error: dict[str, t.Any] = {} + nb, bytes_content = await self._read_notebook( + os_path, as_version=4, capture_validation_error=validation_error, raw=True ) self.mark_trusted_cells(nb, path) model["content"] = nb model["format"] = "json" self.validate_notebook_model(model, validation_error) + if require_hash: + if 
bytes_content is None: + bytes_content, _ = await self._read_file(os_path, "byte") # type: ignore[misc] + model.update(**(self._get_hash(bytes_content))) # type: ignore[arg-type] + return model - async def get(self, path, content=True, type=None, format=None): + async def get(self, path, content=True, type=None, format=None, require_hash=False): """Takes a path for an entity and returns its model Parameters @@ -850,6 +891,8 @@ async def get(self, path, content=True, type=None, format=None): format : str, optional The requested format for file contents. 'text' or 'base64'. Ignored if this returns a notebook or directory model. + require_hash: bool, optional + Whether to include the hash of the file contents. Returns ------- @@ -872,11 +915,13 @@ async def get(self, path, content=True, type=None, format=None): ) model = await self._dir_model(path, content=content) elif type == "notebook" or (type is None and path.endswith(".ipynb")): - model = await self._notebook_model(path, content=content) + model = await self._notebook_model(path, content=content, require_hash=require_hash) else: if type == "directory": raise web.HTTPError(400, "%s is not a directory" % path, reason="bad type") - model = await self._file_model(path, content=content, format=format) + model = await self._file_model( + path, content=content, format=format, require_hash=require_hash + ) self.emit(data={"action": "get", "path": path}) return model @@ -906,7 +951,7 @@ async def save(self, model, path=""): os_path = self._get_os_path(path) self.log.debug("Saving %s", os_path) - validation_error: dict = {} + validation_error: dict[str, t.Any] = {} try: if model["type"] == "notebook": nb = nbformat.from_dict(model["content"]) @@ -1094,7 +1139,7 @@ async def copy(self, from_path, to_path=None): async def _copy_dir( self, from_path: str, to_path_original: str, to_name: str, to_path: str - ) -> dict: + ) -> dict[str, t.Any]: """ handles copying directories returns the model for the copied directory @@ -1111,7 +1156,7 @@ async def _copy_dir( f"Can't copy '{from_path}' into read-only Folder '{to_path}'", ) from err - return model + return model # type:ignore[no-any-return] async def check_folder_size(self, path: str) -> None: """ @@ -1144,7 +1189,9 @@ async def _get_dir_size(self, path: str = ".") -> str: ).stdout.split() else: result = subprocess.run( - ["du", "-s", "--block-size=1", path], capture_output=True, check=True + ["du", "-s", "--block-size=1", path], + capture_output=True, + check=True, ).stdout.split() self.log.info(f"current status of du command {result}") diff --git a/jupyter_server/services/contents/handlers.py b/jupyter_server/services/contents/handlers.py index 15b3a5c920..a7c7ffff17 100644 --- a/jupyter_server/services/contents/handlers.py +++ b/jupyter_server/services/contents/handlers.py @@ -5,6 +5,8 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json +from http import HTTPStatus +from typing import Any, Dict, List try: from jupyter_client.jsonutil import json_default @@ -21,12 +23,35 @@ AUTH_RESOURCE = "contents" -def validate_model(model, expect_content): +def _validate_keys(expect_defined: bool, model: Dict[str, Any], keys: List[str]): + """ + Validate that the keys are defined (i.e. not None) or not (i.e. 
None) + """ + + if expect_defined: + errors = [key for key in keys if model[key] is None] + if errors: + raise web.HTTPError( + 500, + f"Keys unexpectedly None: {errors}", + ) + else: + errors = {key: model[key] for key in keys if model[key] is not None} # type: ignore[assignment] + if errors: + raise web.HTTPError( + 500, + f"Keys unexpectedly not None: {errors}", + ) + + +def validate_model(model, expect_content=False, expect_hash=False): """ Validate a model returned by a ContentsManager method. If expect_content is True, then we expect non-null entries for 'content' and 'format'. + + If expect_hash is True, then we expect non-null entries for 'hash' and 'hash_algorithm'. """ required_keys = { "name", @@ -39,6 +64,8 @@ def validate_model(model, expect_content): "content", "format", } + if expect_hash: + required_keys.update(["hash", "hash_algorithm"]) missing = required_keys - set(model.keys()) if missing: raise web.HTTPError( @@ -46,21 +73,10 @@ def validate_model(model, expect_content): f"Missing Model Keys: {missing}", ) - maybe_none_keys = ["content", "format"] - if expect_content: - errors = [key for key in maybe_none_keys if model[key] is None] - if errors: - raise web.HTTPError( - 500, - f"Keys unexpectedly None: {errors}", - ) - else: - errors = {key: model[key] for key in maybe_none_keys if model[key] is not None} # type: ignore[assignment] - if errors: - raise web.HTTPError( - 500, - f"Keys unexpectedly not None: {errors}", - ) + content_keys = ["content", "format"] + _validate_keys(expect_content, model, content_keys) + if expect_hash: + _validate_keys(expect_hash, model, ["hash", "hash_algorithm"]) class ContentsAPIHandler(APIHandler): @@ -91,6 +107,12 @@ def _finish_model(self, model, location=True): self.set_header("Content-Type", "application/json") self.finish(json.dumps(model, default=json_default)) + async def _finish_error(self, code, message): + """Finish a JSON request with an error code and descriptive message""" + self.set_status(code) + self.write(message) + await self.finish() + @web.authenticated @authorized async def get(self, path=""): @@ -115,19 +137,49 @@ async def get(self, path=""): raise web.HTTPError(400, "Content %r is invalid" % content_str) content = int(content_str or "") - if not cm.allow_hidden and await ensure_async(cm.is_hidden(path)): - raise web.HTTPError(404, f"file or directory {path!r} does not exist") + hash_str = self.get_query_argument("hash", default="0") + if hash_str not in {"0", "1"}: + raise web.HTTPError(400, f"Content {hash_str!r} is invalid") + require_hash = int(hash_str) - model = await ensure_async( - self.contents_manager.get( - path=path, - type=type, - format=format, - content=content, + if not cm.allow_hidden and await ensure_async(cm.is_hidden(path)): + await self._finish_error( + HTTPStatus.NOT_FOUND, f"file or directory {path!r} does not exist" ) - ) - validate_model(model, expect_content=content) - self._finish_model(model, location=False) + + try: + expect_hash = require_hash + try: + model = await ensure_async( + self.contents_manager.get( + path=path, + type=type, + format=format, + content=content, + require_hash=require_hash, + ) + ) + except TypeError: + # Fallback for ContentsManager not handling the require_hash argument + # introduced in 2.11 + expect_hash = False + model = await ensure_async( + self.contents_manager.get( + path=path, + type=type, + format=format, + content=content, + ) + ) + validate_model(model, expect_content=content, expect_hash=expect_hash) + self._finish_model(model, location=False) + 
except web.HTTPError as exc: + # 404 is okay in this context, catch exception and return 404 code to prevent stack trace on client + if exc.status_code == HTTPStatus.NOT_FOUND: + await self._finish_error( + HTTPStatus.NOT_FOUND, f"file or directory {path!r} does not exist" + ) + raise @web.authenticated @authorized @@ -149,7 +201,7 @@ async def patch(self, path=""): raise web.HTTPError(400, f"Cannot rename file or directory {path!r}") model = await ensure_async(cm.update(model, path)) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) async def _copy(self, copy_from, copy_to=None): @@ -162,7 +214,7 @@ async def _copy(self, copy_from, copy_to=None): ) model = await ensure_async(self.contents_manager.copy(copy_from, copy_to)) self.set_status(201) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) async def _upload(self, model, path): @@ -170,7 +222,7 @@ async def _upload(self, model, path): self.log.info("Uploading file to %s", path) model = await ensure_async(self.contents_manager.new(model, path)) self.set_status(201) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) async def _new_untitled(self, path, type="", ext=""): @@ -180,7 +232,7 @@ async def _new_untitled(self, path, type="", ext=""): self.contents_manager.new_untitled(path=path, type=type, ext=ext) ) self.set_status(201) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) async def _save(self, model, path): @@ -189,7 +241,7 @@ async def _save(self, model, path): if not chunk or chunk == -1: # Avoid tedious log information self.log.info("Saving file at %s", path) model = await ensure_async(self.contents_manager.save(model, path)) - validate_model(model, expect_content=False) + validate_model(model) self._finish_model(model) @web.authenticated @@ -340,7 +392,13 @@ async def delete(self, path, checkpoint_id): class NotebooksRedirectHandler(JupyterHandler): """Redirect /api/notebooks to /api/contents""" - SUPPORTED_METHODS = ("GET", "PUT", "PATCH", "POST", "DELETE") # type:ignore[assignment] + SUPPORTED_METHODS = ( + "GET", + "PUT", + "PATCH", + "POST", + "DELETE", + ) # type:ignore[assignment] def get(self, path): """Handle a notebooks redirect.""" diff --git a/jupyter_server/services/contents/manager.py b/jupyter_server/services/contents/manager.py index f4f70fc338..b12a2055ec 100644 --- a/jupyter_server/services/contents/manager.py +++ b/jupyter_server/services/contents/manager.py @@ -447,8 +447,16 @@ def exists(self, path): """ return self.file_exists(path) or self.dir_exists(path) - def get(self, path, content=True, type=None, format=None): - """Get a file or directory model.""" + def get(self, path, content=True, type=None, format=None, require_hash=False): + """Get a file or directory model. + + Parameters + ---------- + require_hash : bool + Whether the file hash must be returned or not. + + *Changed in version 2.11*: The *require_hash* parameter was added. + """ raise NotImplementedError def save(self, model, path): @@ -849,8 +857,16 @@ async def exists(self, path): self.dir_exists(path) ) - async def get(self, path, content=True, type=None, format=None): - """Get a file or directory model.""" + async def get(self, path, content=True, type=None, format=None, require_hash=False): + """Get a file or directory model. + + Parameters + ---------- + require_hash : bool + Whether the file hash must be returned or not. 
+ + *Changed in version 2.11*: The *require_hash* parameter was added. + """ raise NotImplementedError async def save(self, model, path): diff --git a/jupyter_server/services/events/handlers.py b/jupyter_server/services/events/handlers.py index 0ba11c0985..1ca28b948c 100644 --- a/jupyter_server/services/events/handlers.py +++ b/jupyter_server/services/events/handlers.py @@ -2,11 +2,14 @@ .. versionadded:: 2.0 """ +from __future__ import annotations + import json from datetime import datetime from typing import Any, Dict, Optional, cast import jupyter_events.logger +from jupyter_core.utils import ensure_async from tornado import web, websocket from jupyter_server.auth.decorator import authorized @@ -25,7 +28,7 @@ class SubscribeWebsocket( auth_resource = AUTH_RESOURCE - def pre_get(self): + async def pre_get(self): """Handles authentication/authorization when attempting to subscribe to events emitted by Jupyter Server's eventbus. @@ -37,18 +40,21 @@ def pre_get(self): raise web.HTTPError(403) # authorize the user. - if not self.authorizer.is_authorized(self, user, "execute", "events"): + authorized = await ensure_async( + self.authorizer.is_authorized(self, user, "execute", "events") + ) + if not authorized: raise web.HTTPError(403) async def get(self, *args, **kwargs): """Get an event socket.""" - self.pre_get() + await ensure_async(self.pre_get()) res = super().get(*args, **kwargs) if res is not None: await res async def event_listener( - self, logger: jupyter_events.logger.EventLogger, schema_id: str, data: dict + self, logger: jupyter_events.logger.EventLogger, schema_id: str, data: dict[str, Any] ) -> None: """Write an event message.""" capsule = dict(schema_id=schema_id, **data) @@ -65,7 +71,7 @@ def on_close(self): self.event_logger.remove_listener(listener=self.event_listener) -def validate_model(data: Dict[str, Any]) -> None: +def validate_model(data: dict[str, Any]) -> None: """Validates for required fields in the JSON request body""" required_keys = {"schema_id", "version", "data"} for key in required_keys: @@ -73,7 +79,7 @@ def validate_model(data: Dict[str, Any]) -> None: raise web.HTTPError(400, f"Missing `{key}` in the JSON request body.") -def get_timestamp(data: Dict[str, Any]) -> Optional[datetime]: +def get_timestamp(data: dict[str, Any]) -> Optional[datetime]: """Parses timestamp from the JSON request body""" try: if "timestamp" in data: diff --git a/jupyter_server/services/kernels/connection/abc.py b/jupyter_server/services/kernels/connection/abc.py index 4bdf6e3edc..71f9e8254f 100644 --- a/jupyter_server/services/kernels/connection/abc.py +++ b/jupyter_server/services/kernels/connection/abc.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Any +from typing import Any, List class KernelWebsocketConnectionABC(ABC): @@ -15,19 +15,15 @@ class KernelWebsocketConnectionABC(ABC): @abstractmethod async def connect(self): """Connect the kernel websocket to the kernel ZMQ connections""" - ... @abstractmethod async def disconnect(self): """Disconnect the kernel websocket from the kernel ZMQ connections""" - ... @abstractmethod def handle_incoming_message(self, incoming_msg: str) -> None: """Broker the incoming websocket message to the appropriate ZMQ channel.""" - ... @abstractmethod - def handle_outgoing_message(self, stream: str, outgoing_msg: list) -> None: + def handle_outgoing_message(self, stream: str, outgoing_msg: List[Any]) -> None: """Broker outgoing ZMQ messages to the kernel websocket.""" - ... 
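
The events websocket handler above (and the kernel websocket handler further below) now awaits `authorizer.is_authorized(...)` through `ensure_async`, so an `Authorizer` subclass may implement `is_authorized` as a coroutine. A minimal sketch of such an authorizer follows; the class name and the simulated policy check (`asyncio.sleep`) are illustrative only and not part of jupyter_server:

```python
import asyncio

from jupyter_server.auth.authorizer import Authorizer


class SlowPolicyAuthorizer(Authorizer):
    """Illustrative async authorizer; the server awaits is_authorized via ensure_async."""

    async def is_authorized(self, handler, user, action, resource) -> bool:
        # Stand-in for a round trip to an external policy service.
        await asyncio.sleep(0.1)
        # Allow everything except writes to the "events" resource.
        return not (resource == "events" and action == "write")
```

As in the `AsyncAuthorizerTest` added later in this diff, such a class can be wired up through the server config, e.g. `{"ServerApp": {"authorizer_class": SlowPolicyAuthorizer}}`.
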
diff --git a/jupyter_server/services/kernels/connection/base.py b/jupyter_server/services/kernels/connection/base.py index 0f731f354f..1f6b2fdcf4 100644 --- a/jupyter_server/services/kernels/connection/base.py +++ b/jupyter_server/services/kernels/connection/base.py @@ -1,6 +1,7 @@ """Kernel connection helpers.""" import json import struct +from typing import Any, List from jupyter_client.session import Session from tornado.websocket import WebSocketHandler @@ -87,7 +88,7 @@ def serialize_msg_to_ws_v1(msg_or_list, channel, pack=None): else: msg_list = msg_or_list channel = channel.encode("utf-8") - offsets: list = [] + offsets: List[Any] = [] offsets.append(8 * (1 + 1 + len(msg_list) + 1)) offsets.append(len(channel) + offsets[-1]) for msg in msg_list: @@ -171,7 +172,7 @@ def handle_incoming_message(self, incoming_msg: str) -> None: """Handle an incoming message.""" raise NotImplementedError() - def handle_outgoing_message(self, stream: str, outgoing_msg: list) -> None: + def handle_outgoing_message(self, stream: str, outgoing_msg: List[Any]) -> None: """Handle an outgoing message.""" raise NotImplementedError() diff --git a/jupyter_server/services/kernels/connection/channels.py b/jupyter_server/services/kernels/connection/channels.py index d8a84db47f..05b9f6954e 100644 --- a/jupyter_server/services/kernels/connection/channels.py +++ b/jupyter_server/services/kernels/connection/channels.py @@ -99,8 +99,8 @@ def write_message(self): _open_sessions: dict[str, KernelWebsocketHandler] = {} _open_sockets: t.MutableSet[ZMQChannelsWebsocketConnection] = weakref.WeakSet() - _kernel_info_future: Future - _close_future: Future + _kernel_info_future: Future[t.Any] + _close_future: Future[t.Any] channels = Dict({}) kernel_info_channel = Any(allow_none=True) @@ -154,7 +154,7 @@ def create_stream(self): self.channels[channel] = stream = meth(identity=identity) stream.channel = channel - def nudge(self): # noqa + def nudge(self): """Nudge the zmq connections with kernel_info_requests Returns a Future that will resolve when we have received a shell or control reply and at least one iopub message, @@ -170,7 +170,7 @@ def nudge(self): # noqa # establishing its zmq subscriptions before processing the next request. if getattr(self.kernel_manager, "execution_state", None) == "busy": self.log.debug("Nudge: not nudging busy kernel %s", self.kernel_id) - f: Future = Future() + f: Future[t.Any] = Future() f.set_result(None) return _ensure_future(f) # Use a transient shell channel to prevent leaking @@ -182,8 +182,8 @@ def nudge(self): # noqa # The IOPub used by the client, whose subscriptions we are verifying. 
iopub_channel = self.channels["iopub"] - info_future: Future = Future() - iopub_future: Future = Future() + info_future: Future[t.Any] = Future() + iopub_future: Future[t.Any] = Future() both_done = gen.multi([info_future, iopub_future]) def finish(_=None): @@ -376,7 +376,7 @@ def replay(value): if not stream.closed(): stream.close() self.disconnect() - return + return None self.multi_kernel_manager.add_restart_callback(self.kernel_id, self.on_kernel_restarted) self.multi_kernel_manager.add_restart_callback( @@ -438,7 +438,7 @@ def disconnect(self): try: ZMQChannelsWebsocketConnection._open_sockets.remove(self) self._close_future.set_result(None) - except Exception: # noqa + except Exception: pass def handle_incoming_message(self, incoming_msg: str) -> None: @@ -486,7 +486,7 @@ def handle_incoming_message(self, incoming_msg: str) -> None: else: self.session.send(stream, msg) - def handle_outgoing_message(self, stream: str, outgoing_msg: list) -> None: + def handle_outgoing_message(self, stream: str, outgoing_msg: list[t.Any]) -> None: """Handle the outgoing messages from ZMQ sockets to Websocket.""" msg_list = outgoing_msg _, fed_msg_list = self.session.feed_identities(msg_list) diff --git a/jupyter_server/services/kernels/handlers.py b/jupyter_server/services/kernels/handlers.py index 2fb753e93c..217f0c9cc2 100644 --- a/jupyter_server/services/kernels/handlers.py +++ b/jupyter_server/services/kernels/handlers.py @@ -5,7 +5,6 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. import json -from traceback import format_tb try: from jupyter_client.jsonutil import json_default @@ -102,8 +101,7 @@ async def post(self, kernel_id, action): except Exception as e: message = "Exception restarting kernel" self.log.error(message, exc_info=True) - traceback = format_tb(e.__traceback__) - self.write(json.dumps({"message": message, "traceback": traceback})) + self.write(json.dumps({"message": message, "traceback": ""})) self.set_status(500) else: model = await ensure_async(km.kernel_model(kernel_id)) diff --git a/jupyter_server/services/kernels/kernelmanager.py b/jupyter_server/services/kernels/kernelmanager.py index fb3608d0b6..451a279a4e 100644 --- a/jupyter_server/services/kernels/kernelmanager.py +++ b/jupyter_server/services/kernels/kernelmanager.py @@ -373,7 +373,7 @@ def get_buffer(self, kernel_id, session_key): """ self.log.debug("Getting buffer for %s", kernel_id) if kernel_id not in self._kernel_buffers: - return + return None buffer_info = self._kernel_buffers[kernel_id] if buffer_info["session_key"] == session_key: @@ -440,7 +440,7 @@ async def _async_restart_kernel(self, kernel_id, now=False): kernel = self.get_kernel(kernel_id) # return a Future that will resolve when the kernel has successfully restarted channel = kernel.connect_shell() - future: Future = Future() + future: Future[Any] = Future() def finish(): """Common cleanup when restart finishes/fails for any reason.""" @@ -710,7 +710,7 @@ def __init__(self, **kwargs): self.last_kernel_activity = utcnow() -def emit_kernel_action_event(success_msg: str = "") -> t.Callable: +def emit_kernel_action_event(success_msg: str = "") -> t.Callable[..., t.Any]: """Decorate kernel action methods to begin emitting jupyter kernel action events. @@ -802,7 +802,7 @@ def core_event_schema_paths(self) -> list[pathlib.Path]: # This trait is intended for subclasses to override and define # custom event schemas. 
- extra_event_schema_paths: List[str] = List( # type:ignore[assignment] + extra_event_schema_paths: List[str] = List( default_value=[], help=""" A list of pathlib.Path objects pointing at to register with diff --git a/jupyter_server/services/kernels/websocket.py b/jupyter_server/services/kernels/websocket.py index 753ab01e26..4c2c1c8914 100644 --- a/jupyter_server/services/kernels/websocket.py +++ b/jupyter_server/services/kernels/websocket.py @@ -2,6 +2,7 @@ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. +from jupyter_core.utils import ensure_async from tornado import web from tornado.websocket import WebSocketHandler @@ -26,7 +27,6 @@ def set_default_headers(self): which doesn't make sense for websockets """ - pass def get_compression_options(self): """Get the socket connection options.""" @@ -41,7 +41,10 @@ async def pre_get(self): raise web.HTTPError(403) # authorize the user. - if not self.authorizer.is_authorized(self, user, "execute", "kernels"): + authorized = await ensure_async( + self.authorizer.is_authorized(self, user, "execute", "kernels") + ) + if not authorized: raise web.HTTPError(403) kernel = self.kernel_manager.get_kernel(self.kernel_id) diff --git a/jupyter_server/services/sessions/sessionmanager.py b/jupyter_server/services/sessions/sessionmanager.py index b20f4c98de..5f3a19c71d 100644 --- a/jupyter_server/services/sessions/sessionmanager.py +++ b/jupyter_server/services/sessions/sessionmanager.py @@ -5,7 +5,7 @@ import os import pathlib import uuid -from typing import Any, Dict, List, NewType, Optional, Union +from typing import Any, Dict, List, NewType, Optional, Union, cast KernelName = NewType("KernelName", str) ModelName = NewType("ModelName", str) @@ -31,8 +31,6 @@ class KernelSessionRecordConflict(Exception): merge because of conflicting data. """ - pass - @dataclass class KernelSessionRecord: @@ -293,7 +291,7 @@ async def create_session( session_id, path=path, name=name, type=type, kernel_id=kernel_id ) self._pending_sessions.remove(record) - return result + return cast(Dict[str, Any], result) def get_kernel_env( self, path: Optional[str], name: Optional[ModelName] = None @@ -347,7 +345,7 @@ async def start_kernel_for_session( kernel_name=kernel_name, env=kernel_env, ) - return kernel_id + return cast(str, kernel_id) async def save_session(self, session_id, path=None, name=None, type=None, kernel_id=None): """Saves the items for the session with the given session_id @@ -410,7 +408,7 @@ async def get_session(self, **kwargs): raise TypeError(msg) conditions.append("%s=?" % column) - query = "SELECT * FROM session WHERE %s" % (" AND ".join(conditions)) # noqa + query = "SELECT * FROM session WHERE %s" % (" AND ".join(conditions)) self.cursor.execute(query, list(kwargs.values())) try: @@ -458,7 +456,7 @@ async def update_session(self, session_id, **kwargs): if column not in self._columns: raise TypeError("No such column: %r" % column) sets.append("%s=?" % column) - query = "UPDATE session SET %s WHERE session_id=?" % (", ".join(sets)) # noqa + query = "UPDATE session SET %s WHERE session_id=?" 
% (", ".join(sets)) self.cursor.execute(query, [*list(kwargs.values()), session_id]) if hasattr(self.kernel_manager, "update_env"): @@ -492,7 +490,7 @@ async def row_to_model(self, row, tolerate_culled=False): ) if tolerate_culled: self.log.warning(f"{msg} Continuing...") - return + return None raise KeyError(msg) kernel_model = await ensure_async(self.kernel_manager.kernel_model(row["kernel_id"])) diff --git a/jupyter_server/terminal/__init__.py b/jupyter_server/terminal/__init__.py index 641bad66b7..0dd1533c6a 100644 --- a/jupyter_server/terminal/__init__.py +++ b/jupyter_server/terminal/__init__.py @@ -2,9 +2,9 @@ import warnings # Shims -from jupyter_server_terminals import api_handlers # noqa -from jupyter_server_terminals.handlers import TermSocket # noqa -from jupyter_server_terminals.terminalmanager import TerminalManager # noqa +from jupyter_server_terminals import api_handlers +from jupyter_server_terminals.handlers import TermSocket +from jupyter_server_terminals.terminalmanager import TerminalManager warnings.warn( "Terminals support has moved to `jupyter_server_terminals`", diff --git a/jupyter_server/terminal/handlers.py b/jupyter_server/terminal/handlers.py index 23e19ee355..f3da8aa91c 100644 --- a/jupyter_server/terminal/handlers.py +++ b/jupyter_server/terminal/handlers.py @@ -1,4 +1,4 @@ """Tornado handlers for the terminal emulator.""" # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -from jupyter_server_terminals.handlers import TermSocket # noqa +from jupyter_server_terminals.handlers import TermSocket diff --git a/jupyter_server/terminal/terminalmanager.py b/jupyter_server/terminal/terminalmanager.py index d2bef723f8..6f9ada1b71 100644 --- a/jupyter_server/terminal/terminalmanager.py +++ b/jupyter_server/terminal/terminalmanager.py @@ -4,4 +4,4 @@ """ # Copyright (c) Jupyter Development Team. # Distributed under the terms of the Modified BSD License. -from jupyter_server_terminals.terminalmanager import TerminalManager # noqa +from jupyter_server_terminals.terminalmanager import TerminalManager diff --git a/jupyter_server/traittypes.py b/jupyter_server/traittypes.py index bd6f28a36b..f17f3a0a24 100644 --- a/jupyter_server/traittypes.py +++ b/jupyter_server/traittypes.py @@ -6,7 +6,7 @@ from traitlets.utils.descriptions import describe -class TypeFromClasses(ClassBasedTraitType): +class TypeFromClasses(ClassBasedTraitType): # type:ignore[type-arg] """A trait whose value must be a subclass of a class in a specified list of classes.""" default_value: Any @@ -76,7 +76,7 @@ def validate(self, obj, value): try: if self.subclass_from_klasses(value): return value - except Exception: # noqa + except Exception: pass self.error(obj, value) @@ -86,10 +86,10 @@ def info(self): result = "a subclass of " for klass in self.klasses: if not isinstance(klass, str): - klass = klass.__module__ + "." + klass.__name__ # noqa + klass = klass.__module__ + "." + klass.__name__ # noqa: PLW2901 result += f"{klass} or " # Strip the last "or" - result = result.strip(" or ") # noqa + result = result.strip(" or ") # noqa: B005 if self.allow_none: return result + " or None" return result @@ -106,9 +106,9 @@ def _resolve_classes(self): if isinstance(klass, str): # Try importing the classes to compare. Silently, ignore if not importable. 
try: - klass = self._resolve_string(klass) # noqa + klass = self._resolve_string(klass) # noqa: PLW2901 self.importable_klasses.append(klass) - except Exception: # noqa + except Exception: pass else: self.importable_klasses.append(klass) @@ -125,7 +125,7 @@ def default_value_repr(self): return repr(f"{value.__module__}.{value.__name__}") -class InstanceFromClasses(ClassBasedTraitType): +class InstanceFromClasses(ClassBasedTraitType): # type:ignore[type-arg] """A trait whose value must be an instance of a class in a specified list of classes. The value can also be an instance of a subclass of the specified classes. Subclasses can declare default classes by overriding the klass attribute @@ -156,7 +156,7 @@ class or its subclasses. Our implementation is quite different None, the None is replaced by ``()`` or ``{}``, respectively. """ # If class - if klasses is None: # noqa + if klasses is None: # noqa: SIM114 self.klasses = klasses # Verify all elements are either classes or strings. elif all(inspect.isclass(k) or isinstance(k, str) for k in klasses): @@ -200,7 +200,7 @@ def info(self): else: result += describe("a", klass) result += " or " - result = result.strip(" or ") # noqa + result = result.strip(" or ") # noqa: B005 if self.allow_none: result += " or None" return result @@ -218,9 +218,9 @@ def _resolve_classes(self): if isinstance(klass, str): # Try importing the classes to compare. Silently, ignore if not importable. try: - klass = self._resolve_string(klass) # noqa + klass = self._resolve_string(klass) # noqa: PLW2901 self.importable_klasses.append(klass) - except Exception: # noqa + except Exception: pass else: self.importable_klasses.append(klass) diff --git a/jupyter_server/utils.py b/jupyter_server/utils.py index 5801eb5f18..2a4c185d97 100644 --- a/jupyter_server/utils.py +++ b/jupyter_server/utils.py @@ -157,7 +157,7 @@ def check_version(v: str, check: str) -> bool: Users on dev branches are responsible for keeping their own packages up to date. """ try: - return Version(v) >= Version(check) + return bool(Version(v) >= Version(check)) except TypeError: return True @@ -382,7 +382,7 @@ def filefind(filename: str, path_dirs: Sequence[str] | str | None = None) -> str for path in path_dirs: if path == ".": - path = os.getcwd() # noqa + path = os.getcwd() # noqa: PLW2901 testname = expand_path(os.path.join(path, filename)) if os.path.isfile(testname): return os.path.abspath(testname) @@ -427,7 +427,7 @@ def import_item(name: str) -> Any: """ parts = name.rsplit(".", 1) - if len(parts) == 2: # noqa + if len(parts) == 2: # called with 'foo.bar....' 
package, obj = parts module = __import__(package, fromlist=[obj]) diff --git a/pyproject.toml b/pyproject.toml index 523e2fbab9..1787b32168 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ dependencies = [ "tornado>=6.2.0", "traitlets>=5.6.0", "websocket-client", - "jupyter_events>=0.6.0", + "jupyter_events>=0.9.0", "overrides" ] @@ -144,68 +144,49 @@ line-length = 100 [tool.ruff.lint] select = [ - "A", - "B", - "C", - "DTZ", - "E", - "EM", - "F", - "FBT", - "I", - "ICN", - "N", - "PLC", - "PLE", - "PLR", - "PLW", - "Q", - "RUF", - "S", - "SIM", - "T", - "TID", - "UP", - "W", - "YTT", + "B", # flake8-bugbear + "I", # isort + "C4", # flake8-comprehensions + "EM", # flake8-errmsg + "ICN", # flake8-import-conventions + "PGH", # pygrep-hooks + "PIE", # flake8-pie + "PL", # pylint + "PT", # flake8-pytest-style + "RET", # flake8-return + "RUF", # Ruff-specific + "SIM", # flake8-simplify + "T20", # flake8-print + "UP", # pyupgrade + "YTT", # flake8-2020 + "EXE", # flake8-executable + "NPY", # NumPy specific rules + "PD", # pandas-vet + "PYI", # flake8-pyi ] ignore = [ # Allow non-abstract empty methods in abstract base classes "B027", - # Ignore McCabe complexity - "C901", - # Allow boolean positional values in function calls, like `dict.get(... True)` - "FBT003", # Use of `assert` detected "S101", - # Line too long - "E501", - # Relative imports are banned - "TID252", - # Boolean ... in function definition - "FBT001", - "FBT002", - # Module level import not at top of file - "E402", - # A001/A002/A003 .. is shadowing a python builtin - "A001", - "A002", - "A003", + # Use `contextlib.suppress(SchemaRegistryException)` instead of `try`-`except`-`pass` + "SIM105", + # Missing explicit `return` at the end of function able to return non-`None` value + "RET503" , + # Unnecessary assignment to + "RET504", + # Unnecessary `else` after `return` statement + "RET505", + # Unnecessary `elif` after `raise` statement + "RET506", # Possible hardcoded password "S105", "S106", - # Variable `xxx` in function should be lowercase - "N806", - # Exception name `KernelSessionRecordConflict` should be named with an Error suffix - "N818", - # SIM105 Use `contextlib.suppress(...)` - "SIM105", - # PLR0913 Too many arguments to function call - "PLR0913", - # PLR0912 Too many branches - "PLR0912", + "PLR", # Design related pylint codes # RUF012 Mutable class attributes should be annotated with `typing.ClassVar` "RUF012", + # Use `X | Y` for type annotations + "UP007", ] unfixable = [ # Don't touch print statements @@ -220,13 +201,10 @@ unfixable = [ # B011 Do not call assert False since python -O removes these calls # F841 local variable 'foo' is assigned to but never used # C408 Unnecessary `dict` call -# E402 Module level import not at top of file -# T201 `print` found -# EM101 Exception must not use a string literal -# PLR2004 Magic value used in comparison # S108 Probable insecure usage of temporary file or directory # PLC1901 `ext_pkg.version == ""` can be simplified to `not ext_pkg.version` as an empty string is falsey -"tests/*" = ["B011", "F841", "C408", "E402", "T201", "EM101", "EM102", "EM103", "PLR2004", "S108", "PLC1901"] +# B018 Found useless expression +"tests/*" = ["B011", "F841", "EM", "C", "T201", "S108", "PLC1901", "PTH", "ARG", "PT", "RET", "G", "PLW", "B018"] # print should be used in applications "**/*app.py" = ["T201"] # Ignore flake 8 errors from shimmed imports @@ -265,6 +243,7 @@ filterwarnings = [ "ignore:jupyter_server.base.zmqhandlers module is deprecated in Jupyter Server 
2.0:DeprecationWarning", "ignore:datetime.datetime.utc:DeprecationWarning:dateutil", "ignore:datetime.datetime.utc:DeprecationWarning:tornado", + "module:add_callback_from_signal is deprecated:DeprecationWarning", ] [tool.coverage.report] @@ -298,9 +277,8 @@ python_version = "3.8" explicit_package_bases = true strict = true pretty = true -show_error_codes = true warn_unreachable = true -disable_error_code = ["no-untyped-def", "no-untyped-call", "type-arg", "no-any-return"] +disable_error_code = ["no-untyped-def", "no-untyped-call"] enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] [tool.interrogate] @@ -317,4 +295,4 @@ exclude = ["docs", "test"] ignore = ["W002"] [tool.repo-review] -ignore = ["PY007", "GH102"] +ignore = ["GH102"] diff --git a/tests/auth/test_authorizer.py b/tests/auth/test_authorizer.py index 86e2184307..a02237aaa0 100644 --- a/tests/auth/test_authorizer.py +++ b/tests/auth/test_authorizer.py @@ -1,16 +1,22 @@ """Tests for authorization""" +import asyncio import json import os +from typing import Awaitable import pytest from jupyter_client.kernelspec import NATIVE_KERNEL_NAME from nbformat import writes from nbformat.v4 import new_notebook +from traitlets import Bool +from jupyter_server.auth.authorizer import Authorizer +from jupyter_server.auth.identity import User +from jupyter_server.base.handlers import JupyterHandler from jupyter_server.services.security import csp_report_uri -@pytest.fixture +@pytest.fixture() def jp_server_config(jp_server_authorizer): return { "ServerApp": {"authorizer_class": jp_server_authorizer}, @@ -18,7 +24,7 @@ def jp_server_config(jp_server_authorizer): } -@pytest.fixture +@pytest.fixture() def jp_server_auth_resources(jp_server_auth_core_resources): # terminal plugin doesn't have importable url patterns # get these from terminal/__init__.py @@ -217,3 +223,45 @@ async def test_authorized_requests( code = await send_request(url, body=body, method=method) assert code in expected_codes + + +class AsyncAuthorizerTest(Authorizer): + """Test that an asynchronous authorizer would still work.""" + + called = Bool(False) + + async def mock_async_fetch(self) -> True: + """Mock an async fetch""" + # Mock a hang for a half a second. + await asyncio.sleep(0.5) + return True + + async def is_authorized( + self, handler: JupyterHandler, user: User, action: str, resource: str + ) -> Awaitable[bool]: + response = await self.mock_async_fetch() + self.called = True + return response + + +@pytest.mark.parametrize( + "jp_server_config,", + [ + { + "ServerApp": {"authorizer_class": AsyncAuthorizerTest}, + "jpserver_extensions": {"jupyter_server_terminals": True}, + } + ], +) +async def test_async_authorizer( + request, + io_loop, + send_request, + tmp_path, + jp_serverapp, +): + code = await send_request("/api/status", method="GET") + assert code == 200 + # Ensure that the authorizor method finished its request. 
+ assert hasattr(jp_serverapp.authorizer, "called") + assert jp_serverapp.authorizer.called is True diff --git a/tests/auth/test_identity.py b/tests/auth/test_identity.py index 9c4010f445..6f3af07060 100644 --- a/tests/auth/test_identity.py +++ b/tests/auth/test_identity.py @@ -113,7 +113,7 @@ def test_user_defaults(fields, expected): assert value is None or isinstance(value, str) -@pytest.fixture +@pytest.fixture() def identity_provider_class(): """Allow override in other test modules""" return PasswordIdentityProvider diff --git a/tests/auth/test_legacy_login.py b/tests/auth/test_legacy_login.py index ba49ff53f2..be139fe707 100644 --- a/tests/auth/test_legacy_login.py +++ b/tests/auth/test_legacy_login.py @@ -13,8 +13,8 @@ from jupyter_server.serverapp import ServerApp # re-run some login tests with legacy login config -from .test_identity import test_password_required, test_validate_security # noqa -from .test_login import login, test_change_password, test_login_cookie, test_logout # noqa +from .test_identity import test_password_required, test_validate_security +from .test_login import login, test_change_password, test_login_cookie, test_logout # Don't raise on deprecation warnings in this module testing deprecated behavior pytestmark = pytest.mark.filterwarnings("ignore::DeprecationWarning") @@ -32,19 +32,19 @@ def get_user(cls, handler): return None -@pytest.fixture +@pytest.fixture() def login_headers(): return {"test-user": "super"} -@pytest.fixture +@pytest.fixture() def jp_server_config(): cfg = Config() cfg.ServerApp.login_handler_class = CustomLoginHandler return cfg -@pytest.fixture +@pytest.fixture() def identity_provider_class(): # for tests imported from test_identity.py return LegacyIdentityProvider diff --git a/tests/auth/test_login.py b/tests/auth/test_login.py index d91ec5ad06..7aad3129ca 100644 --- a/tests/auth/test_login.py +++ b/tests/auth/test_login.py @@ -11,12 +11,12 @@ # override default config to ensure a non-empty base url is used -@pytest.fixture +@pytest.fixture() def jp_base_url(): return "/a%40b/" -@pytest.fixture +@pytest.fixture() def jp_server_config(jp_base_url): return { "ServerApp": { @@ -66,7 +66,7 @@ async def _login( return resp -@pytest.fixture +@pytest.fixture() def login_headers(): """Extra headers to pass to login @@ -75,13 +75,13 @@ def login_headers(): return {} -@pytest.fixture +@pytest.fixture() def login(jp_serverapp, http_server_client, jp_base_url, login_headers): """Fixture to return a function to login to a Jupyter server by submitting the login page form """ - yield partial(_login, jp_serverapp, http_server_client, jp_base_url, login_headers) + return partial(_login, jp_serverapp, http_server_client, jp_base_url, login_headers) @pytest.mark.parametrize( diff --git a/tests/base/test_websocket.py b/tests/base/test_websocket.py index 22751c059a..ee6ee3ee62 100644 --- a/tests/base/test_websocket.py +++ b/tests/base/test_websocket.py @@ -18,7 +18,7 @@ class MockHandler(WebSocketMixin, WebSocketHandler): log = logging.getLogger() -@pytest.fixture +@pytest.fixture() def mixin(jp_serverapp): app: ServerApp = jp_serverapp headers = HTTPHeaders({"Host": "foo"}) diff --git a/tests/conftest.py b/tests/conftest.py index 9b43e0f532..f50aa797db 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -62,18 +62,18 @@ def pytest_runtest_setup(item): """ -@pytest.fixture +@pytest.fixture() def mock_template(jp_template_dir): index = jp_template_dir.joinpath("index.html") index.write_text(mock_html) -@pytest.fixture +@pytest.fixture() def 
extension_manager(jp_serverapp): return jp_serverapp.extension_manager -@pytest.fixture +@pytest.fixture() def config_file(jp_config_dir): """""" f = jp_config_dir.joinpath("jupyter_mockextension_config.py") @@ -87,7 +87,7 @@ def jp_mockextension_cleanup(): MockExtensionApp.clear_instance() -@pytest.fixture +@pytest.fixture() def contents_dir(tmp_path, jp_serverapp): return tmp_path / jp_serverapp.root_dir @@ -108,7 +108,7 @@ def contents_dir(tmp_path, jp_serverapp): ] -@pytest.fixture +@pytest.fixture() def contents(contents_dir): # Create files in temporary directory paths: dict = {"notebooks": [], "textfiles": [], "blobs": [], "contents_dir": contents_dir} @@ -137,6 +137,6 @@ def contents(contents_dir): return paths -@pytest.fixture +@pytest.fixture() def folders(): return list({item[0] for item in dirs}) diff --git a/tests/extension/mockextensions/app.py b/tests/extension/mockextensions/app.py index 0cd45b26d3..26f38464cd 100644 --- a/tests/extension/mockextensions/app.py +++ b/tests/extension/mockextensions/app.py @@ -65,7 +65,6 @@ def initialize_settings(self): elogger.register_event_schema(EVENT_SCHEMA) except SchemaRegistryException as err: self.log.error(err) - pass def initialize_handlers(self): self.handlers.append(("/mock", MockExtensionHandler)) diff --git a/tests/extension/test_app.py b/tests/extension/test_app.py index 6c7af22fe2..de52924df3 100644 --- a/tests/extension/test_app.py +++ b/tests/extension/test_app.py @@ -11,7 +11,7 @@ from .mockextensions.app import MockExtensionApp -@pytest.fixture +@pytest.fixture() def jp_server_config(jp_template_dir): config = { "ServerApp": { @@ -25,7 +25,7 @@ def jp_server_config(jp_template_dir): return config -@pytest.fixture +@pytest.fixture() def mock_extension(extension_manager): name = "tests.extension.mockextensions" pkg = extension_manager.extensions[name] diff --git a/tests/extension/test_config.py b/tests/extension/test_config.py index 5667c3efae..ac7a75aedc 100644 --- a/tests/extension/test_config.py +++ b/tests/extension/test_config.py @@ -9,7 +9,7 @@ pytestmark = pytest.mark.usefixtures("jp_environ") -@pytest.fixture +@pytest.fixture() def configd(jp_env_config_path): """A pathlib.Path object that acts like a jupyter_server_config.d folder.""" configd = jp_env_config_path.joinpath("jupyter_server_config.d") @@ -28,7 +28,7 @@ def configd(jp_env_config_path): """ -@pytest.fixture +@pytest.fixture() def ext1_config(configd): config = configd.joinpath("ext1_config.json") config.write_text(ext1_json_config) @@ -45,7 +45,7 @@ def ext1_config(configd): """ -@pytest.fixture +@pytest.fixture() def ext2_config(configd): config = configd.joinpath("ext2_config.json") config.write_text(ext2_json_config) diff --git a/tests/extension/test_handler.py b/tests/extension/test_handler.py index 870e311b4a..3151cf2b4d 100644 --- a/tests/extension/test_handler.py +++ b/tests/extension/test_handler.py @@ -1,7 +1,7 @@ import pytest -@pytest.fixture +@pytest.fixture() def jp_server_config(jp_template_dir): return { "ServerApp": {"jpserver_extensions": {"tests.extension.mockextensions": True}}, diff --git a/tests/extension/test_launch.py b/tests/extension/test_launch.py index 86336efe21..de94607187 100644 --- a/tests/extension/test_launch.py +++ b/tests/extension/test_launch.py @@ -14,17 +14,17 @@ HERE = os.path.dirname(os.path.abspath(__file__)) -@pytest.fixture +@pytest.fixture() def port(): return 9999 -@pytest.fixture +@pytest.fixture() def token(): return hexlify(os.urandom(4)).decode("ascii") -@pytest.fixture +@pytest.fixture() def 
auth_header(token): return {"Authorization": "token %s" % token} @@ -32,7 +32,7 @@ def auth_header(token): def wait_up(url, interval=0.1, check=None): while True: try: - r = requests.get(url) # noqa + r = requests.get(url) except Exception: if check: assert check() @@ -42,7 +42,7 @@ def wait_up(url, interval=0.1, check=None): break -@pytest.fixture +@pytest.fixture() def launch_instance(request, port, token): def _run_in_subprocess(argv=None, add_token=True): argv = argv or [] @@ -66,7 +66,7 @@ def _kill_extension_app(): root = Path(HERE).parent.parent process = subprocess.Popen( - [ # noqa + [ sys.executable, "-m", "tests.extension.mockextensions.app", @@ -86,11 +86,11 @@ def _kill_extension_app(): return _run_in_subprocess -@pytest.fixture +@pytest.fixture() def fetch(port, auth_header): def _get(endpoint): url = f"http://127.0.0.1:{port}" + endpoint - return requests.get(url, headers=auth_header) # noqa + return requests.get(url, headers=auth_header) return _get diff --git a/tests/extension/test_manager.py b/tests/extension/test_manager.py index 9ca42d95ef..6e48b65df4 100644 --- a/tests/extension/test_manager.py +++ b/tests/extension/test_manager.py @@ -62,7 +62,7 @@ def test_extension_package_api(): app = path1["app"] e = ExtensionPackage(name="tests.extension.mockextensions", enabled=True) - e.extension_points # noqa + e.extension_points assert hasattr(e, "extension_points") assert len(e.extension_points) == len(metadata_list) assert app.name in e.extension_points diff --git a/tests/nbconvert/test_handlers.py b/tests/nbconvert/test_handlers.py index f14fde35a2..1805902e76 100644 --- a/tests/nbconvert/test_handlers.py +++ b/tests/nbconvert/test_handlers.py @@ -16,7 +16,7 @@ ).decode("ascii") -@pytest.fixture +@pytest.fixture() def notebook(jp_root_dir): # Build sub directory. 
subdir = jp_root_dir / "foo" diff --git a/tests/services/api/test_api.py b/tests/services/api/test_api.py index 900280f67d..f013dcfcd8 100644 --- a/tests/services/api/test_api.py +++ b/tests/services/api/test_api.py @@ -59,7 +59,7 @@ def is_authorized(self, handler, user, action, resource): return action in actions -@pytest.fixture +@pytest.fixture() def identity_provider(jp_serverapp): idp = MockIdentityProvider(parent=jp_serverapp) authorizer = MockAuthorizer(parent=jp_serverapp) diff --git a/tests/services/contents/test_api.py b/tests/services/contents/test_api.py index 746c663345..b74ee8f62a 100644 --- a/tests/services/contents/test_api.py +++ b/tests/services/contents/test_api.py @@ -97,11 +97,28 @@ async def test_get_nb_contents(jp_fetch, contents, path, name): assert model["path"] == nbpath assert model["type"] == "notebook" assert "content" in model + assert model["hash"] is None + assert model["hash_algorithm"] is None assert model["format"] == "json" assert "metadata" in model["content"] assert isinstance(model["content"]["metadata"], dict) +@pytest.mark.parametrize("path,name", dirs) +async def test_get_nb_hash(jp_fetch, contents, path, name): + nbname = name + ".ipynb" + nbpath = (path + "/" + nbname).lstrip("/") + r = await jp_fetch("api", "contents", nbpath, method="GET", params=dict(hash="1")) + model = json.loads(r.body.decode()) + assert model["name"] == nbname + assert model["path"] == nbpath + assert model["type"] == "notebook" + assert model["hash"] + assert model["hash_algorithm"] + assert "metadata" in model["content"] + assert isinstance(model["content"]["metadata"], dict) + + @pytest.mark.parametrize("path,name", dirs) async def test_get_nb_no_contents(jp_fetch, contents, path, name): nbname = name + ".ipynb" @@ -111,6 +128,9 @@ async def test_get_nb_no_contents(jp_fetch, contents, path, name): assert model["name"] == nbname assert model["path"] == nbpath assert model["type"] == "notebook" + assert "hash" in model + assert model["hash"] == None + assert "hash_algorithm" in model assert "content" in model assert model["content"] is None @@ -161,6 +181,9 @@ async def test_get_text_file_contents(jp_fetch, contents, path, name): model = json.loads(r.body.decode()) assert model["name"] == txtname assert model["path"] == txtpath + assert "hash" in model + assert model["hash"] == None + assert "hash_algorithm" in model assert "content" in model assert model["format"] == "text" assert model["type"] == "file" @@ -186,6 +209,21 @@ async def test_get_text_file_contents(jp_fetch, contents, path, name): assert expected_http_error(e, 400) +@pytest.mark.parametrize("path,name", dirs) +async def test_get_text_file_hash(jp_fetch, contents, path, name): + txtname = name + ".txt" + txtpath = (path + "/" + txtname).lstrip("/") + r = await jp_fetch("api", "contents", txtpath, method="GET", params=dict(hash="1")) + model = json.loads(r.body.decode()) + assert model["name"] == txtname + assert model["path"] == txtpath + assert "hash" in model + assert model["hash"] + assert model["hash_algorithm"] + assert model["format"] == "text" + assert model["type"] == "file" + + async def test_get_404_hidden(jp_fetch, contents, contents_dir): # Create text files hidden_dir = contents_dir / ".hidden" @@ -226,6 +264,9 @@ async def test_get_binary_file_contents(jp_fetch, contents, path, name): assert model["name"] == blobname assert model["path"] == blobpath assert "content" in model + assert "hash" in model + assert model["hash"] == None + assert "hash_algorithm" in model assert model["format"] == 
"base64" assert model["type"] == "file" data_out = decodebytes(model["content"].encode("ascii")) @@ -268,7 +309,7 @@ async def test_get_bad_type(jp_fetch, contents): assert expected_http_error(e, 400, "%s is not a directory" % path) -@pytest.fixture +@pytest.fixture() def _check_created(jp_base_url): def _inner(r, contents_dir, path, name, type="notebook"): fpath = path + "/" + name diff --git a/tests/services/contents/test_fileio.py b/tests/services/contents/test_fileio.py index 0f0cf1bfed..12752ee810 100644 --- a/tests/services/contents/test_fileio.py +++ b/tests/services/contents/test_fileio.py @@ -67,9 +67,9 @@ class CustomExc(Exception): assert f.read() == "written from symlink" -@pytest.fixture +@pytest.fixture() def handle_umask(): - global umask # noqa + global umask umask = os.umask(0) os.umask(umask) yield @@ -137,11 +137,16 @@ def test_path_to_invalid(tmpdir): @pytest.mark.skipif(os.name == "nt", reason="test fails on Windows") -def test_file_manager_mixin(tmpdir): +def test_file_manager_mixin(tmp_path): mixin = FileManagerMixin() mixin.log = logging.getLogger() - bad_content = tmpdir / "bad_content.ipynb" + bad_content = tmp_path / "bad_content.ipynb" bad_content.write_text("{}", "utf8") + # Same as `echo -n {} | sha256sum` + assert mixin._get_hash(bad_content.read_bytes()) == { + "hash": "44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a", + "hash_algorithm": "sha256", + } with pytest.raises(HTTPError): mixin._read_notebook(bad_content) other = path_to_intermediate(bad_content) @@ -152,10 +157,10 @@ def test_file_manager_mixin(tmpdir): validate(nb) with pytest.raises(HTTPError): - mixin._read_file(tmpdir, "text") + mixin._read_file(tmp_path, "text") with pytest.raises(HTTPError): - mixin._save_file(tmpdir / "foo", "foo", "bar") + mixin._save_file(tmp_path / "foo", "foo", "bar") @pytest.mark.skipif(os.name == "nt", reason="test fails on Windows") @@ -170,7 +175,12 @@ async def test_async_file_manager_mixin(tmpdir): with open(other, "w") as fid: json.dump(new_notebook(), fid) mixin.use_atomic_writing = True - nb = await mixin._read_notebook(bad_content) + nb, bcontent = await mixin._read_notebook(bad_content, raw=True) + # Same as `echo -n {} | sha256sum` + assert mixin._get_hash(bcontent) == { + "hash": "4747f9680816e352a697d0fb69d82334457cdd1e46f053e800859833d3e6003e", + "hash_algorithm": "sha256", + } validate(nb) with pytest.raises(HTTPError): @@ -178,3 +188,30 @@ async def test_async_file_manager_mixin(tmpdir): with pytest.raises(HTTPError): await mixin._save_file(tmpdir / "foo", "foo", "bar") + + +async def test_AsyncFileManagerMixin_read_notebook_no_raw(tmpdir): + mixin = AsyncFileManagerMixin() + mixin.log = logging.getLogger() + bad_content = tmpdir / "bad_content.ipynb" + bad_content.write_text("{}", "utf8") + + other = path_to_intermediate(bad_content) + with open(other, "w") as fid: + json.dump(new_notebook(), fid) + mixin.use_atomic_writing = True + answer = await mixin._read_notebook(bad_content) + + assert not isinstance(answer, tuple) + + +async def test_AsyncFileManagerMixin_read_file_no_raw(tmpdir): + mixin = AsyncFileManagerMixin() + mixin.log = logging.getLogger() + file_path = tmpdir / "bad_content.text" + file_path.write_text("blablabla", "utf8") + + mixin.use_atomic_writing = True + answer = await mixin._read_file(file_path, "text") + + assert len(answer) == 2 diff --git a/tests/services/contents/test_manager.py b/tests/services/contents/test_manager.py index 7fa5cbd742..e718036b0b 100644 --- a/tests/services/contents/test_manager.py +++ 
b/tests/services/contents/test_manager.py @@ -296,7 +296,7 @@ async def test_403(jp_file_contents_manager_class, tmp_path): assert e.status_code == 403 -async def test_400(jp_file_contents_manager_class, tmp_path): # noqa +async def test_400(jp_file_contents_manager_class, tmp_path): # Test Delete behavior # Test delete of file in hidden directory td = str(tmp_path) @@ -406,35 +406,44 @@ async def test_400(jp_file_contents_manager_class, tmp_path): # noqa async def test_404(jp_file_contents_manager_class, tmp_path): + # setup + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + # Test visible file in hidden folder - with pytest.raises(HTTPError) as excinfo: - td = str(tmp_path) - cm = jp_file_contents_manager_class(root_dir=td) - hidden_dir = ".hidden" - file_in_hidden_path = os.path.join(hidden_dir, "visible.txt") - _make_dir(cm, hidden_dir) - model = await ensure_async(cm.new(path=file_in_hidden_path)) - os_path = cm._get_os_path(model["path"]) + cm.allow_hidden = True + hidden_dir = ".hidden" + file_in_hidden_path = os.path.join(hidden_dir, "visible.txt") + _make_dir(cm, hidden_dir) + model = await ensure_async(cm.new(path=file_in_hidden_path)) + os_path = cm._get_os_path(model["path"]) + cm.allow_hidden = False - try: - result = await ensure_async(cm.get(os_path, "w")) - except HTTPError as e: - assert e.status_code == 404 + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.get(os_path)) + assert excinfo.value.status_code == 404 # Test hidden file in visible folder + cm.allow_hidden = True + hidden_dir = "visible" + file_in_hidden_path = os.path.join(hidden_dir, ".hidden.txt") + _make_dir(cm, hidden_dir) + model = await ensure_async(cm.new(path=file_in_hidden_path)) + os_path = cm._get_os_path(model["path"]) + cm.allow_hidden = False + with pytest.raises(HTTPError) as excinfo: - td = str(tmp_path) - cm = jp_file_contents_manager_class(root_dir=td) - hidden_dir = "visible" - file_in_hidden_path = os.path.join(hidden_dir, ".hidden.txt") - _make_dir(cm, hidden_dir) - model = await ensure_async(cm.new(path=file_in_hidden_path)) - os_path = cm._get_os_path(model["path"]) + await ensure_async(cm.get(os_path)) + assert excinfo.value.status_code == 404 - try: - result = await ensure_async(cm.get(os_path, "w")) - except HTTPError as e: - assert e.status_code == 404 + # Test file not found + td = str(tmp_path) + cm = jp_file_contents_manager_class(root_dir=td) + not_a_file = "foo.bar" + + with pytest.raises(HTTPError) as excinfo: + await ensure_async(cm.get(not_a_file)) + assert excinfo.value.status_code == 404 async def test_escape_root(jp_file_contents_manager_class, tmp_path): @@ -538,7 +547,7 @@ async def test_modified_date(jp_contents_manager): assert renamed["last_modified"] >= saved["last_modified"] -async def test_get(jp_contents_manager): # noqa +async def test_get(jp_contents_manager): cm = jp_contents_manager # Create a notebook model = await ensure_async(cm.new_untitled(type="notebook")) @@ -562,6 +571,17 @@ async def test_get(jp_contents_manager): # noqa nb_as_bin_file = await ensure_async(cm.get(path, content=True, type="file", format="base64")) assert nb_as_bin_file["format"] == "base64" + nb_with_hash = await ensure_async(cm.get(path, require_hash=True)) + assert nb_with_hash["hash"] + assert nb_with_hash["hash_algorithm"] + + # Get the hash without the content + nb_with_hash = await ensure_async(cm.get(path, content=False, require_hash=True)) + assert nb_with_hash["content"] is None + assert nb_with_hash["format"] is None + assert 
nb_with_hash["hash"] + assert nb_with_hash["hash_algorithm"] + # Test in sub-directory sub_dir = "/foo/" _make_dir(cm, "foo") @@ -576,7 +596,7 @@ async def test_get(jp_contents_manager): # noqa # Test with a regular file. file_model_path = (await ensure_async(cm.new_untitled(path=sub_dir, ext=".txt")))["path"] - file_model = await ensure_async(cm.get(file_model_path)) + file_model = await ensure_async(cm.get(file_model_path, require_hash=True)) expected_model = { "content": "", "format": "text", @@ -585,12 +605,34 @@ async def test_get(jp_contents_manager): # noqa "path": "foo/untitled.txt", "type": "file", "writable": True, + "hash_algorithm": cm.hash_algorithm, } # Assert expected model is in file_model for key, value in expected_model.items(): assert file_model[key] == value assert "created" in file_model assert "last_modified" in file_model + assert file_model["hash"] + + # Get hash without content + file_model = await ensure_async(cm.get(file_model_path, content=False, require_hash=True)) + expected_model = { + "content": None, + "format": None, + "mimetype": "text/plain", + "name": "untitled.txt", + "path": "foo/untitled.txt", + "type": "file", + "writable": True, + "hash_algorithm": cm.hash_algorithm, + } + + # Assert expected model is in file_model + for key, value in expected_model.items(): + assert file_model[key] == value + assert "created" in file_model + assert "last_modified" in file_model + assert file_model["hash"] # Create a sub-sub directory to test getting directory contents with a # subdir. diff --git a/tests/services/contents/test_manager_no_hash.py b/tests/services/contents/test_manager_no_hash.py new file mode 100644 index 0000000000..511a8d319b --- /dev/null +++ b/tests/services/contents/test_manager_no_hash.py @@ -0,0 +1,44 @@ +import json + +import pytest + +from jupyter_server.services.contents.filemanager import ( + AsyncFileContentsManager, +) + + +class NoHashFileManager(AsyncFileContentsManager): + """FileManager prior to 2.11 that introduce the ability to request file hash.""" + + def _base_model(self, path): + """Drop new attributes from model.""" + model = super()._base_model(path) + + del model["hash"] + del model["hash_algorithm"] + + return model + + async def get(self, path, content=True, type=None, format=None): + """Get without the new `require_hash` argument""" + model = await super().get(path, content=content, type=type, format=format) + return model + + +@pytest.fixture +def jp_server_config(jp_server_config): + jp_server_config["ServerApp"]["contents_manager_class"] = NoHashFileManager + return jp_server_config + + +async def test_manager_no_hash_support(tmp_path, jp_root_dir, jp_fetch): + # Create some content + path = "dummy.txt" + (jp_root_dir / path).write_text("blablabla", encoding="utf-8") + + response = await jp_fetch("api", "contents", path, method="GET", params=dict(hash="1")) + + model = json.loads(response.body) + + assert "hash" not in model + assert "hash_algorithm" not in model diff --git a/tests/services/events/mockextension/__init__.py b/tests/services/events/mockextension/__init__.py index b19cb18a2e..ed7c0e9d37 100644 --- a/tests/services/events/mockextension/__init__.py +++ b/tests/services/events/mockextension/__init__.py @@ -1,4 +1,4 @@ -from .mock_extension import _load_jupyter_server_extension # noqa: F401 +from .mock_extension import _load_jupyter_server_extension # Function that makes these extensions discoverable # by the test functions. 
diff --git a/tests/services/events/test_api.py b/tests/services/events/test_api.py index 5311f0860b..d84b112240 100644 --- a/tests/services/events/test_api.py +++ b/tests/services/events/test_api.py @@ -9,7 +9,7 @@ from tests.utils import expected_http_error -@pytest.fixture +@pytest.fixture() def event_logger_sink(jp_serverapp): event_logger = jp_serverapp.event_logger # Register the event schema defined in this directory. @@ -21,7 +21,7 @@ def event_logger_sink(jp_serverapp): return event_logger, sink -@pytest.fixture +@pytest.fixture() def event_logger(event_logger_sink): event_logger, sink = event_logger_sink return event_logger diff --git a/tests/services/events/test_extension.py b/tests/services/events/test_extension.py index fafb34497f..c20e6f79b6 100644 --- a/tests/services/events/test_extension.py +++ b/tests/services/events/test_extension.py @@ -3,7 +3,7 @@ import pytest -@pytest.fixture +@pytest.fixture() def jp_server_config(): config = { "ServerApp": { diff --git a/tests/services/kernels/test_api.py b/tests/services/kernels/test_api.py index 60009e8978..c1b98e7269 100644 --- a/tests/services/kernels/test_api.py +++ b/tests/services/kernels/test_api.py @@ -29,7 +29,7 @@ def suppress_deprecation_warnings(): yield -@pytest.fixture +@pytest.fixture() def pending_kernel_is_ready(jp_serverapp): async def _(kernel_id, ready=None): km = jp_serverapp.kernel_manager diff --git a/tests/services/kernels/test_config.py b/tests/services/kernels/test_config.py index 1db2e11b1f..8f779bb1dd 100644 --- a/tests/services/kernels/test_config.py +++ b/tests/services/kernels/test_config.py @@ -4,7 +4,7 @@ from jupyter_server.services.kernels.kernelmanager import AsyncMappingKernelManager -@pytest.fixture +@pytest.fixture() def jp_server_config(): return Config( {"ServerApp": {"MappingKernelManager": {"allowed_message_types": ["kernel_info_request"]}}} diff --git a/tests/services/kernels/test_cull.py b/tests/services/kernels/test_cull.py index 50ecbf2b96..f370f9c5ef 100644 --- a/tests/services/kernels/test_cull.py +++ b/tests/services/kernels/test_cull.py @@ -30,7 +30,7 @@ def suppress_deprecation_warnings(): yield -@pytest.fixture +@pytest.fixture() def jp_kernelspec_with_metadata(jp_data_dir): """Configures some sample kernelspecs in the Jupyter data directory.""" kenrel_spec_name = "sample_with_metadata" diff --git a/tests/services/kernels/test_events.py b/tests/services/kernels/test_events.py index de2e976b1b..9bdfb03672 100644 --- a/tests/services/kernels/test_events.py +++ b/tests/services/kernels/test_events.py @@ -16,7 +16,6 @@ async def test_kernel_action_success_event( async def mock_method(self, *args, **kwargs): self.kernel_id = "x-x-x-x-x" - ... monkeypatch.setattr(AsyncKernelManager, f"{action}_kernel", mock_method) @@ -42,7 +41,7 @@ async def mock_method(self, *args, **kwargs): monkeypatch.setattr(AsyncKernelManager, f"{action}_kernel", mock_method) - with pytest.raises(Exception): # noqa + with pytest.raises(Exception): # noqa: B017 await getattr(manager, f"{action}_kernel")() output = jp_read_emitted_events()[0] diff --git a/tests/services/sessions/test_api.py b/tests/services/sessions/test_api.py index 8c46de9470..a4aa0a73e5 100644 --- a/tests/services/sessions/test_api.py +++ b/tests/services/sessions/test_api.py @@ -164,7 +164,7 @@ async def cleanup(self): time.sleep(0.1) -@pytest.fixture +@pytest.fixture() def session_is_ready(jp_serverapp): """Wait for the kernel started by a session to be ready. 
@@ -187,7 +187,7 @@ async def _(session_id):
     return _


-@pytest.fixture
+@pytest.fixture()
 def session_client(jp_root_dir, jp_fetch):
     subdir = jp_root_dir.joinpath("foo")
     subdir.mkdir()
diff --git a/tests/services/sessions/test_manager.py b/tests/services/sessions/test_manager.py
index bd092259e0..9af04f2268 100644
--- a/tests/services/sessions/test_manager.py
+++ b/tests/services/sessions/test_manager.py
@@ -66,7 +66,7 @@ async def shutdown_kernel(self, kernel_id, now=False):
         await super().shutdown_kernel(kernel_id, now=now)


-@pytest.fixture
+@pytest.fixture()
 def session_manager():
     return SessionManager(kernel_manager=MockMKM(), contents_manager=ContentsManager())
diff --git a/tests/test_gateway.py b/tests/test_gateway.py
index c178c7f801..585650e2f0 100644
--- a/tests/test_gateway.py
+++ b/tests/test_gateway.py
@@ -90,7 +90,7 @@ def generate_model(name):
     return model


-async def mock_gateway_request(url, **kwargs):  # noqa
+async def mock_gateway_request(url, **kwargs):
     method = "GET"
     if kwargs["method"]:
         method = kwargs["method"]
@@ -234,7 +234,7 @@ def jp_server_config():
     )


-@pytest.fixture
+@pytest.fixture()
 def init_gateway(monkeypatch):
     """Initializes the server for use as a gateway client."""
     # Clear the singleton first since previous tests may not have used a gateway.
diff --git a/tests/test_serverapp.py b/tests/test_serverapp.py
index 9ea6e569f7..df703f550c 100644
--- a/tests/test_serverapp.py
+++ b/tests/test_serverapp.py
@@ -168,7 +168,7 @@ def test_list_running_servers(jp_serverapp, jp_web_app):
     assert len(servers) >= 1


-@pytest.fixture
+@pytest.fixture()
 def prefix_path(jp_root_dir, tmp_path):
     """If a given path is prefixed with the literal
     strings `/jp_root_dir` or `/tmp_path`, replace those
diff --git a/tests/test_terminal.py b/tests/test_terminal.py
index 5e45b01dee..27fd69fadb 100644
--- a/tests/test_terminal.py
+++ b/tests/test_terminal.py
@@ -14,7 +14,7 @@
 from jupyter_server._tz import isoformat


-@pytest.fixture
+@pytest.fixture()
 def terminal_path(tmp_path):
     subdir = tmp_path.joinpath("terminal_path")
     subdir.mkdir()
@@ -24,7 +24,7 @@
     shutil.rmtree(str(subdir), ignore_errors=True)


-@pytest.fixture
+@pytest.fixture()
 def terminal_root_dir(jp_root_dir):
     subdir = jp_root_dir.joinpath("terminal_path")
     subdir.mkdir()
@@ -38,7 +38,7 @@
 CULL_INTERVAL = 3


-@pytest.fixture
+@pytest.fixture()
 def jp_server_config():
     return Config(
         {
@@ -52,7 +52,7 @@
     )


-@pytest.fixture
+@pytest.fixture()
 def jp_argv():
     """Allows tests to setup specific argv values."""
     return ["--ServerApp.jpserver_extensions", "jupyter_server_terminals=True"]
@@ -300,7 +300,7 @@ def test_shell_command_override(
 def test_importing_shims():
     with warnings.catch_warnings():
         warnings.simplefilter("ignore")
-        from jupyter_server.terminal import initialize  # noqa
-        from jupyter_server.terminal.api_handlers import TerminalRootHandler  # noqa
-        from jupyter_server.terminal.handlers import TermSocket  # noqa
-        from jupyter_server.terminal.terminalmanager import TerminalManager  # noqa
+        from jupyter_server.terminal import initialize
+        from jupyter_server.terminal.api_handlers import TerminalRootHandler
+        from jupyter_server.terminal.handlers import TermSocket
+        from jupyter_server.terminal.terminalmanager import TerminalManager
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 5a3f33138b..83d2a1d926 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -99,7 +99,7 @@ def test_check_version():


 def test_check_pid():
-    proc = subprocess.Popen([sys.executable])  # noqa
+    proc = subprocess.Popen([sys.executable])
     proc.kill()
     proc.wait()
     check_pid(proc.pid)
diff --git a/tests/unix_sockets/conftest.py b/tests/unix_sockets/conftest.py
index c9324d226d..6eb4daa03a 100644
--- a/tests/unix_sockets/conftest.py
+++ b/tests/unix_sockets/conftest.py
@@ -6,13 +6,13 @@
 from jupyter_server import DEFAULT_JUPYTER_SERVER_PORT


-@pytest.fixture
+@pytest.fixture()
 def jp_process_id():
     """Choose a random unused process ID."""
     return os.getpid()


-@pytest.fixture
+@pytest.fixture()
 def jp_unix_socket_file(jp_process_id):
     """Define a temporary socket connection"""
     # Rely on `/tmp` to avoid any Linux socket length max buffer
@@ -26,7 +26,7 @@
     jp_unix_socket_file.unlink()


-@pytest.fixture
+@pytest.fixture()
 def jp_http_port():
     """Set the port to the default value, since sock and port cannot
     both be configured at the same time.
diff --git a/tests/unix_sockets/test_api.py b/tests/unix_sockets/test_api.py
index 85714f4a52..fb6358860f 100644
--- a/tests/unix_sockets/test_api.py
+++ b/tests/unix_sockets/test_api.py
@@ -16,13 +16,13 @@
 from jupyter_server.utils import async_fetch, url_path_join, urlencode_unix_socket


-@pytest.fixture
+@pytest.fixture()
 def jp_server_config(jp_unix_socket_file):
     """Configure the serverapp fixture with the unix socket."""
     return {"ServerApp": {"sock": jp_unix_socket_file, "allow_remote_access": True}}


-@pytest.fixture
+@pytest.fixture()
 def http_server_port(jp_unix_socket_file, jp_process_id):
     """Unix socket and process ID used by tornado's HTTP Server.
@@ -32,7 +32,7 @@
     return (bind_unix_socket(jp_unix_socket_file), jp_process_id)


-@pytest.fixture
+@pytest.fixture()
 def jp_unix_socket_fetch(jp_unix_socket_file, jp_auth_header, jp_base_url, http_server, io_loop):
     """A fetch fixture for Jupyter Server tests that use the unix_serverapp fixture"""
diff --git a/tests/unix_sockets/test_serverapp_integration.py b/tests/unix_sockets/test_serverapp_integration.py
index f60c99b1bc..392fd7a61a 100644
--- a/tests/unix_sockets/test_serverapp_integration.py
+++ b/tests/unix_sockets/test_serverapp_integration.py
@@ -42,7 +42,7 @@ def _cleanup_process(proc):
         fid.close()


-@pytest.mark.integration_test
+@pytest.mark.integration_test()
 def test_shutdown_sock_server_integration(jp_unix_socket_file):
     url = urlencode_unix_socket(jp_unix_socket_file).encode()
     encoded_sock_path = urlencode_unix_socket_path(jp_unix_socket_file)
@@ -89,7 +89,7 @@ def test_shutdown_sock_server_integration(jp_unix_socket_file):
         _cleanup_process(p)


-@pytest.mark.integration_test
+@pytest.mark.integration_test()
 def test_sock_server_validate_sockmode_type():
     try:
         _check_output(["jupyter-server", "--sock=/tmp/nonexistent", "--sock-mode=badbadbad"])
@@ -99,7 +99,7 @@
         raise AssertionError("expected execution to fail due to validation of --sock-mode param")


-@pytest.mark.integration_test
+@pytest.mark.integration_test()
 def test_sock_server_validate_sockmode_accessible():
     try:
         _check_output(
@@ -120,7 +120,7 @@ def _ensure_stopped(check_msg="There are no running servers"):
         raise AssertionError("expected all servers to be stopped")


-@pytest.mark.integration_test
+@pytest.mark.integration_test()
 def test_stop_multi_integration(jp_unix_socket_file, jp_http_port):
     """Tests lifecycle behavior for mixed-mode server types w/ default ports.
@@ -158,7 +158,7 @@ def test_stop_multi_integration(jp_unix_socket_file, jp_http_port):
         [_cleanup_process(p) for p in [p1, p2, p3]]


-@pytest.mark.integration_test
+@pytest.mark.integration_test()
 def test_launch_socket_collision(jp_unix_socket_file):
     """Tests UNIX socket in-use detection for lifecycle correctness."""
     sock = jp_unix_socket_file
@@ -189,7 +189,7 @@ def test_launch_socket_collision(jp_unix_socket_file):
         _cleanup_process(p1)


-@pytest.mark.integration_test
+@pytest.mark.integration_test()
 def test_shutdown_server(jp_environ):
     # Start a server in another process
     # Stop that server
@@ -214,7 +214,7 @@ def test_shutdown_server(jp_environ):
         _cleanup_process(p)


-@pytest.mark.integration_test
+@pytest.mark.integration_test()
 def test_jupyter_server_apps(jp_environ):
     # Start a server in another process
     # Stop that server