From ed0c640898891bd0a087bd71f2c39278777ea053 Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Wed, 18 Nov 2020 08:10:53 -0800 Subject: [PATCH 01/31] Fix race condition with async kernel management --- notebook/services/kernels/kernelmanager.py | 23 ++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/notebook/services/kernels/kernelmanager.py b/notebook/services/kernels/kernelmanager.py index 61cbbe58f5..7ed182dfc0 100644 --- a/notebook/services/kernels/kernelmanager.py +++ b/notebook/services/kernels/kernelmanager.py @@ -294,7 +294,6 @@ def shutdown_kernel(self, kernel_id, now=False, restart=False): kernel._activity_stream.close() kernel._activity_stream = None self.stop_buffering(kernel_id) - self._kernel_connections.pop(kernel_id, None) # Decrease the metric of number of kernels # running for the relevant kernel type by 1 @@ -302,7 +301,12 @@ def shutdown_kernel(self, kernel_id, now=False, restart=False): type=self._kernels[kernel_id].kernel_name ).dec() - return self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart) + self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart) + # Unlike its async sibling method in AsyncMappingKernelManager, removing the kernel_id + # from the connections dictionary isn't as problematic before the shutdown since the + # method is synchronous. However, we'll keep the relative call orders the same from + # a maintenance perspective. 
+ self._kernel_connections.pop(kernel_id, None) async def restart_kernel(self, kernel_id, now=False): """Restart a kernel by kernel_id""" @@ -376,8 +380,11 @@ def list_kernels(self): kernels = [] kernel_ids = self.pinned_superclass.list_kernel_ids(self) for kernel_id in kernel_ids: - model = self.kernel_model(kernel_id) - kernels.append(model) + try: + model = self.kernel_model(kernel_id) + kernels.append(model) + except (web.HTTPError, KeyError): + pass # Probably due to a (now) non-existent kernel, continue building the list return kernels # override _check_kernel_id to raise 404 instead of KeyError @@ -498,7 +505,6 @@ async def shutdown_kernel(self, kernel_id, now=False, restart=False): kernel._activity_stream.close() kernel._activity_stream = None self.stop_buffering(kernel_id) - self._kernel_connections.pop(kernel_id, None) # Decrease the metric of number of kernels # running for the relevant kernel type by 1 @@ -506,4 +512,9 @@ async def shutdown_kernel(self, kernel_id, now=False, restart=False): type=self._kernels[kernel_id].kernel_name ).dec() - return await self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart) + await self.pinned_superclass.shutdown_kernel(self, kernel_id, now=now, restart=restart) + # Remove kernel_id from the connections dictionary only after kernel has been shutdown, + # otherwise a race condition can occur since the shutdown may take a while - allowing + # list/fetch kernel operations to access _kernel_connections for a non-existent key + # (kernel_id) while "awaiting" the result of the shutdown. + self._kernel_connections.pop(kernel_id, None) From 6a46458f1f13166c43a192fd84ea57c00c1f4776 Mon Sep 17 00:00:00 2001 From: Stefano Rivera Date: Tue, 1 Dec 2020 13:04:52 -0800 Subject: [PATCH 02/31] Check for TrashPermissionError rather than guess _check_trash() was added (in #3304) because TrashPermissionError didn't exist, yet. 
Now that it does, we can use it, and stop guessing what will cause a permission problem. Closes: #3374 --- notebook/services/contents/filemanager.py | 22 ++++------------------ setup.py | 2 +- 2 files changed, 5 insertions(+), 19 deletions(-) diff --git a/notebook/services/contents/filemanager.py b/notebook/services/contents/filemanager.py index 3fa6dad212..0c9386b2fc 100644 --- a/notebook/services/contents/filemanager.py +++ b/notebook/services/contents/filemanager.py @@ -15,6 +15,7 @@ import nbformat from send2trash import send2trash +from send2trash.exceptions import TrashPermissionError from tornado import web from .filecheckpoints import FileCheckpoints @@ -512,17 +513,6 @@ def delete_file(self, path): if not os.path.exists(os_path): raise web.HTTPError(404, u'File or directory does not exist: %s' % os_path) - def _check_trash(os_path): - if sys.platform in {'win32', 'darwin'}: - return True - - # It's a bit more nuanced than this, but until we can better - # distinguish errors from send2trash, assume that we can only trash - # files on the same partition as the home directory. - file_dev = os.stat(os_path).st_dev - home_dev = os.stat(os.path.expanduser('~')).st_dev - return file_dev == home_dev - def is_non_empty_dir(os_path): if os.path.isdir(os_path): # A directory containing only leftover checkpoints is @@ -538,16 +528,12 @@ def is_non_empty_dir(os_path): # send2trash can really delete files on Windows, so disallow # deleting non-empty files. See Github issue 3631. raise web.HTTPError(400, u'Directory %s not empty' % os_path) - if _check_trash(os_path): + try: self.log.debug("Sending %s to trash", os_path) - # Looking at the code in send2trash, I don't think the errors it - # raises let us distinguish permission errors from other errors in - # code. So for now, just let them all get logged as server errors. 
send2trash(os_path) return - else: - self.log.warning("Skipping trash for %s, on different device " - "to home directory", os_path) + except TrashPermissionError as e: + self.log.warning("Skipping trash for %s, %s", os_path, e) if os.path.isdir(os_path): # Don't permanently delete non-empty directories. diff --git a/setup.py b/setup.py index 676c5763cd..7c0d10e6a3 100755 --- a/setup.py +++ b/setup.py @@ -110,7 +110,7 @@ 'nbformat', 'nbconvert', 'ipykernel', # bless IPython kernel for now - 'Send2Trash', + 'Send2Trash>=1.5.0', 'terminado>=0.8.3', 'prometheus_client' ], From 7bcc824873c494a85c399061e3d194dc499a5438 Mon Sep 17 00:00:00 2001 From: Stefano Rivera Date: Tue, 1 Dec 2020 14:11:52 -0800 Subject: [PATCH 03/31] Handle send2trash failure in test_delete_non_empty_dir If the test working directory isn't trashable, we expect to not be able to delete the non-empty directory. --- .../contents/tests/test_contents_api.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/notebook/services/contents/tests/test_contents_api.py b/notebook/services/contents/tests/test_contents_api.py index 543aec2577..6e4ad49dbc 100644 --- a/notebook/services/contents/tests/test_contents_api.py +++ b/notebook/services/contents/tests/test_contents_api.py @@ -12,6 +12,8 @@ pjoin = os.path.join import requests +from send2trash import send2trash +from send2trash.exceptions import TrashPermissionError from ..filecheckpoints import GenericFileCheckpoints @@ -197,6 +199,14 @@ def isfile(self, api_path): def isdir(self, api_path): return os.path.isdir(self.to_os_path(api_path)) + def can_send2trash(self, api_path): + """Send a path to trash, if possible. 
Return success.""" + try: + send2trash(self.to_os_path(api_path)) + return True + except TrashPermissionError as e: + return False + def setUp(self): for d in (self.dirs + self.hidden_dirs): self.make_dir(d) @@ -526,7 +536,13 @@ def test_delete_non_empty_dir(self): if sys.platform == 'win32': self.skipTest("Disabled deleting non-empty dirs on Windows") # Test that non empty directory can be deleted - self.api.delete(u'å b') + try: + self.api.delete(u'å b') + except requests.HTTPError as e: + if e.response.status_code == 400: + if not self.can_send2trash(u'å b'): + self.skipTest("Dir can't be sent to trash") + raise # Check if directory has actually been deleted with assert_http_error(404): self.api.list(u'å b') From e39ccb91884f414def49a472c78b122b90013d7f Mon Sep 17 00:00:00 2001 From: Zsailer Date: Wed, 23 Dec 2020 14:52:50 -0800 Subject: [PATCH 04/31] Release 6.1.6 --- notebook/_version.py | 2 +- notebook/static/base/js/namespace.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/notebook/_version.py b/notebook/_version.py index 7004fc7ad3..1ed427a628 100644 --- a/notebook/_version.py +++ b/notebook/_version.py @@ -9,5 +9,5 @@ # Next beta/alpha/rc release: The version number for beta is X.Y.ZbN **without dots**. 
-version_info = (7, 0, 0, '.dev0') +version_info = (6, 1, 6, '') __version__ = '.'.join(map(str, version_info[:3])) + ''.join(version_info[3:]) diff --git a/notebook/static/base/js/namespace.js b/notebook/static/base/js/namespace.js index 7ae2d1409c..b1f04c6039 100644 --- a/notebook/static/base/js/namespace.js +++ b/notebook/static/base/js/namespace.js @@ -73,7 +73,7 @@ define(function(){ // tree jglobal('SessionList','tree/js/sessionlist'); - Jupyter.version = "7.0.0.dev0"; + Jupyter.version = "6.1.6"; Jupyter._target = '_blank'; return Jupyter; From fb5deeed23479ff9a790a674993b00d39cc6b225 Mon Sep 17 00:00:00 2001 From: Zsailer Date: Wed, 23 Dec 2020 14:56:23 -0800 Subject: [PATCH 05/31] Back to dev version --- notebook/_version.py | 2 +- notebook/static/base/js/namespace.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/notebook/_version.py b/notebook/_version.py index 1ed427a628..7004fc7ad3 100644 --- a/notebook/_version.py +++ b/notebook/_version.py @@ -9,5 +9,5 @@ # Next beta/alpha/rc release: The version number for beta is X.Y.ZbN **without dots**. 
-version_info = (6, 1, 6, '') +version_info = (7, 0, 0, '.dev0') __version__ = '.'.join(map(str, version_info[:3])) + ''.join(version_info[3:]) diff --git a/notebook/static/base/js/namespace.js b/notebook/static/base/js/namespace.js index b1f04c6039..7ae2d1409c 100644 --- a/notebook/static/base/js/namespace.js +++ b/notebook/static/base/js/namespace.js @@ -73,7 +73,7 @@ define(function(){ // tree jglobal('SessionList','tree/js/sessionlist'); - Jupyter.version = "6.1.6"; + Jupyter.version = "7.0.0.dev0"; Jupyter._target = '_blank'; return Jupyter; From 3cc628e3fe0c8bf5415470ff69eb4070e8a2e4a3 Mon Sep 17 00:00:00 2001 From: user202729 <25191436+user202729@users.noreply.github.com> Date: Mon, 28 Dec 2020 21:48:41 +0700 Subject: [PATCH 06/31] Fix typo --- notebook/bundler/tests/test_bundler_api.py | 2 +- notebook/static/notebook/js/celltoolbar.js | 2 +- notebook/static/notebook/js/codecell.js | 2 +- notebook/static/notebook/js/completer.js | 4 ++-- notebook/static/notebook/js/tooltip.js | 4 ++-- 5 files changed, 7 insertions(+), 7 deletions(-) diff --git a/notebook/bundler/tests/test_bundler_api.py b/notebook/bundler/tests/test_bundler_api.py index 173ee5fb3d..6c251e5e99 100644 --- a/notebook/bundler/tests/test_bundler_api.py +++ b/notebook/bundler/tests/test_bundler_api.py @@ -46,7 +46,7 @@ def test_missing_bundler_arg(self): self.assertIn('Missing argument bundler', resp.text) def test_notebook_not_found(self): - """Shoudl respond with 404 error about missing notebook""" + """Should respond with 404 error about missing notebook""" resp = self.request('GET', 'bundle/fake.ipynb', params={'bundler': 'fake_bundler'}) self.assertEqual(resp.status_code, 404) diff --git a/notebook/static/notebook/js/celltoolbar.js b/notebook/static/notebook/js/celltoolbar.js index 95ee248dcc..889f3c6ef3 100644 --- a/notebook/static/notebook/js/celltoolbar.js +++ b/notebook/static/notebook/js/celltoolbar.js @@ -403,7 +403,7 @@ define([ * @static * * @param list_list {list_of_sublist} List of 
sublist of metadata value and name in the dropdown list. - * subslit should contain 2 element each, first a string that woul be displayed in the dropdown list, + * sublist should contain 2 element each, first a string that would be displayed in the dropdown list, * and second the corresponding value to be passed to setter/return by getter. the corresponding value * should not be "undefined" or behavior can be unexpected. * @param setter {function( cell, newValue )} diff --git a/notebook/static/notebook/js/codecell.js b/notebook/static/notebook/js/codecell.js index f5b21bf8c4..a1b7179ec4 100644 --- a/notebook/static/notebook/js/codecell.js +++ b/notebook/static/notebook/js/codecell.js @@ -253,7 +253,7 @@ define([ } if (event.which === keycodes.down && event.type === 'keypress' && this.tooltip.time_before_tooltip >= 0) { - // triger on keypress (!) otherwise inconsistent event.which depending on plateform + // triger on keypress (!) otherwise inconsistent event.which depending on platform // browser and keyboard layout ! 
// Pressing '(' , request tooltip, don't forget to reappend it // The second argument says to hide the tooltip if the docstring diff --git a/notebook/static/notebook/js/completer.js b/notebook/static/notebook/js/completer.js index 909438b5d0..98b89161c2 100644 --- a/notebook/static/notebook/js/completer.js +++ b/notebook/static/notebook/js/completer.js @@ -348,7 +348,7 @@ define([ } else if (code == keycodes.tab) { //all the fastforwarding operation, //Check that shared start is not null which can append with prefixed completion - // like %pylab , pylab have no shred start, and ff will result in py + // like %pylab , pylab have no shared start, and ff will result in py // to erase py var sh = shared_start(this.raw_result, true); if (sh.str !== '') { @@ -358,7 +358,7 @@ define([ this.carry_on_completion(); } else if (code == keycodes.up || code == keycodes.down) { // need to do that to be able to move the arrow - // when on the first or last line ofo a code cell + // when on the first or last line of a code cell event.codemirrorIgnore = true; event._ipkmIgnore = true; event.preventDefault(); diff --git a/notebook/static/notebook/js/tooltip.js b/notebook/static/notebook/js/tooltip.js index 98c1448ac3..e913a3fd81 100644 --- a/notebook/static/notebook/js/tooltip.js +++ b/notebook/static/notebook/js/tooltip.js @@ -135,7 +135,7 @@ define([ }; // deal with all the logic of hiding the tooltip - // and reset it's status + // and reset its status Tooltip.prototype._hide = function () { this._hidden = true; this.tooltip.fadeOut('fast'); @@ -243,7 +243,7 @@ define([ this._sticky = false; }; - // put the tooltip in a sicky state for 10 seconds + // put the tooltip in a sticky state for 10 seconds // it won't be removed by remove_and_cancel() unless you called with // the first parameter set to true. 
// remove_and_cancel_tooltip(true) From ba0f490c944af02df3ceef3e6592ffceb548b699 Mon Sep 17 00:00:00 2001 From: sd Date: Tue, 5 Jan 2021 13:19:22 +0200 Subject: [PATCH 07/31] mirrored logic from https://github.com/jupyter/nb2kg/pull/45 to notebook/gateway --- notebook/gateway/handlers.py | 14 ++++++++++++-- notebook/gateway/managers.py | 35 +++++++++++++++++++++++++++++++++-- 2 files changed, 45 insertions(+), 4 deletions(-) diff --git a/notebook/gateway/handlers.py b/notebook/gateway/handlers.py index 75de066692..d774ba39e2 100644 --- a/notebook/gateway/handlers.py +++ b/notebook/gateway/handlers.py @@ -4,6 +4,7 @@ import os import logging import mimetypes +import random from ..base.handlers import APIHandler, IPythonHandler from ..utils import url_path_join @@ -134,6 +135,7 @@ def __init__(self, **kwargs): self.ws = None self.ws_future = Future() self.disconnected = False + self.retry = 0 @gen.coroutine def _connect(self, kernel_id): @@ -155,6 +157,7 @@ def _connect(self, kernel_id): def _connection_done(self, fut): if not self.disconnected and fut.exception() is None: # prevent concurrent.futures._base.CancelledError self.ws = fut.result() + self.retry = 0 self.log.debug("Connection is ready: ws: {}".format(self.ws)) else: self.log.warning("Websocket connection has been closed via client disconnect or due to error. " @@ -189,8 +192,15 @@ def _read_messages(self, callback): else: # ws cancelled - stop reading break - if not self.disconnected: # if websocket is not disconnected by client, attept to reconnect to Gateway - self.log.info("Attempting to re-establish the connection to Gateway: {}".format(self.kernel_id)) + # NOTE(esevan): if websocket is not disconnected by client, try to reconnect. 
+ if not self.disconnected and self.retry < GatewayClient.instance().gateway_retry_max: + jitter = random.randint(10, 100) * 0.01 + retry_interval = min(GatewayClient.instance().gateway_retry_interval * (2 ** self.retry), + GatewayClient.instance().gateway_retry_interval_max) + jitter + self.retry += 1 + self.log.info("Attempting to re-establish the connection to Gateway in %s secs (%s/%s): %s", + retry_interval, self.retry, GatewayClient.instance().gateway_retry_max, self.kernel_id) + yield gen.sleep(retry_interval) self._connect(self.kernel_id) loop = IOLoop.current() loop.add_future(self.ws_future, lambda future: self._read_messages(callback)) diff --git a/notebook/gateway/managers.py b/notebook/gateway/managers.py index 856cea494e..61c21fc2ef 100644 --- a/notebook/gateway/managers.py +++ b/notebook/gateway/managers.py @@ -22,7 +22,7 @@ class GatewayClient(SingletonConfigurable): """This class manages the configuration. It's its own singleton class so that we can share these values across all objects. It also contains some helper methods - to build request arguments out of the various config options. + to build request arguments out of the various config options. """ @@ -220,6 +220,38 @@ def __init__(self, **kwargs): def _env_whitelist_default(self): return os.environ.get(self.env_whitelist_env, self.env_whitelist_default_value) + gateway_retry_interval_default_value = 1.0 + gateway_retry_interval_env = 'JUPYTER_GATEWAY_RETRY_INTERVAL' + gateway_retry_interval = Float(default_value=gateway_retry_interval_default_value, config=True, + help="""The time allowed for HTTP reconnection with the Gateway server for the first time. + Next will be JUPYTER_GATEWAY_RETRY_INTERVAL multiplied by two in factor of numbers of retries + but less than JUPYTER_GATEWAY_RETRY_INTERVAL_MAX. 
+ (JUPYTER_GATEWAY_RETRY_INTERVAL env var)""") + + @default('gateway_retry_interval') + def gateway_retry_interval_default(self): + return float(os.environ.get('JUPYTER_GATEWAY_RETRY_INTERVAL', self.gateway_retry_interval_default_value)) + + gateway_retry_interval_max_default_value = 30.0 + gateway_retry_interval_max_env = 'JUPYTER_GATEWAY_RETRY_INTERVAL_MAX' + gateway_retry_interval_max = Float(default_value=gateway_retry_interval_max_default_value, config=True, + help="""The maximum time allowed for HTTP reconnection retry with the Gateway server. + (JUPYTER_GATEWAY_RETRY_INTERVAL_MAX env var)""") + + @default('gateway_retry_interval_max') + def gateway_retry_interval_max_default(self): + return float(os.environ.get('JUPYTER_GATEWAY_RETRY_INTERVAL_MAX', self.gateway_retry_interval_max_default_value)) + + gateway_retry_max_default_value = 5 + gateway_retry_max_env = 'JUPYTER_GATEWAY_RETRY_MAX' + gateway_retry_max = Float(default_value=gateway_retry_max_default_value, config=True, + help="""The maximum numbers allowed for HTTP reconnection retries with the Gateway server. 
+ (JUPYTER_GATEWAY_RETRY_MAX env var)""") + + @default('gateway_retry_max') + def gateway_retry_max_default(self): + return int(os.environ.get('JUPYTER_GATEWAY_RETRY_MAX', self.gateway_retry_max_default_value)) + @property def gateway_enabled(self): return bool(self.url is not None and len(self.url) > 0) @@ -503,7 +535,6 @@ def shutdown_all(self, now=False): self.remove_kernel(kernel_id) - class GatewayKernelSpecManager(KernelSpecManager): def __init__(self, **kwargs): From cd102428caf6d75d71dbf8e12e7ce750baec250c Mon Sep 17 00:00:00 2001 From: sd Date: Tue, 5 Jan 2021 13:40:02 +0200 Subject: [PATCH 08/31] traitlets Int for gateway_retry_max parameter --- notebook/gateway/managers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/notebook/gateway/managers.py b/notebook/gateway/managers.py index 61c21fc2ef..125ee9ba3a 100644 --- a/notebook/gateway/managers.py +++ b/notebook/gateway/managers.py @@ -15,7 +15,7 @@ from jupyter_client.kernelspec import KernelSpecManager from ..utils import url_path_join -from traitlets import Instance, Unicode, Float, Bool, default, validate, TraitError +from traitlets import Instance, Unicode, Int, Float, Bool, default, validate, TraitError from traitlets.config import SingletonConfigurable @@ -244,7 +244,7 @@ def gateway_retry_interval_max_default(self): gateway_retry_max_default_value = 5 gateway_retry_max_env = 'JUPYTER_GATEWAY_RETRY_MAX' - gateway_retry_max = Float(default_value=gateway_retry_max_default_value, config=True, + gateway_retry_max = Int(default_value=gateway_retry_max_default_value, config=True, help="""The maximum numbers allowed for HTTP reconnection retries with the Gateway server. 
(JUPYTER_GATEWAY_RETRY_MAX env var)""") From d95f251ac609d6f6121c820162fc067fd0453389 Mon Sep 17 00:00:00 2001 From: Dvoiak Stepan Date: Tue, 5 Jan 2021 18:23:32 +0200 Subject: [PATCH 09/31] Update notebook/gateway/managers.py Co-authored-by: Kevin Bates --- notebook/gateway/managers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebook/gateway/managers.py b/notebook/gateway/managers.py index 125ee9ba3a..2f647d11da 100644 --- a/notebook/gateway/managers.py +++ b/notebook/gateway/managers.py @@ -245,7 +245,7 @@ def gateway_retry_interval_max_default(self): gateway_retry_max_default_value = 5 gateway_retry_max_env = 'JUPYTER_GATEWAY_RETRY_MAX' gateway_retry_max = Int(default_value=gateway_retry_max_default_value, config=True, - help="""The maximum numbers allowed for HTTP reconnection retries with the Gateway server. + help="""The maximum retries allowed for HTTP reconnection with the Gateway server. (JUPYTER_GATEWAY_RETRY_MAX env var)""") @default('gateway_retry_max') From b070efc32ccce04c98549a80bc0209572a4f1bfb Mon Sep 17 00:00:00 2001 From: mishaschwartz Date: Wed, 6 Jan 2021 10:03:26 -0500 Subject: [PATCH 10/31] cell data: make sure that the cell id (from nbformat 4.5) is kept when saving notebooks --- notebook/static/notebook/js/cell.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/notebook/static/notebook/js/cell.js b/notebook/static/notebook/js/cell.js index 023b3d3756..dbe031359f 100644 --- a/notebook/static/notebook/js/cell.js +++ b/notebook/static/notebook/js/cell.js @@ -490,6 +490,9 @@ define([ var data = {}; // deepcopy the metadata so copied cells don't share the same object data.metadata = JSON.parse(JSON.stringify(this.metadata)); + if (this.id !== undefined) { + data.id = this.id; + } if (data.metadata.deletable) { delete data.metadata.deletable; } @@ -511,6 +514,9 @@ define([ if (data.metadata !== undefined) { this.metadata = data.metadata; } + if (data.id !== undefined) { + this.id = data.id; + } }; From 
a5d7528994cb0d4ac9c12cbed82116d8ef63c941 Mon Sep 17 00:00:00 2001 From: mishaschwartz Date: Wed, 6 Jan 2021 10:32:01 -0500 Subject: [PATCH 11/31] cell copy: make sure that cell id is not copied when copying cell --- notebook/static/notebook/js/notebook.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/notebook/static/notebook/js/notebook.js b/notebook/static/notebook/js/notebook.js index 6a4310b0a8..00ad703dee 100644 --- a/notebook/static/notebook/js/notebook.js +++ b/notebook/static/notebook/js/notebook.js @@ -1648,6 +1648,9 @@ define([ if (cell_json.metadata.deletable !== undefined) { delete cell_json.metadata.deletable; } + if (cell_json.id !== undefined) { + delete cell_json.id; + } this.clipboard.push(cell_json); } this.enable_paste(); From 245ca66603ccdee07545159ecbec15c5e71a9edd Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Sun, 10 Jan 2021 22:33:10 -0800 Subject: [PATCH 12/31] Increase minimum tornado version (#5933) --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 7c0d10e6a3..e3d189e087 100755 --- a/setup.py +++ b/setup.py @@ -98,7 +98,7 @@ zip_safe = False, install_requires = [ 'jinja2', - 'tornado>=5.0', + 'tornado>=6.1', # pyzmq>=17 is not technically necessary, # but hopefully avoids incompatibilities with Tornado 5. 
April 2018 'pyzmq>=17', From 8d0af52125c1b5ebd54a29620b7869eab45f2c46 Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Sun, 10 Jan 2021 22:34:11 -0800 Subject: [PATCH 13/31] Adjust skip decorators to avoid remaining dependency on nose (#5932) --- notebook/tests/test_notebookapp_integration.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/notebook/tests/test_notebookapp_integration.py b/notebook/tests/test_notebookapp_integration.py index 9af505342b..fa2b642d0c 100644 --- a/notebook/tests/test_notebookapp_integration.py +++ b/notebook/tests/test_notebookapp_integration.py @@ -1,16 +1,17 @@ import os +import pytest import stat import subprocess +import sys import time -from ipython_genutils.testing.decorators import skip_win32, onlyif from notebook import DEFAULT_NOTEBOOK_PORT from .launchnotebook import UNIXSocketNotebookTestBase from ..utils import urlencode_unix_socket, urlencode_unix_socket_path -@skip_win32 +@pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_shutdown_sock_server_integration(): sock = UNIXSocketNotebookTestBase.sock url = urlencode_unix_socket(sock).encode() @@ -87,7 +88,7 @@ def _ensure_stopped(check_msg='There are no running servers'): raise AssertionError('expected all servers to be stopped') -@onlyif(bool(os.environ.get('RUN_NB_INTEGRATION_TESTS', False)), 'for local testing') +@pytest.mark.skipif(not bool(os.environ.get('RUN_NB_INTEGRATION_TESTS', False)), reason="for local testing") def test_stop_multi_integration(): """Tests lifecycle behavior for mixed-mode server types w/ default ports. 
@@ -137,7 +138,7 @@ def test_stop_multi_integration(): p3.wait() -@skip_win32 +@pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_launch_socket_collision(): """Tests UNIX socket in-use detection for lifecycle correctness.""" sock = UNIXSocketNotebookTestBase.sock From e67c331a4e1c1cf7f37b7960f730bdab21dfb534 Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Tue, 12 Jan 2021 11:52:28 -0800 Subject: [PATCH 14/31] Add shim to mathjaxutils.js --- notebook/static/notebook/js/mathjaxutils.js | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 notebook/static/notebook/js/mathjaxutils.js diff --git a/notebook/static/notebook/js/mathjaxutils.js b/notebook/static/notebook/js/mathjaxutils.js new file mode 100644 index 0000000000..a8257009aa --- /dev/null +++ b/notebook/static/notebook/js/mathjaxutils.js @@ -0,0 +1,8 @@ + +define([ + 'base/js/mathjaxutils' +], function(mathjaxutils) { + "use strict" + + return mathjaxutils; +}); From e437bad1a5cd818fa1c40ad8d4936bade5e92684 Mon Sep 17 00:00:00 2001 From: Matthias Bussonnier Date: Wed, 13 Jan 2021 08:09:13 -0800 Subject: [PATCH 15/31] DOC: Server extension, extra docs on configuration/authentication. That give most of the information a user needs to write a full-fledge extension without having to look into tornado documentation: - Authentication: I think all handler should be authenticated by default. - Managing state and accessing server state from the handlers: I don't think there is many interesting things you can do without accessing server state and configuration. 
--- docs/source/extending/handlers.rst | 47 ++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) diff --git a/docs/source/extending/handlers.rst b/docs/source/extending/handlers.rst index e853d357df..bc38cdb004 100644 --- a/docs/source/extending/handlers.rst +++ b/docs/source/extending/handlers.rst @@ -122,6 +122,53 @@ following: route_pattern = url_path_join(web_app.settings['base_url'], '/hello') web_app.add_handlers(host_pattern, [(route_pattern, HelloWorldHandler)]) + +Extra Parameters and authentication +=================================== + +Here is a quick rundown of what you need to know to pass extra parameters to the handler and enable authentication: + + - extra arguments to the ``__init__`` constructor are given in a dictionary after the handler class in ``add_handlers``: + +.. code:: python + + + class HelloWorldHandler(IPythonHandler): + + def __init__(self, *args, **kwargs): + self.extra = kwargs.pop('extra') + ... + + def load_jupyter_server_extension(nb_server_app): + + ... + + web_app.add_handlers(host_pattern, + [ + (route_pattern, HelloWorldHandler, {"extra": nb_server_app.extra}) + ]) + + +All handler methods that require authentication _MUST_ be decorated with ``@tornado.web.authenticated``: + + +.. code:: python + + from tornado import web + + class HelloWorldHandler(IPythonHandler): + + ... + + @web.authenticated + def get(self, *args, **kwargs): + ... + + @web.authenticated + def post(self, *args, **kwargs): + ... + + References: 1. 
`Peter Parente's Mindtrove `__ From 24d4492044a21db410b9c4d262fcefd2b88e9b25 Mon Sep 17 00:00:00 2001 From: Zachary Sailer Date: Wed, 13 Jan 2021 08:27:02 -0800 Subject: [PATCH 16/31] add change log for 6.1.6 and 6.2.0 (#5936) --- docs/source/changelog.rst | 44 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst index 360e4120df..a8ea387464 100644 --- a/docs/source/changelog.rst +++ b/docs/source/changelog.rst @@ -22,6 +22,50 @@ We strongly recommend that you upgrade pip to version 9+ of pip before upgrading ``pip --version``. + +.. _release-6.2.0: + +6.2.0 +----- + +Merged PRs +~~~~~~~~~~ + +- Increase minimum tornado version (:ghpull:`5933`) +- Adjust skip decorators to avoid remaining dependency on nose (:ghpull:`5932`) +- Ensure that cell ids persist after save (:ghpull:`5928`) +- Add reconnection to Gateway (form nb2kg) (:ghpull:`5924`) +- Fix some typos (:ghpull:`5917`) +- Handle TrashPermissionError, now that it exists (:ghpull:`5894`) + +Thank you to all the contributors: + +- @kevin-bates +- @mishaschwartz +- @oyvsyo +- @user202729 +- @stefanor + +.. _release-6.1.6: + +6.1.6 +----- + +Merged PRs +~~~~~~~~~~ + +* do not require nose for testing (:ghpull:`5826`) +* [docs] Update Chinese and Hindi readme.md (:ghpull:`5823`) +* Add support for creating terminals via GET (:ghpull:`5813`) +* Made doc translations in Hindi and Chinese (:ghpull:`5787`) + +Thank you to all the contributors: + +- @pgajdos +- @rjn01 +- @kevin-bates +- @virejdasani + .. 
_release-6.1.5: 6.1.5 From 57fcc413b4cd8e2c15d3fd4d14f79f3f339cfd12 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 13 Jan 2021 10:30:49 -0600 Subject: [PATCH 17/31] Release 6.2.0 --- notebook/_version.py | 2 +- notebook/static/base/js/namespace.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/notebook/_version.py b/notebook/_version.py index 7004fc7ad3..7932ea856f 100644 --- a/notebook/_version.py +++ b/notebook/_version.py @@ -9,5 +9,5 @@ # Next beta/alpha/rc release: The version number for beta is X.Y.ZbN **without dots**. -version_info = (7, 0, 0, '.dev0') +version_info = (6, 2, 0) __version__ = '.'.join(map(str, version_info[:3])) + ''.join(version_info[3:]) diff --git a/notebook/static/base/js/namespace.js b/notebook/static/base/js/namespace.js index 7ae2d1409c..2632f79fdf 100644 --- a/notebook/static/base/js/namespace.js +++ b/notebook/static/base/js/namespace.js @@ -73,7 +73,7 @@ define(function(){ // tree jglobal('SessionList','tree/js/sessionlist'); - Jupyter.version = "7.0.0.dev0"; + Jupyter.version = "6.2.0"; Jupyter._target = '_blank'; return Jupyter; From 358714829aa5b903d2592270e9cbe7a1b773d7bb Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Wed, 13 Jan 2021 10:32:47 -0600 Subject: [PATCH 18/31] Back to dev version --- notebook/_version.py | 2 +- notebook/static/base/js/namespace.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/notebook/_version.py b/notebook/_version.py index 7932ea856f..7004fc7ad3 100644 --- a/notebook/_version.py +++ b/notebook/_version.py @@ -9,5 +9,5 @@ # Next beta/alpha/rc release: The version number for beta is X.Y.ZbN **without dots**. 
-version_info = (6, 2, 0) +version_info = (7, 0, 0, '.dev0') __version__ = '.'.join(map(str, version_info[:3])) + ''.join(version_info[3:]) diff --git a/notebook/static/base/js/namespace.js b/notebook/static/base/js/namespace.js index 2632f79fdf..7ae2d1409c 100644 --- a/notebook/static/base/js/namespace.js +++ b/notebook/static/base/js/namespace.js @@ -73,7 +73,7 @@ define(function(){ // tree jglobal('SessionList','tree/js/sessionlist'); - Jupyter.version = "6.2.0"; + Jupyter.version = "7.0.0.dev0"; Jupyter._target = '_blank'; return Jupyter; From 364ac5257d03fc2b3026083b9eb8ff5537139f96 Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Wed, 13 Jan 2021 15:07:02 -0800 Subject: [PATCH 19/31] Replace Travis and Appveyor with Github Actions --- .github/workflows/docs.yml | 52 +++++++++ .github/workflows/js.yml | 61 ++++++++++ .github/workflows/python-nbconvert.yml | 53 +++++++++ .github/workflows/python.yml | 53 +++++++++ .github/workflows/selenium.yml | 46 ++++++++ .travis.yml | 106 ------------------ appveyor.yml | 32 ------ .../tests/test_nbconvert_handlers.py | 49 ++++++-- notebook/tests/conftest.py | 9 ++ notebook/tests/test_nbextensions.py | 85 +++++++------- .../tests/test_notebookapp_integration.py | 4 + notebook/tests/test_utils.py | 3 +- 12 files changed, 358 insertions(+), 195 deletions(-) create mode 100644 .github/workflows/docs.yml create mode 100644 .github/workflows/js.yml create mode 100644 .github/workflows/python-nbconvert.yml create mode 100644 .github/workflows/python.yml create mode 100644 .github/workflows/selenium.yml delete mode 100644 .travis.yml delete mode 100644 appveyor.yml create mode 100644 notebook/tests/conftest.py diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000000..4903ee9485 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,52 @@ +name: Docs Tests +on: + push: + branches: '*' + pull_request: + branches: '*' +jobs: + build: + runs-on: ${{ matrix.os }}-latest + strategy: + 
fail-fast: false + matrix: + os: [ubuntu] + python-version: [ '3.6' ] + steps: + - name: Checkout + uses: actions/checkout@v1 + - name: Install Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x64' + - name: Upgrade packaging dependencies + run: | + pip install --upgrade pip setuptools wheel + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: Cache pip + uses: actions/cache@v1 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.py') }} + restore-keys: | + ${{ runner.os }}-pip-${{ matrix.python-version }}- + ${{ runner.os }}-pip- + - name: Install the Python dependencies + run: | + pip install -e .[test] codecov + pip install -r docs/doc-requirements.txt + wget https://github.com/jgm/pandoc/releases/download/1.19.1/pandoc-1.19.1-1-amd64.deb && sudo dpkg -i pandoc-1.19.1-1-amd64.deb + - name: List installed packages + run: | + pip freeze + pip check + - name: Run tests on documentation + run: | + EXIT_STATUS=0 + make -C docs/ html || EXIT_STATUS=$? + pytest --nbval --current-env docs || EXIT_STATUS=$? 
+ exit $EXIT_STATUS diff --git a/.github/workflows/js.yml b/.github/workflows/js.yml new file mode 100644 index 0000000000..9ce5424e20 --- /dev/null +++ b/.github/workflows/js.yml @@ -0,0 +1,61 @@ +name: Linux JS Tests + +on: + push: + branches: '*' + pull_request: + branches: '*' + +jobs: + build: + runs-on: ${{ matrix.os }}-latest + strategy: + fail-fast: false + matrix: + os: [ubuntu, macos] + group: [notebook, base, services] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: 3.8 + - name: Set up Node + uses: actions/setup-node@v1 + with: + node-version: '12.x' + + - name: Cache node modules + uses: actions/cache@v2 + env: + cache-name: cache-node-modules + with: + # npm cache files are stored in `~/.npm` on Linux/macOS + path: ~/.npm + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}- + ${{ runner.os }}-build- + ${{ runner.os }}- + + - name: Cache pip on Linux + uses: actions/cache@v1 + if: startsWith(runner.os, 'Linux') + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ matrix.python }}-${{ hashFiles('**/requirements.txt', 'setup.py') }} + restore-keys: | + ${{ runner.os }}-pip-${{ matrix.python }} + + - name: Install dependencies + run: | + pip install --upgrade pip + pip install --upgrade setuptools wheel + npm install + npm install -g casperjs@1.1.3 phantomjs-prebuilt@2.1.7 + pip install .[test] + + - name: Run Tests + run: | + python -m notebook.jstest ${{ matrix.group }} diff --git a/.github/workflows/python-nbconvert.yml b/.github/workflows/python-nbconvert.yml new file mode 100644 index 0000000000..152bd9d4f4 --- /dev/null +++ b/.github/workflows/python-nbconvert.yml @@ -0,0 +1,53 @@ +# The NBConvert Service requires pandoc. 
Instead of testing +# Pandoc on every operating system (which should already be +# done in nbconvert directly), we'll only test these services +# on ubuntu where we can easily load Pandoc from a Github +# Actions docker image (this docker image is not on other +# operating systems). +name: NBConvert Service Tests +on: + push: + branches: '*' + pull_request: + branches: '*' +jobs: + build: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: [ '3.6' , '3.7', '3.8', '3.9' ] + steps: + - name: Checkout + uses: actions/checkout@v1 + - name: Install Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x64' + - name: Setup Pandoc + uses: r-lib/actions/setup-pandoc@v1 + - name: Upgrade packaging dependencies + run: | + pip install --upgrade pip setuptools wheel + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: Cache pip + uses: actions/cache@v1 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.py') }} + restore-keys: | + ${{ runner.os }}-pip-${{ matrix.python-version }}- + ${{ runner.os }}-pip- + - name: Install the Python dependencies + run: | + pip install -e .[test] + - name: Run NBConvert Tests + run: | + pytest notebook/nbconvert/tests/ + - name: Run NBConvert Service Tests + run: | + pytest notebook/services/nbconvert/tests/ diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml new file mode 100644 index 0000000000..d427aa7355 --- /dev/null +++ b/.github/workflows/python.yml @@ -0,0 +1,53 @@ +name: Python Tests +on: + push: + branches: '*' + pull_request: + branches: '*' +jobs: + build: + runs-on: ${{ matrix.os }}-latest + strategy: + fail-fast: false + matrix: + os: [ubuntu, macos, windows] + python-version: [ '3.6' , '3.7', '3.8', '3.9' ] # Windows 3.9 fails due to the pywinpty dependency 
not working + steps: + - name: Checkout + uses: actions/checkout@v1 + - name: Install Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x64' + - name: Upgrade packaging dependencies + run: | + pip install --upgrade pip setuptools wheel --user + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: Cache pip + uses: actions/cache@v1 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ matrix.python-version }}-${{ hashFiles('setup.py') }} + restore-keys: | + ${{ runner.os }}-pip-${{ matrix.python-version }}- + ${{ runner.os }}-pip- + - name: Install the Python dependencies + run: | + pip install -e .[test] codecov + - name: List installed packages + run: | + pip freeze + pip check + - name: Run Server-side tests + run: | + pytest -vv --cov notebook --cov-branch --cov-report term-missing:skip-covered --ignore-glob=notebook/tests/selenium/* --ignore-glob=notebook/nbconvert/tests/* --ignore-glob=notebook/services/nbconvert/tests/* + - name: Run Integration Tests + run: | + pytest -v notebook/tests/test_notebookapp_integration.py --integration_tests + - name: Coverage + run: | + codecov diff --git a/.github/workflows/selenium.yml b/.github/workflows/selenium.yml new file mode 100644 index 0000000000..b3f1cd2e32 --- /dev/null +++ b/.github/workflows/selenium.yml @@ -0,0 +1,46 @@ +name: Selenium Tests + +on: + push: + branches: '*' + pull_request: + branches: '*' +jobs: + build: + runs-on: ${{ matrix.os }}-latest + strategy: + fail-fast: false + matrix: + os: [ubuntu, macos] + python-version: [ '3.6', '3.7', '3.8', '3.9' ] + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x64' + + - name: Set up Node + uses: actions/setup-node@v1 + with: + node-version: '12.x' + + - 
name: Install JS + run: | + npm install + + - name: Install Python dependencies + run: | + python -m pip install -U pip setuptools wheel + pip install --upgrade selenium + pip install pytest + pip install .[test] + + - name: Run Tests + run: | + export JUPYTER_TEST_BROWSER=firefox + export MOZ_HEADLESS=1 + pytest -sv notebook/tests/selenium diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 6ccb294711..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,106 +0,0 @@ -# http://travis-ci.org/#!/ipython/ipython -language: python - -cache: - directories: - - $HOME/.cache/bower - - $HOME/.cache/pip -python: - - 3.6 - -env: - global: - - PATH=$TRAVIS_BUILD_DIR/pandoc:$PATH - matrix: - - GROUP=js/notebook - - GROUP=python - - GROUP=js/base - - GROUP=js/services - -before_install: - - pip install --upgrade pip - - pip install --upgrade setuptools wheel pytest pytest-cov coverage codecov - - nvm install 6.9.2 - - nvm use 6.9.2 - - node --version - - npm --version - - npm install -g npm - - npm install - - | - if [[ $GROUP == js* ]]; then - npm install -g casperjs@1.1.3 phantomjs-prebuilt@2.1.7 - fi - - | - if [[ $GROUP == docs ]]; then - pip install -r docs/doc-requirements.txt - pip install --upgrade pytest - fi - - | - if [[ $GROUP == selenium ]]; then - pip install --upgrade selenium pytest - # Install Webdriver backend for Firefox: - wget https://github.com/mozilla/geckodriver/releases/download/v0.19.1/geckodriver-v0.19.1-linux64.tar.gz - mkdir geckodriver - tar -xzf geckodriver-v0.19.1-linux64.tar.gz -C geckodriver - export PATH=$PATH:$PWD/geckodriver - fi - - pip install "attrs>=17.4.0" - -install: - - pip install --pre .[test] $EXTRA_PIP - - pip freeze - - wget https://github.com/jgm/pandoc/releases/download/1.19.1/pandoc-1.19.1-1-amd64.deb && sudo dpkg -i pandoc-1.19.1-1-amd64.deb - - -script: - - jupyter kernelspec list - - | - symlinks=$(find . 
-type l| grep -v './node_modules/' | grep -v './git-hooks') - if [[ $(echo $symlinks) ]]; then - echo "Repository contains symlinks which won't work on windows:" - echo $symlinks - echo "" - false - else - true - fi - - 'if [[ $GROUP == js* ]]; then travis_retry python -m notebook.jstest ${GROUP:3}; fi' - - 'if [[ $GROUP == python ]]; then py.test -v --ignore notebook/tests/selenium --cov=notebook notebook; fi' - - 'if [[ $GROUP == selenium ]]; then py.test -sv notebook/tests/selenium; fi' - - | - if [[ $GROUP == docs ]]; then - EXIT_STATUS=0 - make -C docs/ html || EXIT_STATUS=$? - - if [[ $TRAVIS_EVENT_TYPE == cron ]]; then - make -C docs/ linkcheck || EXIT_STATUS=$?; - fi - - pytest --nbval --current-env docs || EXIT_STATUS=$? - exit $EXIT_STATUS - fi - - -matrix: - include: - - python: 3.6 - env: - - GROUP=selenium - - JUPYTER_TEST_BROWSER=firefox - - MOZ_HEADLESS=1 - addons: - firefox: 57.0 - - python: 3.5 - env: GROUP=python - - python: 3.7 - dist: xenial - env: GROUP=python - - python: 3.8 - env: GROUP=python - - python: 3.9-dev - env: GROUP=python - - python: 3.6 - env: GROUP=docs - -after_success: - - codecov diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 48a02e0faa..0000000000 --- a/appveyor.yml +++ /dev/null @@ -1,32 +0,0 @@ -# miniconda bootstrap from conda-forge recipe -matrix: - fast_finish: true - -environment: - matrix: - - CONDA_PY: 36 - CONDA_PY_SPEC: 3.6 - CONDA_INSTALL_LOCN: "C:\\Miniconda36-x64" - - CONDA_PY: 38 - CONDA_PY_SPEC: 3.8 - CONDA_INSTALL_LOCN: "C:\\Miniconda37-x64" - -platform: - - x64 - -build: off - -install: - - cmd: call %CONDA_INSTALL_LOCN%\Scripts\activate.bat - - cmd: conda config --set show_channel_urls true - - cmd: conda config --add channels conda-forge - #- cmd: conda update --yes --quiet conda - - cmd: conda install -y python=%CONDA_PY_SPEC% pyzmq tornado jupyter_client nbformat ipykernel pip nodejs pytest nose - # not using `conda install -y` on nbconvent package because there is - # currently a 
bug with the version that the anaconda installs, so we will just install it with pip - - cmd: pip install nbconvert - - cmd: python setup.py build - - cmd: pip install .[test] - -test_script: - - py.test -v notebook --ignore notebook\tests\selenium diff --git a/notebook/nbconvert/tests/test_nbconvert_handlers.py b/notebook/nbconvert/tests/test_nbconvert_handlers.py index 0708e10f74..e5af13c0c4 100644 --- a/notebook/nbconvert/tests/test_nbconvert_handlers.py +++ b/notebook/nbconvert/tests/test_nbconvert_handlers.py @@ -5,7 +5,7 @@ import shutil import requests - +import pytest from notebook.utils import url_path_join from notebook.tests.launchnotebook import NotebookTestBase, assert_http_error from nbformat import write @@ -18,6 +18,13 @@ from base64 import encodebytes +def cmd_exists(cmd): + """Check is a command exists.""" + if shutil.which(cmd) is None: + return False + return True + + class NbconvertAPI(object): """Wrapper for nbconvert API calls.""" def __init__(self, request): @@ -50,7 +57,7 @@ def list_formats(self): class APITest(NotebookTestBase): def setUp(self): nbdir = self.notebook_dir - + if not os.path.isdir(pjoin(nbdir, 'foo')): subdir = pjoin(nbdir, 'foo') @@ -64,7 +71,7 @@ def cleanup_dir(): shutil.rmtree(subdir, ignore_errors=True) nb = new_notebook() - + nb.cells.append(new_markdown_cell(u'Created by test ³')) cc1 = new_code_cell(source=u'print(2*6)') cc1.outputs.append(new_output(output_type="stream", text=u'12')) @@ -73,14 +80,17 @@ def cleanup_dir(): execution_count=1, )) nb.cells.append(cc1) - + with io.open(pjoin(nbdir, 'foo', 'testnb.ipynb'), 'w', encoding='utf-8') as f: write(nb, f, version=4) self.nbconvert_api = NbconvertAPI(self.request) - @onlyif_cmds_exist('pandoc') + @pytest.mark.skipif( + not cmd_exists('pandoc'), + reason="Pandoc wasn't found. Skipping this test." 
+ ) def test_from_file(self): r = self.nbconvert_api.from_file('html', 'foo', 'testnb.ipynb') self.assertEqual(r.status_code, 200) @@ -92,39 +102,54 @@ def test_from_file(self): self.assertIn(u'text/x-python', r.headers['Content-Type']) self.assertIn(u'print(2*6)', r.text) - @onlyif_cmds_exist('pandoc') + @pytest.mark.skipif( + not cmd_exists('pandoc'), + reason="Pandoc wasn't found. Skipping this test." + ) def test_from_file_404(self): with assert_http_error(404): self.nbconvert_api.from_file('html', 'foo', 'thisdoesntexist.ipynb') - @onlyif_cmds_exist('pandoc') + @pytest.mark.skipif( + not cmd_exists('pandoc'), + reason="Pandoc wasn't found. Skipping this test." + ) def test_from_file_download(self): r = self.nbconvert_api.from_file('python', 'foo', 'testnb.ipynb', download=True) content_disposition = r.headers['Content-Disposition'] self.assertIn('attachment', content_disposition) self.assertIn('testnb.py', content_disposition) - @onlyif_cmds_exist('pandoc') + @pytest.mark.skipif( + not cmd_exists('pandoc'), + reason="Pandoc wasn't found. Skipping this test." + ) def test_from_file_zip(self): r = self.nbconvert_api.from_file('latex', 'foo', 'testnb.ipynb', download=True) self.assertIn(u'application/zip', r.headers['Content-Type']) self.assertIn(u'.zip', r.headers['Content-Disposition']) - @onlyif_cmds_exist('pandoc') + @pytest.mark.skipif( + not cmd_exists('pandoc'), + reason="Pandoc wasn't found. Skipping this test." 
+ ) def test_from_post(self): nbmodel = self.request('GET', 'api/contents/foo/testnb.ipynb').json() - + r = self.nbconvert_api.from_post(format='html', nbmodel=nbmodel) self.assertEqual(r.status_code, 200) self.assertIn(u'text/html', r.headers['Content-Type']) self.assertIn(u'Created by test', r.text) self.assertIn(u'print', r.text) - + r = self.nbconvert_api.from_post(format='python', nbmodel=nbmodel) self.assertIn(u'text/x-python', r.headers['Content-Type']) self.assertIn(u'print(2*6)', r.text) - @onlyif_cmds_exist('pandoc') + @pytest.mark.skipif( + not cmd_exists('pandoc'), + reason="Pandoc wasn't found. Skipping this test." + ) def test_from_post_zip(self): nbmodel = self.request('GET', 'api/contents/foo/testnb.ipynb').json() diff --git a/notebook/tests/conftest.py b/notebook/tests/conftest.py new file mode 100644 index 0000000000..b9aee32cdc --- /dev/null +++ b/notebook/tests/conftest.py @@ -0,0 +1,9 @@ + + +def pytest_addoption(parser): + parser.addoption('--integration_tests', action='store_true', dest="integration_tests", + default=False, help="enable integration tests") + +def pytest_configure(config): + if not config.option.integration_tests: + setattr(config.option, 'markexpr', 'not integration_tests') \ No newline at end of file diff --git a/notebook/tests/test_nbextensions.py b/notebook/tests/test_nbextensions.py index 0bb9da3704..3d9549658a 100644 --- a/notebook/tests/test_nbextensions.py +++ b/notebook/tests/test_nbextensions.py @@ -16,7 +16,6 @@ from unittest.mock import patch -import ipython_genutils.testing.decorators as dec from ipython_genutils import py3compat from ipython_genutils.tempdir import TemporaryDirectory from notebook import nbextensions @@ -32,7 +31,7 @@ def touch(file_name, mtime=None): """ensure a file exists, and set its modification time - + returns the modification time of the file """ open(file_name, 'a').close() @@ -52,7 +51,7 @@ def test_help_output(): class TestInstallNBExtension(TestCase): - + def tempdir(self): td = 
TemporaryDirectory() self.tempdirs.append(td) @@ -109,11 +108,11 @@ def assert_dir_exists(self, path): if not os.path.exists(path): do_exist = os.listdir(os.path.dirname(path)) self.fail(u"%s should exist (found %s)" % (path, do_exist)) - + def assert_not_dir_exists(self, path): if os.path.exists(path): self.fail(u"%s should not exist" % path) - + def assert_installed(self, relative_path, user=False): if user: nbext = pjoin(self.data_dir, u'nbextensions') @@ -122,7 +121,7 @@ def assert_installed(self, relative_path, user=False): self.assert_dir_exists( pjoin(nbext, relative_path) ) - + def assert_not_installed(self, relative_path, user=False): if user: nbext = pjoin(self.data_dir, u'nbextensions') @@ -131,7 +130,7 @@ def assert_not_installed(self, relative_path, user=False): self.assert_not_dir_exists( pjoin(nbext, relative_path) ) - + def test_create_data_dir(self): """install_nbextension when data_dir doesn't exist""" with TemporaryDirectory() as td: @@ -146,7 +145,7 @@ def test_create_data_dir(self): pjoin(basename(self.src), file_name), user=True, ) - + def test_create_nbextensions_user(self): with TemporaryDirectory() as td: install_nbextension(self.src, user=True) @@ -154,7 +153,7 @@ def test_create_nbextensions_user(self): pjoin(basename(self.src), u'ƒile'), user=True ) - + def test_create_nbextensions_system(self): with TemporaryDirectory() as td: self.system_nbext = pjoin(td, u'nbextensions') @@ -164,17 +163,17 @@ def test_create_nbextensions_system(self): pjoin(basename(self.src), u'ƒile'), user=False ) - + def test_single_file(self): file_name = self.files[0] install_nbextension(pjoin(self.src, file_name)) self.assert_installed(file_name) - + def test_single_dir(self): d = u'∂ir' install_nbextension(pjoin(self.src, d)) self.assert_installed(self.files[-1]) - + def test_single_dir_trailing_slash(self): d = u'∂ir/' install_nbextension(pjoin(self.src, d)) @@ -193,11 +192,11 @@ def test_destination_dir(self): d = u'∂ir' install_nbextension(pjoin(self.src, 
d), destination = u'ƒiledest2') self.assert_installed(pjoin(u'ƒiledest2', u'∂ir2', u'ƒile2')) - + def test_install_nbextension(self): with self.assertRaises(TypeError): install_nbextension(glob.glob(pjoin(self.src, '*'))) - + def test_overwrite_file(self): with TemporaryDirectory() as d: fname = u'ƒ.js' @@ -213,7 +212,7 @@ def test_overwrite_file(self): install_nbextension(src, overwrite=True) with open(dest) as f: self.assertEqual(f.read(), 'overwrite') - + def test_overwrite_dir(self): with TemporaryDirectory() as src: base = basename(src) @@ -227,7 +226,7 @@ def test_overwrite_dir(self): install_nbextension(src, overwrite=True) self.assert_installed(pjoin(base, fname2)) self.assert_not_installed(pjoin(base, fname)) - + def test_update_file(self): with TemporaryDirectory() as d: fname = u'ƒ.js' @@ -245,7 +244,7 @@ def test_update_file(self): install_nbextension(src) with open(dest) as f: self.assertEqual(f.read(), 'overwrite') - + def test_skip_old_file(self): with TemporaryDirectory() as d: fname = u'ƒ.js' @@ -255,7 +254,7 @@ def test_skip_old_file(self): self.assert_installed(fname) dest = pjoin(self.system_nbext, fname) old_mtime = os.stat(dest).st_mtime - + mtime = touch(src, mtime - 100) install_nbextension(src) new_mtime = os.stat(dest).st_mtime @@ -269,7 +268,7 @@ def test_quiet(self): install_nbextension(self.src) self.assertEqual(stdout.getvalue(), '') self.assertEqual(stderr.getvalue(), '') - + def test_install_zip(self): path = pjoin(self.src, "myjsext.zip") with zipfile.ZipFile(path, 'w') as f: @@ -278,13 +277,13 @@ def test_install_zip(self): install_nbextension(path) self.assert_installed("a.js") self.assert_installed(pjoin("foo", "a.js")) - + def test_install_tar(self): def _add_file(f, fname, buf): info = tarfile.TarInfo(fname) info.size = len(buf) f.addfile(info, BytesIO(buf)) - + for i,ext in enumerate((".tar.gz", ".tgz", ".tar.bz2")): path = pjoin(self.src, "myjsext" + ext) with tarfile.open(path, 'w') as f: @@ -293,7 +292,7 @@ def _add_file(f, 
fname, buf): install_nbextension(path) self.assert_installed("b%i.js" % i) self.assert_installed(pjoin("foo", "b%i.js" % i)) - + def test_install_url(self): def fake_urlretrieve(url, dest): touch(dest) @@ -304,23 +303,23 @@ def fake_urlretrieve(url, dest): self.assert_installed("foo.js") install_nbextension("https://example.com/path/to/another/bar.js") self.assert_installed("bar.js") - install_nbextension("https://example.com/path/to/another/bar.js", + install_nbextension("https://example.com/path/to/another/bar.js", destination = 'foobar.js') self.assert_installed("foobar.js") finally: nbextensions.urlretrieve = save_urlretrieve - + def test_check_nbextension(self): with TemporaryDirectory() as d: f = u'ƒ.js' src = pjoin(d, f) touch(src) install_nbextension(src, user=True) - + assert check_nbextension(f, user=True) assert check_nbextension([f], user=True) assert not check_nbextension([f, pjoin('dne', f)], user=True) - + @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_install_symlink(self): with TemporaryDirectory() as d: @@ -332,7 +331,7 @@ def test_install_symlink(self): assert os.path.islink(dest) link = os.readlink(dest) self.assertEqual(link, src) - + @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_overwrite_broken_symlink(self): with TemporaryDirectory() as d: @@ -393,21 +392,21 @@ def test_nbextension_enable(self): touch(src) install_nbextension(src, user=True) enable_nbextension(section='notebook', require=u'ƒ') - + config_dir = os.path.join(_get_config_dir(user=True), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) enabled = cm.get('notebook').get('load_extensions', {}).get(u'ƒ', False) assert enabled - + def test_nbextension_disable(self): self.test_nbextension_enable() disable_nbextension(section='notebook', require=u'ƒ') - + config_dir = os.path.join(_get_config_dir(user=True), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) enabled = 
cm.get('notebook').get('load_extensions', {}).get(u'ƒ', False) assert not enabled - + def _mock_extension_spec_meta(self, section='notebook'): return { @@ -424,52 +423,52 @@ def _inject_mock_extension(self, section='notebook'): class mock(): __file__ = outer_file - + @staticmethod def _jupyter_nbextension_paths(): return [meta] - + import sys sys.modules['mockextension'] = mock - + def test_nbextensionpy_files(self): self._inject_mock_extension() install_nbextension_python('mockextension') - + assert check_nbextension('_mockdestination/index.js') assert check_nbextension(['_mockdestination/index.js']) - + def test_nbextensionpy_user_files(self): self._inject_mock_extension() install_nbextension_python('mockextension', user=True) - + assert check_nbextension('_mockdestination/index.js', user=True) assert check_nbextension(['_mockdestination/index.js'], user=True) - + def test_nbextensionpy_uninstall_files(self): self._inject_mock_extension() install_nbextension_python('mockextension', user=True) uninstall_nbextension_python('mockextension', user=True) - + assert not check_nbextension('_mockdestination/index.js') assert not check_nbextension(['_mockdestination/index.js']) - + def test_nbextensionpy_enable(self): self._inject_mock_extension('notebook') install_nbextension_python('mockextension', user=True) enable_nbextension_python('mockextension') - + config_dir = os.path.join(_get_config_dir(user=True), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) enabled = cm.get('notebook').get('load_extensions', {}).get('_mockdestination/index', False) assert enabled - + def test_nbextensionpy_disable(self): self._inject_mock_extension('notebook') install_nbextension_python('mockextension', user=True) enable_nbextension_python('mockextension') disable_nbextension_python('mockextension', user=True) - + config_dir = os.path.join(_get_config_dir(user=True), 'nbconfig') cm = BaseJSONConfigManager(config_dir=config_dir) enabled = 
cm.get('notebook').get('load_extensions', {}).get('_mockdestination/index', False) diff --git a/notebook/tests/test_notebookapp_integration.py b/notebook/tests/test_notebookapp_integration.py index fa2b642d0c..328fab05bc 100644 --- a/notebook/tests/test_notebookapp_integration.py +++ b/notebook/tests/test_notebookapp_integration.py @@ -4,6 +4,7 @@ import subprocess import sys import time +import pytest from notebook import DEFAULT_NOTEBOOK_PORT @@ -11,6 +12,9 @@ from ..utils import urlencode_unix_socket, urlencode_unix_socket_path +pytestmark = pytest.mark.integration_tests + + @pytest.mark.skipif(sys.platform == "win32", reason="do not run on windows") def test_shutdown_sock_server_integration(): sock = UNIXSocketNotebookTestBase.sock diff --git a/notebook/tests/test_utils.py b/notebook/tests/test_utils.py index a5919954fc..51f0e8accc 100644 --- a/notebook/tests/test_utils.py +++ b/notebook/tests/test_utils.py @@ -13,7 +13,6 @@ from notebook.utils import url_escape, url_unescape, is_hidden, is_file_hidden from ipython_genutils.py3compat import cast_unicode from ipython_genutils.tempdir import TemporaryDirectory -from ipython_genutils.testing.decorators import skip_if_not_win32 def test_help_output(): @@ -34,7 +33,7 @@ def test_url_escape(): path = url_escape('/path with a/notebook and space.ipynb') assert path == '/path%20with%20a/notebook%20and%20space.ipynb' - + path = url_escape('/ !@$#%^&* / test %^ notebook @#$ name.ipynb') assert path == '/%20%21%40%24%23%25%5E%26%2A%20/%20test%20%25%5E%20notebook%20%40%23%24%20name.ipynb' From b6297d011e78f32fea713a0651c5c9e8f427b164 Mon Sep 17 00:00:00 2001 From: "Afshin T. 
Darian" Date: Thu, 14 Jan 2021 16:11:37 +0000 Subject: [PATCH 20/31] Re-enable support for answer_yes flag --- notebook/notebookapp.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/notebook/notebookapp.py b/notebook/notebookapp.py index ed08d881d4..78d1d57d76 100755 --- a/notebook/notebookapp.py +++ b/notebook/notebookapp.py @@ -1906,6 +1906,13 @@ def _confirm_exit(self): """ info = self.log.info info(_('interrupted')) + # Check if answer_yes is set + if self.answer_yes: + self.log.critical(_("Shutting down...")) + # schedule stop on the main thread, + # since this might be called from a signal handler + self.io_loop.add_callback_from_signal(self.io_loop.stop) + return print(self.notebook_info()) yes = _('y') no = _('n') From c89c359906f1ae54c541c8009b452f044d787679 Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Sun, 17 Jan 2021 09:28:44 -0800 Subject: [PATCH 21/31] Increase culling test idle timeout --- .../services/kernels/tests/test_kernels_api.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/notebook/services/kernels/tests/test_kernels_api.py b/notebook/services/kernels/tests/test_kernels_api.py index 36c441c2c7..010ca63346 100644 --- a/notebook/services/kernels/tests/test_kernels_api.py +++ b/notebook/services/kernels/tests/test_kernels_api.py @@ -237,6 +237,10 @@ def test_config(self): self.assertEqual(self.notebook.kernel_manager.allowed_message_types, ['kernel_info_request']) +CULL_TIMEOUT = 5 +CULL_INTERVAL = 1 + + class KernelCullingTest(NotebookTestBase): """Test kernel culling """ @@ -244,9 +248,9 @@ class KernelCullingTest(NotebookTestBase): def get_argv(cls): argv = super(KernelCullingTest, cls).get_argv() - # Enable culling with 2s timeout and 1s intervals - argv.extend(['--MappingKernelManager.cull_idle_timeout=2', - '--MappingKernelManager.cull_interval=1', + # Enable culling with 5s timeout and 1s intervals + argv.extend(['--MappingKernelManager.cull_idle_timeout={}'.format(CULL_TIMEOUT), + 
'--MappingKernelManager.cull_interval={}'.format(CULL_INTERVAL), '--MappingKernelManager.cull_connected=False']) return argv @@ -270,8 +274,9 @@ def test_culling(self): assert self.get_cull_status(kid) # not connected, should be culled def get_cull_status(self, kid): + frequency = 0.5 culled = False - for i in range(15): # Need max of 3s to ensure culling timeout exceeded + for _ in range(int((CULL_TIMEOUT + CULL_INTERVAL)/frequency)): # Timeout + Interval will ensure cull try: self.kern_api.get(kid) except HTTPError as e: @@ -279,5 +284,5 @@ def get_cull_status(self, kid): culled = True break else: - time.sleep(0.2) + time.sleep(frequency) return culled From 1c20ad9d0f4cd7b9d3a9dfdab14d4a868218c2cd Mon Sep 17 00:00:00 2001 From: insolor Date: Mon, 18 Jan 2021 12:40:43 +0300 Subject: [PATCH 22/31] Fix Russain translation for "In" and "Out" --- notebook/i18n/ru_RU/LC_MESSAGES/nbjs.po | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/notebook/i18n/ru_RU/LC_MESSAGES/nbjs.po b/notebook/i18n/ru_RU/LC_MESSAGES/nbjs.po index e3292aaa1b..644d2e18b1 100644 --- a/notebook/i18n/ru_RU/LC_MESSAGES/nbjs.po +++ b/notebook/i18n/ru_RU/LC_MESSAGES/nbjs.po @@ -753,7 +753,7 @@ msgstr "Невозможно выполнить ячейку, так как яд #: notebook/static/notebook/js/codecell.js:472 msgid "In" -msgstr "В" +msgstr "Ввод" #: notebook/static/notebook/js/kernelselector.js:269 #, python-format @@ -1329,7 +1329,7 @@ msgstr "" #: notebook/static/notebook/js/outputarea.js:468 #, python-format msgid "Out[%d]:" -msgstr "Выход[%d]:" +msgstr "Вывод[%d]:" #: notebook/static/notebook/js/outputarea.js:577 #, python-format From 8f2fbdb331e59b3bcac2749c7b760b38ca354ef2 Mon Sep 17 00:00:00 2001 From: insolor Date: Mon, 18 Jan 2021 12:49:05 +0300 Subject: [PATCH 23/31] Make more human Russain translation of the "Toggle" term --- notebook/i18n/ru_RU/LC_MESSAGES/nbui.po | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/notebook/i18n/ru_RU/LC_MESSAGES/nbui.po 
b/notebook/i18n/ru_RU/LC_MESSAGES/nbui.po index 665e1960f6..f4fde78883 100644 --- a/notebook/i18n/ru_RU/LC_MESSAGES/nbui.po +++ b/notebook/i18n/ru_RU/LC_MESSAGES/nbui.po @@ -84,11 +84,11 @@ msgstr "Показать/cкрыть логотип и название блок #: notebook/templates/edit.html:71 notebook/templates/notebook.html:163 msgid "Toggle Header" -msgstr "Триггер заголовка" +msgstr "Показать/скрыть заголовок" #: notebook/templates/edit.html:72 notebook/templates/notebook.html:171 msgid "Toggle Line Numbers" -msgstr "Триггер номеров строк" +msgstr "Показать/скрыть номера строк" #: notebook/templates/edit.html:75 msgid "Language" @@ -302,7 +302,7 @@ msgstr "Показать/скрыть значки действий (ниже с #: notebook/templates/notebook.html:167 msgid "Toggle Toolbar" -msgstr "Триггер панели мониторинга" +msgstr "Показать/скрыть панель мониторинга" #: notebook/templates/notebook.html:170 msgid "Show/Hide line numbers in cells" From 3865b7f2e5b9af6600292bff2340074da02fe2d5 Mon Sep 17 00:00:00 2001 From: "Afshin T. Darian" Date: Thu, 21 Jan 2021 16:12:57 +0000 Subject: [PATCH 24/31] Allow jupyter_server-based contents managers in notebook --- notebook/notebookapp.py | 33 +- notebook/services/sessions/sessionmanager.py | 42 ++- notebook/tests/test_traittypes.py | 80 +++++ notebook/traittypes.py | 349 +++++++++++++++++++ 4 files changed, 486 insertions(+), 18 deletions(-) create mode 100644 notebook/tests/test_traittypes.py create mode 100644 notebook/traittypes.py diff --git a/notebook/notebookapp.py b/notebook/notebookapp.py index 78d1d57d76..b9d64beaf7 100755 --- a/notebook/notebookapp.py +++ b/notebook/notebookapp.py @@ -121,6 +121,7 @@ urlencode_unix_socket_path, urljoin, ) +from .traittypes import TypeFromClasses # Check if we can use async kernel management try: @@ -1379,13 +1380,41 @@ def _update_mathjax_config(self, change): (shutdown the notebook server).""" ) - contents_manager_class = Type( + # We relax this trait to handle Contents Managers using jupyter_server + # as the core 
backend. + contents_manager_class = TypeFromClasses( default_value=LargeFileManager, - klass=ContentsManager, + klasses=[ + ContentsManager, + # To make custom ContentsManagers both forward+backward + # compatible, we'll relax the strictness of this trait + # and allow jupyter_server contents managers to pass + # through. If jupyter_server is not installed, this class + # will be ignored. + 'jupyter_server.contents.services.managers.ContentsManager' + ], config=True, help=_('The notebook manager class to use.') ) + # Throws a deprecation warning to jupyter_server based contents managers. + @observe('contents_manager_class') + def _observe_contents_manager_class(self, change): + new = change['new'] + # If 'new' is a class, get a string representing the import + # module path. + if inspect.isclass(new): + new = new.__module__ + + if new.startswith('jupyter_server'): + self.log.warn( + "The specified 'contents_manager_class' class inherits a manager from the " + "'jupyter_server' package. These (future-looking) managers are not " + "guaranteed to work with the 'notebook' package. For longer term support " + "consider switching to NBClassic—a notebook frontend that leverages " + "Jupyter Server as its server backend." 
+ ) + kernel_manager_class = Type( default_value=MappingKernelManager, klass=MappingKernelManager, diff --git a/notebook/services/sessions/sessionmanager.py b/notebook/services/sessions/sessionmanager.py index 63e1844829..92b2a73454 100644 --- a/notebook/services/sessions/sessionmanager.py +++ b/notebook/services/sessions/sessionmanager.py @@ -18,24 +18,34 @@ from traitlets import Instance from notebook.utils import maybe_future - +from notebook.traittypes import InstanceFromClasses class SessionManager(LoggingConfigurable): kernel_manager = Instance('notebook.services.kernels.kernelmanager.MappingKernelManager') - contents_manager = Instance('notebook.services.contents.manager.ContentsManager') - + contents_manager = InstanceFromClasses( + klasses=[ + 'notebook.services.contents.manager.ContentsManager', + # To make custom ContentsManagers both forward+backward + # compatible, we'll relax the strictness of this trait + # and allow jupyter_server contents managers to pass + # through. If jupyter_server is not installed, this class + # will be ignored. 
+ 'jupyter_server.services.contents.manager.ContentsManager' + ] + ) + # Session database initialized below _cursor = None _connection = None _columns = {'session_id', 'path', 'name', 'type', 'kernel_id'} - + @property def cursor(self): """Start a cursor and create a database called 'session'""" if self._cursor is None: self._cursor = self.connection.cursor() - self._cursor.execute("""CREATE TABLE session + self._cursor.execute("""CREATE TABLE session (session_id, path, name, type, kernel_id)""") return self._cursor @@ -46,7 +56,7 @@ def connection(self): self._connection = sqlite3.connect(':memory:') self._connection.row_factory = sqlite3.Row return self._connection - + def close(self): """Close the sqlite connection""" if self._cursor is not None: @@ -106,11 +116,11 @@ def start_kernel_for_session(self, session_id, path, name, type, kernel_name): @gen.coroutine def save_session(self, session_id, path=None, name=None, type=None, kernel_id=None): """Saves the items for the session with the given session_id - + Given a session_id (and any other of the arguments), this method creates a row in the sqlite session database that holds the information for a session. - + Parameters ---------- session_id : str @@ -123,7 +133,7 @@ def save_session(self, session_id, path=None, name=None, type=None, kernel_id=No the type of the session kernel_id : str a uuid for the kernel associated with this session - + Returns ------- model : dict @@ -138,7 +148,7 @@ def save_session(self, session_id, path=None, name=None, type=None, kernel_id=No @gen.coroutine def get_session(self, **kwargs): """Returns the model for a particular session. - + Takes a keyword argument and searches for the value in the session database, then returns the rest of the session's info. 
@@ -151,7 +161,7 @@ def get_session(self, **kwargs): Returns ------- model : dict - returns a dictionary that includes all the information from the + returns a dictionary that includes all the information from the session described by the kwarg. """ if not kwargs: @@ -185,17 +195,17 @@ def get_session(self, **kwargs): @gen.coroutine def update_session(self, session_id, **kwargs): """Updates the values in the session database. - + Changes the values of the session with the given session_id - with the values from the keyword arguments. - + with the values from the keyword arguments. + Parameters ---------- session_id : str a uuid that identifies a session in the sqlite3 database **kwargs : str the key must correspond to a column title in session database, - and the value replaces the current value in the session + and the value replaces the current value in the session with session_id. """ yield maybe_future(self.get_session(session_id=session_id)) @@ -228,7 +238,7 @@ def row_to_model(self, row, tolerate_culled=False): # If caller wishes to tolerate culled kernels, log a warning # and return None. Otherwise, raise KeyError with a similar # message. - self.cursor.execute("DELETE FROM session WHERE session_id=?", + self.cursor.execute("DELETE FROM session WHERE session_id=?", (row['session_id'],)) msg = "Kernel '{kernel_id}' appears to have been culled or died unexpectedly, " \ "invalidating session '{session_id}'. 
The session has been removed.".\ diff --git a/notebook/tests/test_traittypes.py b/notebook/tests/test_traittypes.py new file mode 100644 index 0000000000..69c268223f --- /dev/null +++ b/notebook/tests/test_traittypes.py @@ -0,0 +1,80 @@ +import pytest +from traitlets import HasTraits, TraitError +from traitlets.utils.importstring import import_item + +from notebook.traittypes import ( + InstanceFromClasses, + TypeFromClasses +) +from notebook.services.contents.largefilemanager import LargeFileManager + + +class DummyClass: + """Dummy class for testing Instance""" + + +class DummyInt(int): + """Dummy class for testing types.""" + + +class Thing(HasTraits): + + a = InstanceFromClasses( + default_value=2, + klasses=[ + int, + str, + DummyClass, + ] + ) + + b = TypeFromClasses( + default_value=None, + allow_none=True, + klasses=[ + DummyClass, + int, + 'notebook.services.contents.manager.ContentsManager' + ] + ) + + +class TestInstanceFromClasses: + + @pytest.mark.parametrize( + 'value', + [1, 'test', DummyClass()] + ) + def test_good_values(self, value): + thing = Thing(a=value) + assert thing.a == value + + @pytest.mark.parametrize( + 'value', + [2.4, object()] + ) + def test_bad_values(self, value): + with pytest.raises(TraitError) as e: + thing = Thing(a=value) + + +class TestTypeFromClasses: + + @pytest.mark.parametrize( + 'value', + [DummyClass, DummyInt, LargeFileManager, + 'notebook.services.contents.manager.ContentsManager'] + ) + def test_good_values(self, value): + thing = Thing(b=value) + if isinstance(value, str): + value = import_item(value) + assert thing.b == value + + @pytest.mark.parametrize( + 'value', + [float, object] + ) + def test_bad_values(self, value): + with pytest.raises(TraitError) as e: + thing = Thing(b=value) diff --git a/notebook/traittypes.py b/notebook/traittypes.py new file mode 100644 index 0000000000..226657c1f4 --- /dev/null +++ b/notebook/traittypes.py @@ -0,0 +1,349 @@ +import inspect +from traitlets import ClassBasedTraitType, 
TraitError, Undefined, warn
+ Examples + -------- + Indefinite description: + >>> describe("a", object()) + 'an object' + >>> describe("a", object) + 'an object' + >>> describe("a", type(object)) + 'a type' + Definite description: + >>> describe("the", object()) + "the object at '0x10741f1b0'" + >>> describe("the", object) + "the type 'object'" + >>> describe("the", type(object)) + "the type 'type'" + Definitely named description: + >>> describe("the", object(), "I made") + 'the object I made' + >>> describe("the", object, "I will use") + 'the object I will use' + """ + if isinstance(article, str): + article = article.lower() + + if not inspect.isclass(value): + typename = type(value).__name__ + else: + typename = value.__name__ + if verbose: + typename = _prefix(value) + typename + + if article == "the" or (article is None and not inspect.isclass(value)): + if name is not None: + result = "{} {}".format(typename, name) + if article is not None: + return add_article(result, True, capital) + else: + return result + else: + tick_wrap = False + if inspect.isclass(value): + name = value.__name__ + elif isinstance(value, types.FunctionType): + name = value.__name__ + tick_wrap = True + elif isinstance(value, types.MethodType): + name = value.__func__.__name__ + tick_wrap = True + elif type(value).__repr__ in (object.__repr__, type.__repr__): + name = "at '%s'" % hex(id(value)) + verbose = False + else: + name = repr(value) + verbose = False + if verbose: + name = _prefix(value) + name + if tick_wrap: + name = name.join("''") + return describe(article, value, name=name, + verbose=verbose, capital=capital) + elif article in ("a", "an") or article is None: + if article is None: + return typename + return add_article(typename, False, capital) + else: + raise ValueError("The 'article' argument should " + "be 'the', 'a', 'an', or None not %r" % article) + + + def add_article(name, definite=False, capital=False): + """Returns the string with a prepended article. 
+ The input does not need to begin with a character.
+ """ + if default_value is Undefined: + new_default_value = object if (klasses is None) else klasses + else: + new_default_value = default_value + + if klasses is None: + if (default_value is None) or (default_value is Undefined): + klasses = [object] + else: + klasses = [default_value] + + # OneOfType requires a list of klasses to be specified (different than Type). + if not isinstance(klasses, (list, tuple, set)): + raise TraitError("`klasses` must be a list of class names (type is str) or classes.") + + for klass in klasses: + if not (inspect.isclass(klass) or isinstance(klass, str)): + raise TraitError("A OneOfType trait must specify a list of classes.") + + # Store classes. + self.klasses = klasses + + super().__init__(new_default_value, **kwargs) + + def subclass_from_klasses(self, value): + "Check that a given class is a subclasses found in the klasses list." + return any(issubclass(value, klass) for klass in self.importable_klasses) + + def validate(self, obj, value): + """Validates that the value is a valid object instance.""" + if isinstance(value, str): + try: + value = self._resolve_string(value) + except ImportError: + raise TraitError("The '%s' trait of %s instance must be a type, but " + "%r could not be imported" % (self.name, obj, value)) + try: + if self.subclass_from_klasses(value): + return value + except Exception: + pass + + self.error(obj, value) + + def info(self): + """Returns a description of the trait.""" + result = "a subclass of " + for klass in self.klasses: + if not isinstance(klass, str): + klass = klass.__module__ + '.' + klass.__name__ + result += f"{klass} or " + # Strip the last "or" + result = result.strip(" or ") + if self.allow_none: + return result + ' or None' + return result + + def instance_init(self, obj): + self._resolve_classes() + super().instance_init(obj) + + def _resolve_classes(self): + # Resolve all string names to actual classes. 
+ self.importable_klasses = [] + for klass in self.klasses: + if isinstance(klass, str): + try: + klass = self._resolve_string(klass) + self.importable_klasses.append(klass) + except: + warn(f"{klass} is not importable. Is it installed?", ImportWarning) + else: + self.importable_klasses.append(klass) + + if isinstance(self.default_value, str): + self.default_value = self._resolve_string(self.default_value) + + def default_value_repr(self): + value = self.default_value + if isinstance(value, str): + return repr(value) + else: + return repr(f'{value.__module__}.{value.__name__}') + + +class InstanceFromClasses(ClassBasedTraitType): + """A trait whose value must be an instance of a class in a specified list of classes. + The value can also be an instance of a subclass of the specified classes. + Subclasses can declare default classes by overriding the klass attribute + """ + def __init__(self, klasses=None, args=None, kw=None, **kwargs): + """Construct an Instance trait. + This trait allows values that are instances of a particular + class or its subclasses. Our implementation is quite different + from that of enthough.traits as we don't allow instances to be used + for klass and we handle the ``args`` and ``kw`` arguments differently. + Parameters + ---------- + klasses : list of classes or class_names (str) + The class that forms the basis for the trait. Class names + can also be specified as strings, like 'foo.bar.Bar'. + args : tuple + Positional arguments for generating the default value. + kw : dict + Keyword arguments for generating the default value. + allow_none : bool [ default False ] + Indicates whether None is allowed as a value. + Notes + ----- + If both ``args`` and ``kw`` are None, then the default value is None. + If ``args`` is a tuple and ``kw`` is a dict, then the default is + created as ``klass(*args, **kw)``. If exactly one of ``args`` or ``kw`` is + None, the None is replaced by ``()`` or ``{}``, respectively. 
+ """ + # If class + if klasses is None: + self.klasses = klasses + # Verify all elements are either classes or strings. + elif all(inspect.isclass(k) or isinstance(k, str) for k in klasses): + self.klasses = klasses + else: + raise TraitError('The klasses attribute must be a list of class names or classes' + ' not: %r' % klass) + + if (kw is not None) and not isinstance(kw, dict): + raise TraitError("The 'kw' argument must be a dict or None.") + if (args is not None) and not isinstance(args, tuple): + raise TraitError("The 'args' argument must be a tuple or None.") + + self.default_args = args + self.default_kwargs = kw + + super(InstanceFromClasses, self).__init__(**kwargs) + + def instance_from_importable_klasses(self, value): + "Check that a given class is a subclasses found in the klasses list." + return any(isinstance(value, klass) for klass in self.importable_klasses) + + def validate(self, obj, value): + if self.instance_from_importable_klasses(value): + return value + else: + self.error(obj, value) + + def info(self): + result = "an instance of " + for klass in self.klasses: + if isinstance(klass, str): + result += klass + else: + result += describe("a", klass) + result += " or " + result = result.strip(" or ") + if self.allow_none: + result += ' or None' + return result + + def instance_init(self, obj): + self._resolve_classes() + super().instance_init(obj) + + def _resolve_classes(self): + # Resolve all string names to actual classes. + self.importable_klasses = [] + for klass in self.klasses: + if isinstance(klass, str): + try: + klass = self._resolve_string(klass) + self.importable_klasses.append(klass) + except: + warn(f"{klass} is not importable. 
Is it installed?", ImportWarning) + else: + self.importable_klasses.append(klass) + + def make_dynamic_default(self): + if (self.default_args is None) and (self.default_kwargs is None): + return None + return self.klass(*(self.default_args or ()), + **(self.default_kwargs or {})) + + def default_value_repr(self): + return repr(self.make_dynamic_default()) + + def from_string(self, s): + return _safe_literal_eval(s) From 2a90e23434cb489e2499d405593f258bb22e5448 Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Wed, 27 Jan 2021 11:57:09 -0800 Subject: [PATCH 25/31] Drop Python 3.5 --- notebook/_version.py | 2 +- notebook/notebookapp.py | 3 --- notebook/services/kernels/tests/test_kernels_api.py | 2 -- notebook/services/sessions/tests/test_sessions_api.py | 2 -- notebook/static/base/js/namespace.js | 2 +- setup.py | 11 ++++++----- 6 files changed, 8 insertions(+), 14 deletions(-) diff --git a/notebook/_version.py b/notebook/_version.py index 7004fc7ad3..6d8d7af78c 100644 --- a/notebook/_version.py +++ b/notebook/_version.py @@ -9,5 +9,5 @@ # Next beta/alpha/rc release: The version number for beta is X.Y.ZbN **without dots**. -version_info = (7, 0, 0, '.dev0') +version_info = (6, 3, 0, '.dev0') __version__ = '.'.join(map(str, version_info[:3])) + ''.join(version_info[3:]) diff --git a/notebook/notebookapp.py b/notebook/notebookapp.py index 78d1d57d76..90f16e2338 100755 --- a/notebook/notebookapp.py +++ b/notebook/notebookapp.py @@ -1611,9 +1611,6 @@ def init_configurables(self): ) # Ensure the appropriate version of Python and jupyter_client is available. if isinstance(self.kernel_manager, AsyncMappingKernelManager): - if sys.version_info < (3, 6): # Can be removed once 3.5 is dropped. - raise ValueError("You are using `AsyncMappingKernelManager` in Python 3.5 (or lower) " - "which is not supported. Please upgrade Python to 3.6+ or change kernel managers.") if not async_kernel_mgmt_available: # Can be removed once jupyter_client >= 6.1 is required. 
raise ValueError("You are using `AsyncMappingKernelManager` without an appropriate " "jupyter_client installed! Please upgrade jupyter_client or change kernel managers.") diff --git a/notebook/services/kernels/tests/test_kernels_api.py b/notebook/services/kernels/tests/test_kernels_api.py index 36c441c2c7..e8439752d9 100644 --- a/notebook/services/kernels/tests/test_kernels_api.py +++ b/notebook/services/kernels/tests/test_kernels_api.py @@ -204,8 +204,6 @@ class AsyncKernelAPITest(KernelAPITest): def setup_class(cls): if not async_testing_enabled: # Can be removed once jupyter_client >= 6.1 is required. raise SkipTest("AsyncKernelAPITest tests skipped due to down-level jupyter_client!") - if sys.version_info < (3, 6): # Can be removed once 3.5 is dropped. - raise SkipTest("AsyncKernelAPITest tests skipped due to Python < 3.6!") super(AsyncKernelAPITest, cls).setup_class() @classmethod diff --git a/notebook/services/sessions/tests/test_sessions_api.py b/notebook/services/sessions/tests/test_sessions_api.py index 0e30fc3ea5..cb4bc0bdea 100644 --- a/notebook/services/sessions/tests/test_sessions_api.py +++ b/notebook/services/sessions/tests/test_sessions_api.py @@ -273,8 +273,6 @@ class AsyncSessionAPITest(SessionAPITest): def setup_class(cls): if not async_testing_enabled: # Can be removed once jupyter_client >= 6.1 is required. raise SkipTest("AsyncSessionAPITest tests skipped due to down-level jupyter_client!") - if sys.version_info < (3, 6): # Can be removed once 3.5 is dropped. 
- raise SkipTest("AsyncSessionAPITest tests skipped due to Python < 3.6!") super(AsyncSessionAPITest, cls).setup_class() @classmethod diff --git a/notebook/static/base/js/namespace.js b/notebook/static/base/js/namespace.js index 7ae2d1409c..db0eb5b08f 100644 --- a/notebook/static/base/js/namespace.js +++ b/notebook/static/base/js/namespace.js @@ -73,7 +73,7 @@ define(function(){ // tree jglobal('SessionList','tree/js/sessionlist'); - Jupyter.version = "7.0.0.dev0"; + Jupyter.version = "6.3.0.dev0"; Jupyter._target = '_blank'; return Jupyter; diff --git a/setup.py b/setup.py index e3d189e087..035faa73e1 100755 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ name = "notebook" -if sys.version_info < (3, 5): +if sys.version_info < (3, 6): pip_message = 'This may be due to an out of date pip. Make sure you have pip >= 9.0.1.' try: import pip @@ -31,7 +31,8 @@ error = """ -Notebook 6.0+ supports Python 3.5 and above. +Notebook 6.3+ supports Python 3.6 and above. +When using Python 3.5, please install Notebook <= 6.2. When using Python 3.4 or earlier (including 2.7), please install Notebook 5.x. Python {py} detected. 
@@ -90,10 +91,10 @@ 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8' + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9' ], zip_safe = False, install_requires = [ @@ -121,7 +122,7 @@ 'test:sys_platform != "win32"': ['requests-unixsocket'], 'json-logging': ['json-logging'] }, - python_requires = '>=3.5', + python_requires = '>=3.6', entry_points = { 'console_scripts': [ 'jupyter-notebook = notebook.notebookapp:main', From ff5399a5c8609b108b6bc053dbe6468b0aa866e9 Mon Sep 17 00:00:00 2001 From: Steven Silvester Date: Thu, 28 Jan 2021 15:04:19 -0600 Subject: [PATCH 26/31] Update notebook/notebookapp.py Co-authored-by: Kevin Bates --- notebook/notebookapp.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebook/notebookapp.py b/notebook/notebookapp.py index b9d64beaf7..b7ce5c2cee 100755 --- a/notebook/notebookapp.py +++ b/notebook/notebookapp.py @@ -1407,7 +1407,7 @@ def _observe_contents_manager_class(self, change): new = new.__module__ if new.startswith('jupyter_server'): - self.log.warn( + self.log.warning( "The specified 'contents_manager_class' class inherits a manager from the " "'jupyter_server' package. These (future-looking) managers are not " "guaranteed to work with the 'notebook' package. 
For longer term support " From 3e6916447984832108a8456ce87fdcfefc3bbc51 Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Thu, 28 Jan 2021 14:51:19 -0800 Subject: [PATCH 27/31] Update GatewayKernelManager to derive from AsyncMappingKernelManager --- notebook/gateway/managers.py | 93 +++++++++++++++------------------- notebook/tests/test_gateway.py | 35 +++++++------ 2 files changed, 57 insertions(+), 71 deletions(-) diff --git a/notebook/gateway/managers.py b/notebook/gateway/managers.py index 2f647d11da..199f3f4c89 100644 --- a/notebook/gateway/managers.py +++ b/notebook/gateway/managers.py @@ -5,11 +5,11 @@ import json from socket import gaierror -from tornado import gen, web +from tornado import web from tornado.escape import json_encode, json_decode, url_escape from tornado.httpclient import HTTPClient, AsyncHTTPClient, HTTPError -from ..services.kernels.kernelmanager import MappingKernelManager +from ..services.kernels.kernelmanager import AsyncMappingKernelManager from ..services.sessions.sessionmanager import SessionManager from jupyter_client.kernelspec import KernelSpecManager @@ -303,13 +303,12 @@ def load_connection_args(self, **kwargs): return kwargs -@gen.coroutine -def gateway_request(endpoint, **kwargs): +async def gateway_request(endpoint, **kwargs): """Make an async request to kernel gateway endpoint, returns a response """ client = AsyncHTTPClient() kwargs = GatewayClient.instance().load_connection_args(**kwargs) try: - response = yield client.fetch(endpoint, **kwargs) + response = await client.fetch(endpoint, **kwargs) # Trap a set of common exceptions so that we can inform the user that their Gateway url is incorrect # or the server is not running. 
# NOTE: We do this here since this handler is called during the Notebook's startup and subsequent refreshes @@ -332,10 +331,10 @@ def gateway_request(endpoint, **kwargs): "url is valid and the Gateway instance is running.".format(GatewayClient.instance().url) ) from e - raise gen.Return(response) + return response -class GatewayKernelManager(MappingKernelManager): +class GatewayKernelManager(AsyncMappingKernelManager): """Kernel manager that supports remote kernels hosted by Jupyter Kernel or Enterprise Gateway.""" # We'll maintain our own set of kernel ids @@ -367,8 +366,7 @@ def _get_kernel_endpoint_url(self, kernel_id=None): return self.base_endpoint - @gen.coroutine - def start_kernel(self, kernel_id=None, path=None, **kwargs): + async def start_kernel(self, kernel_id=None, path=None, **kwargs): """Start a kernel for a session and return its kernel_id. Parameters @@ -403,21 +401,20 @@ def start_kernel(self, kernel_id=None, path=None, **kwargs): json_body = json_encode({'name': kernel_name, 'env': kernel_env}) - response = yield gateway_request(kernel_url, method='POST', body=json_body) + response = await gateway_request(kernel_url, method='POST', body=json_body) kernel = json_decode(response.body) kernel_id = kernel['id'] self.log.info("Kernel started: %s" % kernel_id) self.log.debug("Kernel args: %r" % kwargs) else: - kernel = yield self.get_kernel(kernel_id) + kernel = await self.get_kernel(kernel_id) kernel_id = kernel['id'] self.log.info("Using existing kernel: %s" % kernel_id) self._kernels[kernel_id] = kernel - raise gen.Return(kernel_id) + return kernel_id - @gen.coroutine - def get_kernel(self, kernel_id=None, **kwargs): + async def get_kernel(self, kernel_id=None, **kwargs): """Get kernel for kernel_id. 
Parameters @@ -428,7 +425,7 @@ def get_kernel(self, kernel_id=None, **kwargs): kernel_url = self._get_kernel_endpoint_url(kernel_id) self.log.debug("Request kernel at: %s" % kernel_url) try: - response = yield gateway_request(kernel_url, method='GET') + response = await gateway_request(kernel_url, method='GET') except web.HTTPError as error: if error.status_code == 404: self.log.warn("Kernel not found at: %s" % kernel_url) @@ -440,10 +437,9 @@ def get_kernel(self, kernel_id=None, **kwargs): kernel = json_decode(response.body) self._kernels[kernel_id] = kernel self.log.debug("Kernel retrieved: %s" % kernel) - raise gen.Return(kernel) + return kernel - @gen.coroutine - def kernel_model(self, kernel_id): + async def kernel_model(self, kernel_id): """Return a dictionary of kernel information described in the JSON standard model. @@ -453,21 +449,19 @@ def kernel_model(self, kernel_id): The uuid of the kernel. """ self.log.debug("RemoteKernelManager.kernel_model: %s", kernel_id) - model = yield self.get_kernel(kernel_id) - raise gen.Return(model) + model = await self.get_kernel(kernel_id) + return model - @gen.coroutine - def list_kernels(self, **kwargs): + async def list_kernels(self, **kwargs): """Get a list of kernels.""" kernel_url = self._get_kernel_endpoint_url() self.log.debug("Request list kernels: %s", kernel_url) - response = yield gateway_request(kernel_url, method='GET') + response = await gateway_request(kernel_url, method='GET') kernels = json_decode(response.body) self._kernels = {x['id']: x for x in kernels} - raise gen.Return(kernels) + return kernels - @gen.coroutine - def shutdown_kernel(self, kernel_id, now=False, restart=False): + async def shutdown_kernel(self, kernel_id, now=False, restart=False): """Shutdown a kernel by its kernel uuid. 
Parameters @@ -481,12 +475,11 @@ def shutdown_kernel(self, kernel_id, now=False, restart=False): """ kernel_url = self._get_kernel_endpoint_url(kernel_id) self.log.debug("Request shutdown kernel at: %s", kernel_url) - response = yield gateway_request(kernel_url, method='DELETE') + response = await gateway_request(kernel_url, method='DELETE') self.log.debug("Shutdown kernel response: %d %s", response.code, response.reason) self.remove_kernel(kernel_id) - @gen.coroutine - def restart_kernel(self, kernel_id, now=False, **kwargs): + async def restart_kernel(self, kernel_id, now=False, **kwargs): """Restart a kernel by its kernel uuid. Parameters @@ -496,11 +489,10 @@ def restart_kernel(self, kernel_id, now=False, **kwargs): """ kernel_url = self._get_kernel_endpoint_url(kernel_id) + '/restart' self.log.debug("Request restart kernel at: %s", kernel_url) - response = yield gateway_request(kernel_url, method='POST', body=json_encode({})) + response = await gateway_request(kernel_url, method='POST', body=json_encode({})) self.log.debug("Restart kernel response: %d %s", response.code, response.reason) - @gen.coroutine - def interrupt_kernel(self, kernel_id, **kwargs): + async def interrupt_kernel(self, kernel_id, **kwargs): """Interrupt a kernel by its kernel uuid. 
Parameters @@ -510,7 +502,7 @@ def interrupt_kernel(self, kernel_id, **kwargs): """ kernel_url = self._get_kernel_endpoint_url(kernel_id) + '/interrupt' self.log.debug("Request interrupt kernel at: %s", kernel_url) - response = yield gateway_request(kernel_url, method='POST', body=json_encode({})) + response = await gateway_request(kernel_url, method='POST', body=json_encode({})) self.log.debug("Interrupt kernel response: %d %s", response.code, response.reason) def shutdown_all(self, now=False): @@ -565,9 +557,8 @@ def _get_kernelspecs_endpoint_url(self, kernel_name=None): return self.base_endpoint - @gen.coroutine - def get_all_specs(self): - fetched_kspecs = yield self.list_kernel_specs() + async def get_all_specs(self): + fetched_kspecs = await self.list_kernel_specs() # get the default kernel name and compare to that of this server. # If different log a warning and reset the default. However, the @@ -583,19 +574,17 @@ def get_all_specs(self): km.default_kernel_name = remote_default_kernel_name remote_kspecs = fetched_kspecs.get('kernelspecs') - raise gen.Return(remote_kspecs) + return remote_kspecs - @gen.coroutine - def list_kernel_specs(self): + async def list_kernel_specs(self): """Get a list of kernel specs.""" kernel_spec_url = self._get_kernelspecs_endpoint_url() self.log.debug("Request list kernel specs at: %s", kernel_spec_url) - response = yield gateway_request(kernel_spec_url, method='GET') + response = await gateway_request(kernel_spec_url, method='GET') kernel_specs = json_decode(response.body) - raise gen.Return(kernel_specs) + return kernel_specs - @gen.coroutine - def get_kernel_spec(self, kernel_name, **kwargs): + async def get_kernel_spec(self, kernel_name, **kwargs): """Get kernel spec for kernel_name. 
Parameters @@ -606,7 +595,7 @@ def get_kernel_spec(self, kernel_name, **kwargs): kernel_spec_url = self._get_kernelspecs_endpoint_url(kernel_name=str(kernel_name)) self.log.debug("Request kernel spec at: %s" % kernel_spec_url) try: - response = yield gateway_request(kernel_spec_url, method='GET') + response = await gateway_request(kernel_spec_url, method='GET') except web.HTTPError as error: if error.status_code == 404: # Convert not found to KeyError since that's what the Notebook handler expects @@ -620,10 +609,9 @@ def get_kernel_spec(self, kernel_name, **kwargs): else: kernel_spec = json_decode(response.body) - raise gen.Return(kernel_spec) + return kernel_spec - @gen.coroutine - def get_kernel_spec_resource(self, kernel_name, path): + async def get_kernel_spec_resource(self, kernel_name, path): """Get kernel spec for kernel_name. Parameters @@ -636,7 +624,7 @@ def get_kernel_spec_resource(self, kernel_name, path): kernel_spec_resource_url = url_path_join(self.base_resource_endpoint, str(kernel_name), str(path)) self.log.debug("Request kernel spec resource '{}' at: {}".format(path, kernel_spec_resource_url)) try: - response = yield gateway_request(kernel_spec_resource_url, method='GET') + response = await gateway_request(kernel_spec_resource_url, method='GET') except web.HTTPError as error: if error.status_code == 404: kernel_spec_resource = None @@ -644,14 +632,13 @@ def get_kernel_spec_resource(self, kernel_name, path): raise else: kernel_spec_resource = response.body - raise gen.Return(kernel_spec_resource) + return kernel_spec_resource class GatewaySessionManager(SessionManager): kernel_manager = Instance('notebook.gateway.managers.GatewayKernelManager') - @gen.coroutine - def kernel_culled(self, kernel_id): + async def kernel_culled(self, kernel_id): """Checks if the kernel is still considered alive and returns true if its not found. 
""" - kernel = yield self.kernel_manager.get_kernel(kernel_id) - raise gen.Return(kernel is None) + kernel = await self.kernel_manager.get_kernel(kernel_id) + return kernel is None diff --git a/notebook/tests/test_gateway.py b/notebook/tests/test_gateway.py index cdf215da61..f84ba06369 100644 --- a/notebook/tests/test_gateway.py +++ b/notebook/tests/test_gateway.py @@ -38,8 +38,7 @@ def generate_model(name): return model -@gen.coroutine -def mock_gateway_request(url, **kwargs): +async def mock_gateway_request(url, **kwargs): method = 'GET' if kwargs['method']: method = kwargs['method'] @@ -51,8 +50,8 @@ def mock_gateway_request(url, **kwargs): # Fetch all kernelspecs if endpoint.endswith('/api/kernelspecs') and method == 'GET': response_buf = StringIO(json.dumps(kernelspecs)) - response = yield maybe_future(HTTPResponse(request, 200, buffer=response_buf)) - raise gen.Return(response) + response = await maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + return response # Fetch named kernelspec if endpoint.rfind('/api/kernelspecs/') >= 0 and method == 'GET': @@ -60,8 +59,8 @@ def mock_gateway_request(url, **kwargs): kspecs = kernelspecs.get('kernelspecs') if requested_kernelspec in kspecs: response_buf = StringIO(json.dumps(kspecs.get(requested_kernelspec))) - response = yield maybe_future(HTTPResponse(request, 200, buffer=response_buf)) - raise gen.Return(response) + response = await maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + return response else: raise HTTPError(404, message='Kernelspec does not exist: %s' % requested_kernelspec) @@ -75,8 +74,8 @@ def mock_gateway_request(url, **kwargs): model = generate_model(name) running_kernels[model.get('id')] = model # Register model as a running kernel response_buf = StringIO(json.dumps(model)) - response = yield maybe_future(HTTPResponse(request, 201, buffer=response_buf)) - raise gen.Return(response) + response = await maybe_future(HTTPResponse(request, 201, buffer=response_buf)) + return 
response # Fetch list of running kernels if endpoint.endswith('/api/kernels') and method == 'GET': @@ -85,8 +84,8 @@ def mock_gateway_request(url, **kwargs): model = running_kernels.get(kernel_id) kernels.append(model) response_buf = StringIO(json.dumps(kernels)) - response = yield maybe_future(HTTPResponse(request, 200, buffer=response_buf)) - raise gen.Return(response) + response = await maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + return response # Interrupt or restart existing kernel if endpoint.rfind('/api/kernels/') >= 0 and method == 'POST': @@ -94,15 +93,15 @@ def mock_gateway_request(url, **kwargs): if action == 'interrupt': if requested_kernel_id in running_kernels: - response = yield maybe_future(HTTPResponse(request, 204)) - raise gen.Return(response) + response = await maybe_future(HTTPResponse(request, 204)) + return response else: raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) elif action == 'restart': if requested_kernel_id in running_kernels: response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id))) - response = yield maybe_future(HTTPResponse(request, 204, buffer=response_buf)) - raise gen.Return(response) + response = await maybe_future(HTTPResponse(request, 204, buffer=response_buf)) + return response else: raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) else: @@ -112,16 +111,16 @@ def mock_gateway_request(url, **kwargs): if endpoint.rfind('/api/kernels/') >= 0 and method == 'DELETE': requested_kernel_id = endpoint.rpartition('/')[2] running_kernels.pop(requested_kernel_id) # Simulate shutdown by removing kernel from running set - response = yield maybe_future(HTTPResponse(request, 204)) - raise gen.Return(response) + response = await maybe_future(HTTPResponse(request, 204)) + return response # Fetch existing kernel if endpoint.rfind('/api/kernels/') >= 0 and method == 'GET': requested_kernel_id = endpoint.rpartition('/')[2] if 
requested_kernel_id in running_kernels: response_buf = StringIO(json.dumps(running_kernels.get(requested_kernel_id))) - response = yield maybe_future(HTTPResponse(request, 200, buffer=response_buf)) - raise gen.Return(response) + response = await maybe_future(HTTPResponse(request, 200, buffer=response_buf)) + return response else: raise HTTPError(404, message='Kernel does not exist: %s' % requested_kernel_id) From 198178f4bf2d5e02cbea023a5e2e283233872017 Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Thu, 28 Jan 2021 14:53:48 -0800 Subject: [PATCH 28/31] Drop use of deprecated pyzmq.ioloop --- notebook/notebookapp.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/notebook/notebookapp.py b/notebook/notebookapp.py index 78d1d57d76..3791d76c6b 100755 --- a/notebook/notebookapp.py +++ b/notebook/notebookapp.py @@ -44,11 +44,6 @@ from notebook.transutils import trans, _ -# Install the pyzmq ioloop. This has to be done before anything else from -# tornado is imported. 
-from zmq.eventloop import ioloop -ioloop.install() - # check for tornado 3.1.0 try: import tornado @@ -62,6 +57,7 @@ raise ImportError(_("The Jupyter Notebook requires tornado >= 5.0, but you have %s") % tornado.version) from tornado import httpserver +from tornado import ioloop from tornado import web from tornado.httputil import url_concat from tornado.log import LogFormatter, app_log, access_log, gen_log From 434c5cb4cb7aedea971ee4464ecbbcf88fa873b5 Mon Sep 17 00:00:00 2001 From: Kevin Bates Date: Thu, 28 Jan 2021 15:52:59 -0800 Subject: [PATCH 29/31] Skip collection of terminal tests on Windows 3.9+ --- .github/workflows/python.yml | 2 +- notebook/conftest.py | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 notebook/conftest.py diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index d427aa7355..92ee5d5214 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -11,7 +11,7 @@ jobs: fail-fast: false matrix: os: [ubuntu, macos, windows] - python-version: [ '3.6' , '3.7', '3.8', '3.9' ] # Windows 3.9 fails due to the pywinpty dependency not working + python-version: [ '3.6' , '3.7', '3.8', '3.9' ] # Windows 3.9 fails due to the pywinpty dependency not working (Issue #5967) steps: - name: Checkout uses: actions/checkout@v1 diff --git a/notebook/conftest.py b/notebook/conftest.py new file mode 100644 index 0000000000..2b1a913653 --- /dev/null +++ b/notebook/conftest.py @@ -0,0 +1,10 @@ + +import pytest + +import sys + +# TODO: Remove this hook once Issue #5967 is resolved. 
+def pytest_ignore_collect(path): + if str(path).endswith("test_terminals_api.py"): + if sys.platform.startswith('win') and sys.version_info >= (3, 9): + return True # do not collect From 2712dc42267a6150423d705f85170dc3d9986213 Mon Sep 17 00:00:00 2001 From: blair drummond Date: Fri, 5 Feb 2021 10:13:25 -0500 Subject: [PATCH 30/31] change authenticate_prometheus default --- notebook/notebookapp.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/notebook/notebookapp.py b/notebook/notebookapp.py index de1b0f6a19..fb753e88bb 100755 --- a/notebook/notebookapp.py +++ b/notebook/notebookapp.py @@ -1584,6 +1584,21 @@ def _update_server_extensions(self, change): """ ).tag(config=True) + @default('authenticate_prometheus') + def _default_authenticate_prometheus(self): + """ Authenticate Prometheus by default, unless auth is disabled. """ + auth = bool(self.password) or bool(self.token) + if auth is False: + self.log.info(_("Authentication of /metrics is OFF, since other authentication is disabled.")) + return auth + + @observe('authenticate_prometheus') + def _update_authenticate_prometheus(self, change): + newauth = change['new'] + if self.authenticate_prometheus is True and newauth is False: + self.log.info(_("Authentication of /metrics is being turned OFF.")) + self.authenticate_prometheus = newauth + # Since use of terminals is also a function of whether the terminado package is # available, this variable holds the "final indication" of whether terminal functionality # should be considered (particularly during shutdown/cleanup). 
It is enabled only From 993286209a526265ff9aee24f92581cd2b2412da Mon Sep 17 00:00:00 2001 From: Daniel Rice Date: Sun, 7 Feb 2021 11:58:38 +0000 Subject: [PATCH 31/31] Update security.rst --- docs/source/security.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/security.rst b/docs/source/security.rst index f90f9d6da5..981fb0b74b 100644 --- a/docs/source/security.rst +++ b/docs/source/security.rst @@ -72,7 +72,7 @@ and store the hashed password in your :file:`jupyter_notebook_config.json`. :command:`jupyter notebook password` command is added. -It is possible disable authentication altogether by setting the token and password to empty strings, +It is possible to disable authentication altogether by setting the token and password to empty strings, but this is **NOT RECOMMENDED**, unless authentication or access restrictions are handled at a different layer in your web application: .. sourcecode:: python