This repository has been archived by the owner on Sep 3, 2022. It is now read-only.
/
__init__.py
150 lines (119 loc) · 5.51 KB
/
__init__.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
"""Google Cloud Datalab - notebook functionality."""
import httplib2 as _httplib2
import requests as _requests

# IPython is a hard requirement: the whole package exists to provide notebook
# magics, so fail loudly (rather than with a bare ImportError) if it is absent.
try:
  import IPython as _IPython
  import IPython.core.magic as _magic  # noqa
  import IPython.core.interactiveshell as _shell
  from IPython import get_ipython  # noqa
except ImportError:
  raise Exception('This package requires an IPython notebook installation')

import datalab.context as _context

# Import the modules that do cell magics.
import datalab.bigquery.commands
import datalab.context.commands
import datalab.data.commands
import datalab.stackdriver.commands
import datalab.storage.commands
import datalab.utils.commands

# mlalpha modules require TensorFlow, CloudML SDK, and DataFlow (installed with CloudML SDK).
# These are big dependencies and users who want to use Bigquery/Storage features may not
# want to install them.
# This __init__.py file is called when Jupyter/Datalab loads magics on startup. We don't want
# Jupyter+pydatalab fail to start because of missing TensorFlow/DataFlow. So we ignore import
# errors on mlalpha commands.
try:
  import datalab.mlalpha.commands
except Exception:
  # Deliberately best-effort: swallow any failure from the optional mlalpha
  # import (not a bare `except:`, which would also trap SystemExit and
  # KeyboardInterrupt) and just tell the user what is missing.
  print('TensorFlow and CloudML SDK are required.')

# Save the original entry points before load_ipython_extension() monkey-patches
# them, so unload_ipython_extension() can restore them.
_orig_request = _httplib2.Http.request
_orig_init = _requests.Session.__init__
_orig_run_cell_magic = _shell.InteractiveShell.run_cell_magic
_orig_run_line_magic = _shell.InteractiveShell.run_line_magic
def load_ipython_extension(shell):
  """Called when the extension is loaded.

  Installs the Datalab user agent on outgoing HTTP requests, makes magics
  more tolerant of line/cell confusion, and publishes project-id / BigQuery
  dialect helpers into the notebook's user namespace.

  Args:
    shell - (NotebookWebApplication): handle to the Notebook interactive shell instance.
  """
  # Inject our user agent on all requests by monkey-patching a wrapper around
  # httplib2.Http.request.
  def _request(self, uri, method="GET", body=None, headers=None,
               redirections=_httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None):
    if headers is None:
      headers = {}
    headers['user-agent'] = 'GoogleCloudDataLab/1.0'
    return _orig_request(self, uri, method=method, body=body, headers=headers,
                         redirections=redirections, connection_type=connection_type)
  _httplib2.Http.request = _request

  # Similarly for the requests library.
  def _init_session(self):
    _orig_init(self)
    self.headers['User-Agent'] = 'GoogleCloudDataLab/1.0'
  _requests.Session.__init__ = _init_session

  # Be more tolerant with magics. If the user specified a cell magic that doesn't
  # exist and an empty cell body but a line magic with that name exists, run that
  # instead. Conversely, if the user specified a line magic that doesn't exist but
  # a cell magic exists with that name, run the cell magic with an empty body.
  def _run_line_magic(self, magic_name, line):
    fn = self.find_line_magic(magic_name)
    if fn is None:
      cm = self.find_cell_magic(magic_name)
      if cm:
        return _run_cell_magic(self, magic_name, line, None)
    return _orig_run_line_magic(self, magic_name, line)

  def _run_cell_magic(self, magic_name, line, cell):
    # BUG FIX: _run_line_magic above invokes this with cell=None, so guard for
    # None before calling len()/isspace(), which would raise TypeError.
    if cell is None or len(cell) == 0 or cell.isspace():
      fn = self.find_line_magic(magic_name)
      if fn:
        return _orig_run_line_magic(self, magic_name, line)
      # IPython will complain if cell is empty string but not if it is None
      cell = None
    return _orig_run_cell_magic(self, magic_name, line, cell)

  _shell.InteractiveShell.run_cell_magic = _run_cell_magic
  _shell.InteractiveShell.run_line_magic = _run_line_magic

  # Define global 'project_id' and 'set_project_id' functions to manage the default project ID. We
  # do this conditionally in a try/catch to avoid the call to Context.default() when running tests
  # which mock IPython.get_ipython().
  def _get_project_id():
    try:
      return _context.Context.default().project_id
    except Exception:
      return None

  def _set_project_id(project_id):
    context = _context.Context.default()
    context.set_project_id(project_id)

  def _get_bq_dialect():
    return datalab.bigquery.Dialect.default().bq_dialect

  def _set_bq_dialect(bq_dialect):
    datalab.bigquery.Dialect.default().set_bq_dialect(bq_dialect)

  try:
    # Hoist the repeated get_ipython() lookups into one call.
    user_ns = _IPython.get_ipython().user_ns
    if 'datalab_project_id' not in user_ns:
      user_ns['datalab_project_id'] = _get_project_id
      user_ns['set_datalab_project_id'] = _set_project_id
    if 'datalab_bq_dialect' not in user_ns:
      user_ns['datalab_bq_dialect'] = _get_bq_dialect
      user_ns['set_datalab_bq_dialect'] = _set_bq_dialect
  except TypeError:
    # Raised when tests mock IPython.get_ipython() and user_ns does not
    # support membership tests.
    pass
def unload_ipython_extension(shell):
  """Called when the extension is unloaded; undoes load_ipython_extension().

  Args:
    shell - (NotebookWebApplication): handle to the Notebook interactive shell instance.
  """
  # Restore the original (pre-monkey-patch) entry points saved at import time.
  _shell.InteractiveShell.run_cell_magic = _orig_run_cell_magic
  _shell.InteractiveShell.run_line_magic = _orig_run_line_magic
  _requests.Session.__init__ = _orig_init
  _httplib2.Http.request = _orig_request
  try:
    # BUG FIX: load_ipython_extension() installs 'datalab_project_id' /
    # 'set_datalab_project_id' and the dialect pair, not 'project_id' /
    # 'set_project_id' — remove the names that were actually added.
    user_ns = _IPython.get_ipython().user_ns
    for name in ('datalab_project_id', 'set_datalab_project_id',
                 'datalab_bq_dialect', 'set_datalab_bq_dialect'):
      user_ns.pop(name, None)
  except Exception:
    pass  # We mock IPython for tests so we need this.
  # TODO(gram): unregister imports/magics/etc.