Skip to content

Commit

Permalink
7620: Start removing dependencies on requests (#7643)
Browse files Browse the repository at this point in the history
* 7620: Start removing dependencies on requests

* Patch urllib.request.urlopen instead of requests.get

* Try to fix flake8

* More work on flake8 import errors

* First attempt at using urllib with cookies

* Fix pylint/flake8

* Fix test_deliver_slice_csv_attachment

* Fix test_deliver_slice_csv_inline

* Import requests and pydruid conditionally, remove dependency on prison

* Fix flake errors

* Fix load_examples

* Please flake

* Skip tests depending on optional deps

* Try to please flake

* Address review comments

* Remove Druid-related UI

* Revert "Remove Druid-related UI"

This reverts commit d7e0f166cc3f3dd2496b4a666e177f0c191aeb0f.

* Skip a few tests more

* Put imports in right order

* Apply black patch

* Please flake

* Please black, silence flake

* Use flake8 silencing the right way

* Add deps for CI
  • Loading branch information
sturmer authored and mistercrunch committed Aug 2, 2019
1 parent cbac428 commit e23920b
Show file tree
Hide file tree
Showing 13 changed files with 248 additions and 81 deletions.
2 changes: 2 additions & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,10 @@ pip-tools==3.7.0
pre-commit==1.17.0
psycopg2-binary==2.7.5
pycodestyle==2.5.0
pydruid==0.5.6
pyhive==0.6.1
pylint==1.9.2
redis==3.2.1
requests==2.22.0
statsd==3.3.0
tox==3.11.1
2 changes: 0 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,6 @@ polyline==1.4.0
prison==0.1.2 # via flask-appbuilder
py==1.8.0 # via retry
pycparser==2.19 # via cffi
pydruid==0.5.6
pyjwt==1.7.1 # via flask-appbuilder, flask-jwt-extended
pyrsistent==0.15.4 # via jsonschema
python-dateutil==2.8.0
Expand All @@ -70,7 +69,6 @@ python-geohash==0.8.5
python3-openid==3.1.0 # via flask-openid
pytz==2019.2 # via babel, celery, pandas
pyyaml==5.1.2
requests==2.22.0
retry==0.9.2
selenium==3.141.0
simplejson==3.16.0
Expand Down
3 changes: 1 addition & 2 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,12 +89,10 @@ def get_git_sha():
"parsedatetime",
"pathlib2",
"polyline",
"pydruid>=0.5.2",
"python-dateutil",
"python-dotenv",
"python-geohash",
"pyyaml>=5.1",
"requests>=2.22.0",
"retry>=0.9.2",
"selenium>=3.141.0",
"simplejson>=3.15.0",
Expand All @@ -111,6 +109,7 @@ def get_git_sha():
"mysql": ["mysqlclient==1.4.2.post1"],
"postgres": ["psycopg2-binary==2.7.5"],
"presto": ["pyhive[presto]>=0.4.0"],
"druid": ["pydruid==0.5.2", "requests==2.22.0"],
},
author="Apache Software Foundation",
author_email="dev@superset.incubator.apache.org",
Expand Down
79 changes: 46 additions & 33 deletions superset/connectors/druid/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
# under the License.
# pylint: disable=C,R,W
# pylint: disable=invalid-unary-operand-type
# flake8: noqa I202
from collections import OrderedDict
from copy import deepcopy
from datetime import datetime, timedelta
Expand All @@ -31,20 +32,24 @@
from flask_appbuilder.models.decorators import renders
from flask_babel import lazy_gettext as _
import pandas
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.dimensions import MapLookupExtraction, RegexExtraction
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.having import Aggregation
from pydruid.utils.postaggregator import (
Const,
Field,
HyperUniqueCardinality,
Postaggregator,
Quantile,
Quantiles,
)
import requests

try:
from pydruid.client import PyDruid
from pydruid.utils.aggregators import count
from pydruid.utils.dimensions import MapLookupExtraction, RegexExtraction
from pydruid.utils.filters import Dimension, Filter
from pydruid.utils.having import Aggregation
from pydruid.utils.postaggregator import (
Const,
Field,
HyperUniqueCardinality,
Postaggregator,
Quantile,
Quantiles,
)
import requests
except ImportError:
pass
import sqlalchemy as sa
from sqlalchemy import (
Boolean,
Expand All @@ -65,36 +70,44 @@
from superset.exceptions import MetricPermException, SupersetException
from superset.models.helpers import AuditMixinNullable, ImportMixin, QueryResult
from superset.utils import core as utils, import_datasource
from superset.utils.core import DimSelector, DTTM_ALIAS, flasher

try:
from superset.utils.core import DimSelector, DTTM_ALIAS, flasher
except ImportError:
pass
DRUID_TZ = conf.get("DRUID_TZ")
POST_AGG_TYPE = "postagg"
metadata = Model.metadata # pylint: disable=no-member


# Function wrapper because bound methods cannot
# be passed to processes
def _fetch_metadata_for(datasource):
return datasource.latest_metadata()
try:
# Postaggregator might not have been imported.
    # NOTE(review): defined inside a try block because Postaggregator comes
    # from the optional pydruid dependency and may be undefined here; the
    # enclosing except NameError then skips these definitions.
    class JavascriptPostAggregator(Postaggregator):
        """Druid post-aggregator that evaluates a user-supplied JavaScript
        function over the named input fields."""

        def __init__(self, name, field_names, function):
            # Spec dict sent to Druid; "type": "javascript" selects the
            # JavaScript post-aggregation in the Druid query payload.
            self.post_aggregator = {
                "type": "javascript",
                "fieldNames": field_names,
                "name": name,
                "function": function,
            }
            self.name = name

class CustomPostAggregator(Postaggregator):
"""A way to allow users to specify completely custom PostAggregators"""

class JavascriptPostAggregator(Postaggregator):
def __init__(self, name, field_names, function):
self.post_aggregator = {
"type": "javascript",
"fieldNames": field_names,
"name": name,
"function": function,
}
self.name = name
def __init__(self, name, post_aggregator):
self.name = name
self.post_aggregator = post_aggregator


class CustomPostAggregator(Postaggregator):
"""A way to allow users to specify completely custom PostAggregators"""
except NameError:
pass

def __init__(self, name, post_aggregator):
self.name = name
self.post_aggregator = post_aggregator

# Function wrapper because bound methods cannot
# be passed to processes
def _fetch_metadata_for(datasource):
return datasource.latest_metadata()


class DruidCluster(Model, AuditMixinNullable, ImportMixin):
Expand Down
5 changes: 2 additions & 3 deletions superset/examples/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,9 @@
from io import BytesIO
import json
import os
from urllib import request
import zlib

import requests

from superset import app, db
from superset.connectors.connector_registry import ConnectorRegistry
from superset.models import core as models
Expand Down Expand Up @@ -70,7 +69,7 @@ def get_slice_json(defaults, **kwargs):


def get_example_data(filepath, is_gzip=True, make_bytes=False):
content = requests.get(f"{BASE_URL}{filepath}?raw=true").content
content = request.urlopen(f"{BASE_URL}{filepath}?raw=true").read()
if is_gzip:
content = zlib.decompress(content, zlib.MAX_WBITS | 16)
if make_bytes:
Expand Down
8 changes: 4 additions & 4 deletions superset/tasks/cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,10 +18,10 @@

import json
import logging
from urllib import request
from urllib.error import URLError

from celery.utils.log import get_task_logger
import requests
from requests.exceptions import RequestException
from sqlalchemy import and_, func

from superset import app, db
Expand Down Expand Up @@ -282,9 +282,9 @@ def cache_warmup(strategy_name, *args, **kwargs):
for url in strategy.get_urls():
try:
logger.info(f"Fetching {url}")
requests.get(url)
request.urlopen(url)
results["success"].append(url)
except RequestException:
except URLError:
logger.exception("Error warming up cache!")
results["errors"].append(url)

Expand Down
12 changes: 7 additions & 5 deletions superset/tasks/schedules.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,19 +23,18 @@
from email.utils import make_msgid, parseaddr
import logging
import time

from urllib.error import URLError
import urllib.request

import croniter
from dateutil.tz import tzlocal
from flask import render_template, Response, session, url_for
from flask_babel import gettext as __
from flask_login import login_user
import requests
from retry.api import retry_call
from selenium.common.exceptions import WebDriverException
from selenium.webdriver import chrome, firefox
import simplejson as json
from six.moves import urllib
from werkzeug.utils import parse_cookie

# Superset framework imports
Expand Down Expand Up @@ -258,8 +257,11 @@ def _get_slice_data(schedule):
for cookie in _get_auth_cookies():
cookies["session"] = cookie

response = requests.get(slice_url, cookies=cookies)
response.raise_for_status()
opener = urllib.request.build_opener()
opener.addheaders.append(("Cookie", f"session={cookies['session']}"))
response = opener.open(slice_url)
if response.getcode() != 200:
raise URLError(response.getcode())

# TODO: Move to the csv module
rows = [r.split(b",") for r in response.content.splitlines()]
Expand Down
40 changes: 25 additions & 15 deletions superset/utils/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=C,R,W
# flake8: noqa I202
"""Utility functions used across Superset"""
from datetime import date, datetime, time, timedelta
import decimal
Expand Down Expand Up @@ -51,7 +52,11 @@
import numpy
import pandas as pd
import parsedatetime
from pydruid.utils.having import Having

try:
from pydruid.utils.having import Having
except ImportError:
pass
import sqlalchemy as sa
from sqlalchemy import event, exc, select, Text
from sqlalchemy.dialects.mysql import MEDIUMTEXT
Expand All @@ -72,6 +77,25 @@

sources = {"chart": 0, "dashboard": 1, "sql_lab": 2}

try:
# Having might not have been imported.
    # NOTE(review): defined inside a try block because Having comes from the
    # optional pydruid dependency and may be undefined here; the enclosing
    # except NameError then skips this definition.
    class DimSelector(Having):
        """Druid ``dimSelector`` having-filter built from a dimension name
        and a value (both taken from keyword arguments)."""

        def __init__(self, **args):
            # Just a hack to prevent any exceptions
            Having.__init__(self, type="equalTo", aggregation=None, value=None)

            # Replace the placeholder spec with the real dimSelector clause.
            # Requires args["dimension"] and args["value"]; raises KeyError
            # if either keyword is missing.
            self.having = {
                "having": {
                    "type": "dimSelector",
                    "dimension": args["dimension"],
                    "value": args["value"],
                }
            }


except NameError:
pass


def flasher(msg, severity=None):
"""Flask's flash if available, logging call if not"""
Expand Down Expand Up @@ -179,20 +203,6 @@ def string_to_num(s: str):
return None


class DimSelector(Having):
def __init__(self, **args):
# Just a hack to prevent any exceptions
Having.__init__(self, type="equalTo", aggregation=None, value=None)

self.having = {
"having": {
"type": "dimSelector",
"dimension": args["dimension"],
"value": args["value"],
}
}


def list_minus(l: List, minus: List) -> List:
"""Returns l without what is in minus
Expand Down
Loading

0 comments on commit e23920b

Please sign in to comment.