Rewrite search API to use fastapi, aioredis-py #3

Merged
merged 19 commits on May 19, 2021
3 changes: 2 additions & 1 deletion .env.test
@@ -1,7 +1,8 @@
REDIS_HOST=redis
REDIS_HOST=test_redis
REDIS_PORT=6379
REDIS_PASSWORD=password
KEY_PREFIX=sitesearch:test
ENV=test
API_KEY=super-secret
NEW_RELIC_MONITOR_MODE=off
PYTHONASYNCIODEBUG=1
2 changes: 1 addition & 1 deletion .github/workflows/docker-image.yml
@@ -10,4 +10,4 @@ jobs:
- uses: actions/checkout@v2
- run: cp .env.example .env
- run: sed -i 's/remove-if-redis-has-no-password/test/g' .env
- run: docker-compose run --entrypoint=pytest app
- run: docker-compose up test
4 changes: 4 additions & 0 deletions 300
@@ -0,0 +1,4 @@
# Netscape HTTP Cookie File
# https://curl.haxx.se/docs/http-cookies.html
# This file was generated by libcurl! Edit at your own risk.

2 changes: 1 addition & 1 deletion deploy_staging.sh
@@ -25,7 +25,7 @@ echo "Creating new app instance template $NEW_APP_TEMPLATE from $APP_TAG"

gcloud beta compute --project=redislabs-university instance-templates \
create-with-container $NEW_APP_TEMPLATE \
--machine-type=e2-micro \
--machine-type=e2-medium \
--network=projects/redislabs-university/global/networks/docsearch \
--network-tier=PREMIUM \
--metadata=google-logging-enabled=false \
22 changes: 22 additions & 0 deletions docker-compose.yml
@@ -35,3 +35,25 @@ services:
- .env
depends_on:
- redis

test_redis:
build:
context: ./
dockerfile: docker/redis/Dockerfile
ports:
- "26379:6379"
env_file:
- .env.test

test:
build:
context: ./
dockerfile: docker/app/Dockerfile
volumes:
- $PWD:/redis-sitesearch
env_file:
- .env.test
depends_on:
- test_redis
entrypoint: "pytest"

6 changes: 5 additions & 1 deletion docker/app/supervisord.conf
@@ -13,7 +13,11 @@ stderr_logfile_maxbytes=0
stdout_logfile_maxbytes = 0
stderr_logfile_maxbytes = 0
startretries=10
command=newrelic-admin run-program gunicorn -c gunicorn.conf.py --reload sitesearch.api.wsgi:app
# Production - reload does not work
command=gunicorn -c gunicorn.conf.py sitesearch.api.wsgi:app
# command=hypercorn --bind=localhost:8081 sitesearch.api.wsgi:app
# Local dev - reload works, worse performance
# command=newrelic-admin run-program uvicorn --reload --port 8081 sitesearch.api.wsgi:app

[program:nginx]
command=/usr/sbin/nginx -g "daemon off;"
8 changes: 5 additions & 3 deletions gunicorn.conf.py
@@ -15,9 +15,11 @@
worker_temp_dir = '/dev/shm'
reload = True

# Gevent worker settings
worker_class = 'gevent'
worker_connections = 5000
# https://docs.gunicorn.org/en/latest/faq.html#how-do-i-avoid-gunicorn-excessively-blocking-in-os-fchmod
worker_temp_dir = '/dev/shm'

worker_class = 'uvicorn.workers.UvicornWorker'
worker_connections = 10000

# These settings only really help with sync and thread
# worker types, but let's configure them anyway.
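
gunicorn now loads the app through uvicorn's worker class, which expects a module-level ASGI application object at sitesearch.api.wsgi:app. A minimal sketch of what that entry module might contain follows; the real module may also do more, such as New Relic initialization.

# sitesearch/api/wsgi.py (hypothetical sketch; the actual module may differ)
from sitesearch.api.app import create_app

# Module-level ASGI app object that gunicorn's UvicornWorker imports.
app = create_app()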
1 change: 1 addition & 0 deletions requirements-dev.in
@@ -1,3 +1,4 @@
pytest-asyncio==0.15.1
pytest==6.0.1
mypy==0.761
ipdb
6 changes: 5 additions & 1 deletion requirements-dev.txt
@@ -64,8 +64,12 @@ pylint==2.7.2
# via -r requirements-dev.in
pyparsing==2.4.7
# via packaging
pytest==6.0.1
pytest-asyncio==0.15.1
# via -r requirements-dev.in
pytest==6.0.1
# via
# -r requirements-dev.in
# pytest-asyncio
toml==0.10.2
# via
# ipdb
14 changes: 10 additions & 4 deletions requirements.in
@@ -1,15 +1,21 @@
fastapi-cprofile
fastapi==0.63.0
ujson==4.0.2
httpx==0.18.1
uvicorn[standard]==0.13.4
click==7.1.2
beautifulsoup4==4.9.1
redis==3.5.3
aioredis==2.0.0a1
gunicorn==20.0.4
falcon-cors==1.1.7
python-dotenv==0.14.0
Cython==0.29.21
rq==1.7.0
python-dotenv==0.14.0
rq-scheduler==0.10.0
scrapy==2.3.0
gevent==20.9.0
hiredis==1.1.0
newrelic==6.0.1.155
redisearch==2.0.0
requests==2.7.0
idna<3
--no-binary falcon falcon==2.0.0
redisearch==2.0.0
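
aioredis is pinned to the 2.0.0a1 pre-release, which replaces the aioredis 1.x API with a redis-py-style async client. A rough illustration of that client's usage follows; the URL and key are invented for the example and are not taken from this codebase.

import asyncio

import aioredis


async def main():
    # aioredis 2.x mirrors redis-py: build a client from a URL, then await commands.
    redis = aioredis.from_url("redis://localhost:6379", decode_responses=True)
    await redis.set("sitesearch:example", "hello")
    print(await redis.get("sitesearch:example"))


asyncio.run(main())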
74 changes: 64 additions & 10 deletions requirements.txt
@@ -4,8 +4,10 @@
#
# pip-compile --output-file=requirements.txt requirements.in
#
--no-binary falcon

aioredis==2.0.0a1
# via -r requirements.in
async-timeout==3.0.1
# via aioredis
attrs==20.3.0
# via
# automat
@@ -15,14 +17,19 @@ automat==20.2.0
# via twisted
beautifulsoup4==4.9.1
# via -r requirements.in
certifi==2020.12.5
# via httpx
cffi==1.14.5
# via cryptography
click==7.1.2
# via
# -r requirements.in
# rq
# uvicorn
constantly==15.1.0
# via twisted
croniter==1.0.13
# via rq-scheduler
cryptography==3.4.6
# via
# pyopenssl
@@ -32,28 +39,39 @@ cssselect==1.1.0
# via
# parsel
# scrapy
cython==0.29.21
# via -r requirements.in
falcon-cors==1.1.7
fastapi-cprofile==0.0.2
# via -r requirements.in
falcon==2.0.0
# via falcon-cors
fastapi==0.63.0
# via
# -r requirements.in
# fastapi-cprofile
gevent==20.9.0
# via -r requirements.in
greenlet==1.0.0
# via gevent
gunicorn==20.0.4
# via -r requirements.in
h11==0.12.0
# via
# httpcore
# uvicorn
hiredis==1.1.0
# via
# -r requirements.in
# redisearch
httpcore==0.13.3
# via httpx
httptools==0.1.2
# via uvicorn
httpx==0.18.1
# via -r requirements.in
hyperlink==21.0.0
# via twisted
idna==2.10
# via
# -r requirements.in
# hyperlink
# rfc3986
incremental==21.3.0
# via twisted
itemadapter==0.2.0
@@ -84,26 +102,41 @@ pyasn1==0.4.8
# service-identity
pycparser==2.20
# via cffi
pydantic==1.8.1
# via fastapi
pydispatcher==2.0.5
# via scrapy
pyopenssl==20.0.1
# via scrapy
python-dateutil==2.8.1
# via croniter
python-dotenv==0.14.0
# via -r requirements.in
# via
# -r requirements.in
# uvicorn
pyyaml==5.4.1
# via uvicorn
queuelib==1.5.0
# via scrapy
redis==3.5.3
# via
# -r requirements.in
# redisearch
# rmtest
# rq
redisearch==2.0.0
# via -r requirements.in
requests==2.7.0
# via -r requirements.in
rfc3986[idna2008]==1.4.0
# via httpx
rmtest==0.7.0
# via redisearch
rq==1.7.0
rq-scheduler==0.10.0
# via -r requirements.in
rq==1.7.0
# via
# -r requirements.in
# rq-scheduler
scrapy==2.3.0
# via -r requirements.in
service-identity==18.1.0
Expand All @@ -114,17 +147,38 @@ six==1.15.0
# parsel
# protego
# pyopenssl
# python-dateutil
# redisearch
# w3lib
sniffio==1.2.0
# via
# httpcore
# httpx
soupsieve==2.2.1
# via beautifulsoup4
starlette==0.13.6
# via fastapi
twisted==21.2.0
# via scrapy
typing-extensions==3.10.0.0
# via
# aioredis
# pydantic
ujson==4.0.2
# via -r requirements.in
uvicorn[standard]==0.13.4
# via -r requirements.in
uvloop==0.15.2
# via uvicorn
w3lib==1.22.0
# via
# itemloaders
# parsel
# scrapy
watchgod==0.7
# via uvicorn
websockets==8.1
# via uvicorn
zope.event==4.5.0
# via gevent
zope.interface==5.3.0
34 changes: 20 additions & 14 deletions sitesearch/api/app.py
@@ -1,28 +1,34 @@
import falcon
from falcon_cors import CORS
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware

from sitesearch.config import AppConfiguration
from .search import SearchResource
from .indexer import IndexerResource
from .job import JobResource
from .health import HealthCheckResource
from sitesearch.api import search, indexer, health, job


def create_app(config=None):
config = config or AppConfiguration()

cors = CORS(allow_origins_list=[
origins = [
'https://docs.redislabs.com',
'https://developer.redislabs.com',
'http://localhost:3000',
'http://localhost:1313',
'http://localhost:8000',
], allow_all_headers=True)
]

api = falcon.API(middleware=[cors.middleware])
api.add_route('/search', SearchResource(config))
api.add_route('/indexer', IndexerResource(config))
api.add_route('/jobs/{job_id}', JobResource(config))
api.add_route('/health', HealthCheckResource(config))
app = FastAPI()

return api
app.add_middleware(
CORSMiddleware,
allow_origins=origins,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)

app.include_router(search.router)
app.include_router(indexer.router)
app.include_router(health.router)
app.include_router(job.router)

return app
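
create_app() now wires up APIRouter instances from the search, indexer, health, and job modules instead of Falcon resources. As a rough sketch of the shape such a module takes (the handler signature and response body here are assumptions, not the project's actual search code):

# Hypothetical sketch of a router module like sitesearch/api/search.py.
from fastapi import APIRouter

router = APIRouter()


@router.get("/search")
async def search(q: str = ""):
    # The real handler would query RediSearch and return ranked documents.
    return {"query": q, "results": []}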
20 changes: 20 additions & 0 deletions sitesearch/api/authentication.py
@@ -0,0 +1,20 @@
import os
import logging

from fastapi import Security, HTTPException, status
from fastapi.security.api_key import APIKeyHeader

API_KEY = os.environ['API_KEY']
API_KEY_NAME = "X-API-KEY"

api_key_header_auth = APIKeyHeader(name=API_KEY_NAME, auto_error=True)
log = logging.getLogger(__name__)


async def get_api_key(api_key_header: str = Security(api_key_header_auth)):
if api_key_header != API_KEY:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Invalid API Key",
)
return api_key_header
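
The get_api_key dependency can then be attached to any route that should require the X-API-KEY header. A hedged usage example follows; the /indexer path and handler are illustrative, not necessarily how the project's routers consume it.

from fastapi import APIRouter, Depends

from sitesearch.api.authentication import get_api_key

router = APIRouter()


# Requests without a valid X-API-KEY header get a 401 from the dependency.
@router.post("/indexer", dependencies=[Depends(get_api_key)])
async def start_indexing():
    return {"status": "accepted"}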