Skip to content
This repository has been archived by the owner on Nov 10, 2021. It is now read-only.

Commit

Permalink
Merge pull request #3 from UWIT-IAM/develop
Browse files Browse the repository at this point in the history
Resync with uw-it-aca/uw-restclients master.
  • Loading branch information
jeffFranklin committed Mar 15, 2016
2 parents 465258a + 638486e commit 9e8ac2c
Show file tree
Hide file tree
Showing 200 changed files with 6,232 additions and 1,447 deletions.
14 changes: 12 additions & 2 deletions .travis.yml
@@ -1,16 +1,26 @@
sudo: false
language: python
services:
- memcached
python:
- "2.7"
install:
- if [[ $TRAVIS_PYTHON_VERSION == 2.6* ]]; then pip install -r travis-ci/python-2.6-require-django-1.6.txt; fi
- pip install -r restclients/requirements.txt
env:
- DJANGO_VERSION='<1.8'
- DJANGO_VERSION='<1.10'
before_script:
- pip install coverage
- pip install python-coveralls
- pip install pep8
- if [[ $TRAVIS_PYTHON_VERSION != 2.6* ]]; then pip install -U "Django$DJANGO_VERSION"; fi
- cp travis-ci/manage.py manage.py
- python manage.py syncdb --noinput
- if [[ $TRAVIS_PYTHON_VERSION != 2.6* ]]; then python manage.py migrate --noinput; fi
- if [[ $TRAVIS_PYTHON_VERSION == 2.6* ]]; then python manage.py syncdb --noinput; fi
script:
- pep8 --exclude=migrations,restclients/sws/,restclients/canvas/,restclients/util/,restclients/trumba/,restclients/dao_implementation,restclients/uwnetid/,restclients/test/,restclients/signals/,restclients/models/,restclients/hfs/,restclients/r25,restclients/nws.py,restclients/thread.py,restclients/pws.py,restclients/bookstore.py,restclients/exceptions.py,restclients/views.py,restclients/gws.py,restclients/dao.py,restclients/cache_manager.py,restclients/amazon_sqs.py,restclients/cache_implementation.py,restclients/sms.py,restclients/urls.py,restclients/library/,restclients/irws.py restclients/
- pep8 --exclude=migrations,restclients/sws/,restclients/canvas/,restclients/uwnetid/,restclients/test/,restclients/signals/,restclients/hfs/,restclients/r25,restclients/nws.py,restclients/sms.py,restclients/library/ restclients/
- python -m compileall restclients/
- coverage run --source=restclients manage.py test restclients
after_script:
- coveralls
1 change: 1 addition & 0 deletions restclients/amazon_sqs.py
Expand Up @@ -5,6 +5,7 @@
from boto.sqs.message import RawMessage
from django.conf import settings


class AmazonSQS(object):
"""
The AmazonSQS class has methods for getting/creating queues.
Expand Down
89 changes: 56 additions & 33 deletions restclients/bookstore.py
Expand Up @@ -9,33 +9,32 @@
import re


BOOK_PREFIX = "http://uw-seattle.verbacompare.com/m?section_id="


class Bookstore(object):
"""
Get book information for courses.
"""

def get_books_for_schedule(self, schedule):
"""
Returns a dictionary of data. SLNs are the keys, an array of Book
objects are the values.
"""
def get_books_by_quarter_sln(self, quarter, sln):
dao = Book_DAO()

url = self.get_books_url(schedule)

sln_string = self._get_sln_string(sln)
url = "/myuw/myuw_mobile_beta.ubs?quarter=%s&%s" % (
quarter,
sln_string,
)
response = dao.getURL(url, {"Accept": "application/json"})
if response.status != 200:
raise DataFailureException(url, response.status, response.data)

data = json.loads(response.data)

response = {}
books = []

for section in schedule.sections:
response[section.sln] = []
try:
sln_data = data[section.sln]
for book_data in sln_data:
sln_data = data[str(sln)]

if len(sln_data) > 0:
for book_data in sln_data:
book = Book()
book.isbn = book_data["isbn"]
book.title = book_data["title"]
Expand All @@ -51,12 +50,28 @@ def get_books_for_schedule(self, schedule):
author.name = author_data["name"]
book.authors.append(author)

response[section.sln].append(book)
except KeyError as err:
#do nothing if bookstore has no record of book
pass
books.append(book)
return books

return response
def get_books_for_schedule(self, schedule):
    """
    Returns a dictionary of data. SLNs are the keys, an array of Book
    objects are the values.

    Sections the bookstore has no record of are silently omitted from
    the result.
    """
    books_by_sln = {}

    for sln in self._get_slns(schedule):
        try:
            books_by_sln[sln] = self.get_books_by_quarter_sln(
                schedule.term.quarter, sln
            )
        except DataFailureException:
            # do nothing if bookstore doesn't have sln
            pass

    return books_by_sln

def get_verba_link_for_schedule(self, schedule):
"""
Expand All @@ -76,26 +91,34 @@ def get_verba_link_for_schedule(self, schedule):

for key in data:
if re.match(r'^[A-Z]{2}[0-9]{5}$', key):
return "http://uw-seattle.verbacompare.com/m?section_id=%s&quarter=%s" % (key, schedule.term.quarter)

def get_books_url(self, schedule):
sln_string = self._get_slns_string(schedule)
url = "/myuw/myuw_mobile_beta.ubs?quarter=%s&%s" % (
schedule.term.quarter,
sln_string,
)

return url
return "%s%s&quarter=%s" % (BOOK_PREFIX,
key,
schedule.term.quarter)

def get_verba_url(self, schedule):
    """
    Build the relative Verba mobile URL for the given schedule,
    carrying the term's quarter and every section SLN as query args.
    """
    quarter = schedule.term.quarter
    sln_args = self._get_slns_string(schedule)
    return "/myuw/myuw_mobile_v.ubs?quarter=%s&%s" % (quarter, sln_args)

def _get_sln_string(self, sln):
return "sln1=%s" % sln

def _get_slns(self, schedule):
slns = []
# Prevent dupes - mainly for mock data
seen_slns = {}
for section in schedule.sections:
sln = section.sln
if sln not in seen_slns:
seen_slns[sln] = True
slns.append(sln)

return slns

def _get_slns_string(self, schedule):
slns = []
# Prevent dupes - mainly for mock data
Expand Down
89 changes: 88 additions & 1 deletion restclients/cache_implementation.py
Expand Up @@ -7,6 +7,13 @@
from datetime import datetime, timedelta
from django.utils.timezone import make_aware, get_current_timezone
from django.conf import settings
import json
import bmemcached
import logging
import threading


logger = logging.getLogger(__name__)


class NoCache(object):
Expand All @@ -28,7 +35,8 @@ class TimedCache(object):
def _response_from_cache(self, service, url, headers, max_age_in_seconds,
max_error_age=60 * 5):

# If max_age_in_seconds is 0, make sure we don't get a hit from this same second.
# If max_age_in_seconds is 0,
# make sure we don't get a hit from this same second.
if not max_age_in_seconds:
return None
now = make_aware(datetime.now(), get_current_timezone())
Expand Down Expand Up @@ -183,3 +191,82 @@ def processResponse(self, service, url, response):
store_cache_entry(cache_entry)

return


class MemcachedCache(object):
    """
    Cache resources in memcached.

    Responses are stored as a JSON document of status/data/headers under
    a "<service>-<url>" key, with one bmemcached client per thread.
    """
    client = None

    def getCache(self, service, url, headers):
        """
        Return {"response": MockHTTP} for a cached entry, or None on a
        miss or a memcached error (errors are logged, not raised).
        """
        cache_key = self._get_key(service, url)
        try:
            raw = self._get_client().get(cache_key)
        except bmemcached.exceptions.MemcachedException as ex:
            logger.warning("MemCached Err on get with key '%s' ==> '%s'",
                           cache_key, str(ex))
            return

        if not raw:
            return

        values = json.loads(raw)
        response = MockHTTP()
        response.status = values["status"]
        response.data = values["data"]
        response.headers = values["headers"]
        return {"response": response}

    def processResponse(self, service, url, response):
        """
        Store a successful (HTTP 200) response in memcached; set/log
        failures are swallowed after a warning.
        """
        if response.status != 200:
            # don't cache errors, at least for now...
            return

        header_data = {}
        for name in response.headers:
            header_data[name] = response.getheader(name)

        payload = json.dumps({"status": response.status,
                              "data": response.data,
                              "headers": header_data})

        expiry = self.get_cache_expiration_time(service, url)
        cache_key = self._get_key(service, url)
        try:
            self._get_client().set(cache_key, payload, time=expiry)
            logger.info("MemCached set with key '%s', %d seconds",
                        cache_key, expiry)
        except bmemcached.exceptions.MemcachedException as ex:
            logger.warning("MemCached Err on set with key '%s' ==> '%s'",
                           cache_key, str(ex))
        return

    def get_cache_expiration_time(self, service, url):
        """Seconds to keep an entry; four hours by default."""
        # Over-ride this to define your own.
        return 60 * 60 * 4

    def _get_key(self, service, url):
        """Compose the memcached key for a service/url pair."""
        return "%s-%s" % (service, url)

    def _get_client(self):
        """
        Return this thread's bmemcached client, creating and caching one
        (keyed by thread ident on the class) on first use.
        """
        thread_id = threading.current_thread().ident
        clients = getattr(MemcachedCache, "_memcached_cache", None)
        if clients is None:
            clients = MemcachedCache._memcached_cache = {}

        if thread_id in clients:
            return clients[thread_id]

        # Connection settings come from django settings; user/pass are
        # optional.
        servers = settings.RESTCLIENTS_MEMCACHED_SERVERS
        username = getattr(settings, "RESTCLIENTS_MEMCACHED_USER", None)
        password = getattr(settings, "RESTCLIENTS_MEMCACHED_PASS", None)

        new_client = bmemcached.Client(servers, username, password)
        clients[thread_id] = new_client
        return new_client
10 changes: 8 additions & 2 deletions restclients/cache_manager.py
Expand Up @@ -4,9 +4,12 @@
innodb gap locks from deadlocking sequential inserts.
"""

from django.db import IntegrityError


__manage_bulk_inserts = False
__bulk_insert_queue = []
from django.db import IntegrityError


def store_cache_entry(entry):
global __manage_bulk_inserts
Expand All @@ -18,6 +21,7 @@ def store_cache_entry(entry):
else:
entry.save()


def save_all_queued_entries():
global __bulk_insert_queue

Expand All @@ -26,18 +30,20 @@ def save_all_queued_entries():

try:
for entry in __bulk_insert_queue:
if not entry.url in seen_urls:
if entry.url not in seen_urls:
entry.save()
seen_urls[entry.url] = True
except Exception as ex:
print "Error bulk saving cache entries: ", ex

__bulk_insert_queue = []


def enable_cache_entry_queueing():
    """Switch cache-entry saves into queued (bulk-insert) mode."""
    global __manage_bulk_inserts
    __manage_bulk_inserts = True


def disable_cache_entry_queueing():
    """Switch cache-entry saves back to immediate (per-entry) mode."""
    global __manage_bulk_inserts
    __manage_bulk_inserts = False
Expand Down

0 comments on commit 9e8ac2c

Please sign in to comment.