Merge branch 'master' into 111+master
Nick Pellegrino committed Oct 25, 2017
2 parents effa1ed + 2f114a9 commit cf879c1
Showing 23 changed files with 106 additions and 59 deletions.
3 changes: 2 additions & 1 deletion corehq/apps/domain/auth.py
@@ -7,7 +7,7 @@
from django.http import HttpResponse
from tastypie.authentication import ApiKeyAuthentication
from corehq.toggles import ANONYMOUS_WEB_APPS_USAGE

from corehq.util.string_utils import ensure_unicode

J2ME = 'j2me'
ANDROID = 'android'
@@ -85,6 +85,7 @@ def basicauth(realm=''):
def real_decorator(view):
def wrapper(request, *args, **kwargs):
uname, passwd = get_username_and_password_from_request(request)
uname, passwd = ensure_unicode(uname), ensure_unicode(passwd)
if uname and passwd:
user = authenticate(username=uname, password=passwd)
if user is not None and user.is_active:
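The new ensure_unicode() calls matter because credentials parsed out of a basic-auth Authorization header arrive as byte strings, and they are normalized to unicode before being handed to authenticate(). A minimal Python 2 sketch of the decoding step, under the assumption that the credentials are UTF-8 encoded (the header parsing below is an illustrative stand-in for get_username_and_password_from_request):

import base64

# credentials pulled out of a basic-auth header are byte strings
auth_header = 'Basic ' + base64.b64encode('j\xc3\xbcrgen:s3cret')   # UTF-8 encoded username
uname, passwd = base64.b64decode(auth_header.split(' ', 1)[1]).split(':', 1)
print type(uname)                                    # <type 'str'> -- still bytes
uname, passwd = uname.decode('utf-8'), passwd.decode('utf-8')        # what ensure_unicode() does
print repr(uname)                                    # u'j\xfcrgen', ready for authenticate(username=uname, ...)
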
2 changes: 1 addition & 1 deletion corehq/apps/hqadmin/views.py
@@ -725,7 +725,7 @@ def web_user_lookup(request):
web_user = WebUser.get_by_username(web_user_email)
if web_user is None:
messages.error(
request, "Sorry, no user found with email {}. Did you enter it correctly?".format(web_user_email)
request, u"Sorry, no user found with email {}. Did you enter it correctly?".format(web_user_email)
)
else:
from django_otp import user_has_device
6 changes: 3 additions & 3 deletions corehq/apps/reports/static/reports/js/saved_reports.js
@@ -37,13 +37,13 @@ var ReportConfig = function (data) {
self.unwrap = function () {
var data = ko.mapping.toJS(self);
var standardHQReport = hqImport("reports/js/standard_hq_report").getStandardHQReport();
if (null !== standardHQReport.slug) {
if (standardHQReport.slug) {
data['report_slug'] = standardHQReport.slug;
}
if ("" !== standardHQReport.type) {
if (standardHQReport.type) {
data['report_type'] = standardHQReport.type;
}
if ("" !== standardHQReport.subReportSlug) {
if (standardHQReport.subReportSlug) {
data['subreport_slug'] = standardHQReport.subReportSlug;
}
return data;
2 changes: 1 addition & 1 deletion corehq/apps/reports/tasks.py
@@ -296,7 +296,7 @@ def build_form_multimedia_zip(
_, fpath = tempfile.mkstemp()

_write_attachments_to_file(fpath, use_transfer, num_forms, forms_info, case_id_to_name)
filename = zip_name.append('.zip')
filename = u"{}.zip".format(zip_name)
expose_download(use_transfer, fpath, filename, download_id, 'zip')
DownloadBase.set_progress(build_form_multimedia_zip, num_forms, num_forms)

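This fixes a genuine bug as well as an encoding issue: Python strings have no append() method, so the old line raised AttributeError before the download could be exposed. A quick sketch of the before/after behaviour (the zip_name value is made up):

zip_name = u'late registration forms'      # illustrative value
# zip_name.append('.zip')                  # AttributeError: 'unicode' object has no attribute 'append'
filename = u"{}.zip".format(zip_name)      # u'late registration forms.zip', unicode-safe for non-ASCII form names
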
2 changes: 1 addition & 1 deletion corehq/apps/userreports/tasks.py
@@ -433,7 +433,7 @@ def _indicator_metrics(date_created=None):
@task
def export_ucr_async(export_table, download_id, title, user):
use_transfer = settings.SHARED_DRIVE_CONF.transfer_enabled
filename = '{}.xlsx'.format(title)
filename = u'{}.xlsx'.format(title)
file_path = get_download_file_path(use_transfer, filename)
export_from_tables(export_table, file_path, Format.XLS_2007)
expose_download(use_transfer, file_path, filename, download_id, 'xlsx')
2 changes: 2 additions & 0 deletions corehq/apps/users/models.py
@@ -12,6 +12,7 @@
from django.db import models
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _, override as override_language, ugettext_noop
from casexml.apps.phone.restore_caching import get_loadtest_factor_for_user
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.domain.dbaccessors import get_docs_in_domain_by_class
from corehq.apps.users.landing_pages import ALL_LANDING_PAGES
@@ -1535,6 +1536,7 @@ def _is_demo_user_cached_value_is_stale(self):
def clear_quickcache_for_user(self):
from corehq.apps.users.dbaccessors.all_commcare_users import get_practice_mode_mobile_workers
self.get_usercase_id.clear(self)
get_loadtest_factor_for_user.clear(self.domain, self.user_id)

if self._is_demo_user_cached_value_is_stale():
get_practice_mode_mobile_workers.clear(self.domain)
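Clearing get_loadtest_factor_for_user here keeps the cached value in step with edits to the user: quickcache-decorated functions expose a clear() helper that takes the same arguments as the cached call. A stand-in sketch of that pattern (the decorator below is a simplified toy, not the real corehq.util.quickcache):

_cache = {}

def quickcache(vary_on, timeout=None):
    # toy stand-in: memoize on positional args and expose .clear()
    # with the same signature as the wrapped function
    def decorator(fn):
        def wrapped(*args):
            key = (fn.__name__,) + args
            if key not in _cache:
                _cache[key] = fn(*args)
            return _cache[key]
        wrapped.clear = lambda *args: _cache.pop((fn.__name__,) + args, None)
        return wrapped
    return decorator

@quickcache(['domain', 'user_id'], timeout=24 * 60 * 60)
def get_loadtest_factor_for_user(domain, user_id):
    return 1  # placeholder body; the real function reads CommCareUser.loadtest_factor

get_loadtest_factor_for_user('some-domain', 'user-id')         # populates the cache
get_loadtest_factor_for_user.clear('some-domain', 'user-id')   # what clear_quickcache_for_user now does on save
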
18 changes: 16 additions & 2 deletions corehq/ex-submodules/casexml/apps/phone/restore_caching.py
@@ -1,6 +1,8 @@
import hashlib
import logging
from casexml.apps.phone.const import RESTORE_CACHE_KEY_PREFIX, ASYNC_RESTORE_CACHE_KEY_PREFIX
from corehq.toggles import ENABLE_LOADTEST_USERS
from corehq.util.quickcache import quickcache
from dimagi.utils.couch.cache.cache_core import get_redis_default_cache

logger = logging.getLogger(__name__)
@@ -26,6 +28,16 @@ def invalidate(self):
get_redis_default_cache().delete(self.cache_key)


@quickcache(['domain', 'user_id'], timeout=24 * 60 * 60)
def get_loadtest_factor_for_user(domain, user_id):
from corehq.apps.users.models import CouchUser, CommCareUser
if ENABLE_LOADTEST_USERS.enabled(domain) and user_id:
user = CouchUser.get_by_user_id(user_id, domain=domain)
if isinstance(user, CommCareUser):
return user.loadtest_factor or 1
return 1


class RestorePayloadPathCache(_CacheAccessor):
timeout = 24 * 60 * 60

@@ -36,12 +48,13 @@ def __init__(self, domain, user_id, sync_log_id, device_id):
@staticmethod
def _make_cache_key(domain, user_id, sync_log_id, device_id):
# to invalidate all restore cache keys, increment the number below
hashable_key = '0,{prefix},{domain},{user},{sync_log_id},{device_id}'.format(
hashable_key = '0,{prefix},{domain},{user},{sync_log_id},{device_id},{loadtest_factor}'.format(
domain=domain,
prefix=RESTORE_CACHE_KEY_PREFIX,
user=user_id,
sync_log_id=sync_log_id or '',
device_id=device_id or '',
loadtest_factor=get_loadtest_factor_for_user(domain, user_id),
)
return hashlib.md5(hashable_key).hexdigest()

@@ -56,11 +69,12 @@ def __init__(self, domain, user_id, sync_log_id, device_id):
@staticmethod
def _make_cache_key(domain, user_id, sync_log_id, device_id):
# to invalidate all restore cache keys, increment the number below
hashable_key = '0,{prefix},{domain},{user},{sync_log_id},{device_id}'.format(
hashable_key = '0,{prefix},{domain},{user},{sync_log_id},{device_id},{loadtest_factor}'.format(
domain=domain,
prefix=ASYNC_RESTORE_CACHE_KEY_PREFIX,
user=user_id,
sync_log_id=sync_log_id or '',
device_id=device_id or '',
loadtest_factor=get_loadtest_factor_for_user(domain, user_id),
)
return hashlib.md5(hashable_key).hexdigest()
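Folding the load-test factor into the hashed key is the invalidation mechanism here: when a user's loadtest_factor changes (or ENABLE_LOADTEST_USERS is toggled), the key changes, so any previously cached restore payload simply stops being looked up. A self-contained sketch of the key construction, with get_loadtest_factor_for_user stubbed to a fixed value and an illustrative prefix constant:

import hashlib

RESTORE_CACHE_KEY_PREFIX = 'restore-payload-path'   # illustrative stand-in for the real constant

def get_loadtest_factor_for_user(domain, user_id):
    return 5   # pretend this user is configured to simulate a 5x caseload

def make_cache_key(domain, user_id, sync_log_id, device_id):
    hashable_key = '0,{prefix},{domain},{user},{sync_log_id},{device_id},{loadtest_factor}'.format(
        prefix=RESTORE_CACHE_KEY_PREFIX,
        domain=domain,
        user=user_id,
        sync_log_id=sync_log_id or '',
        device_id=device_id or '',
        loadtest_factor=get_loadtest_factor_for_user(domain, user_id),
    )
    return hashlib.md5(hashable_key).hexdigest()

print make_cache_key('demo-domain', 'user-1', 'synclog-1', 'device-1')
# changing the stubbed factor from 5 to 1 produces a different digest, i.e. a
# different cache slot, leaving the old payload to expire on its own
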
6 changes: 6 additions & 0 deletions corehq/ex-submodules/dimagi/utils/django/email.py
@@ -26,9 +26,15 @@ def send_HTML_email(subject, recipient, html_content, text_content=None,

recipient = list(recipient) if not isinstance(recipient, basestring) else [recipient]

if not isinstance(html_content, unicode):
html_content = html_content.decode('utf-8')

if not text_content:
text_content = getattr(settings, 'NO_HTML_EMAIL_MESSAGE',
NO_HTML_EMAIL_MESSAGE)
elif not isinstance(text_content, unicode):
text_content = text_content.decode('utf-8')


if ga_track and settings.ANALYTICS_IDS.get('GOOGLE_ANALYTICS_API_ID'):
ga_data = {
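Decoding the HTML and text bodies up front avoids the classic Python 2 failure where a UTF-8 byte string gets mixed into a unicode template. A short sketch of the failure mode the new isinstance checks guard against:

html_content = '<p>r\xc3\xa9sum\xc3\xa9 attached</p>'   # UTF-8 bytes handed in by a caller

# u"...".format(byte_string) implicitly decodes as ASCII and raises
# UnicodeDecodeError on the accented characters, so decode explicitly first
if not isinstance(html_content, unicode):
    html_content = html_content.decode('utf-8')

body = u"Subject: {}\n\n{}".format(u'Daily report', html_content)   # safe now
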
15 changes: 10 additions & 5 deletions corehq/ex-submodules/pillow_retry/tests.py
@@ -10,6 +10,7 @@
from pillow_retry.tasks import process_pillow_retry
from pillowtop import get_all_pillow_configs
from pillowtop.checkpoints.manager import PillowCheckpoint
from pillowtop.dao.exceptions import DocumentMissingError
from pillowtop.feed.couch import change_from_couch_row
from pillowtop.feed.interface import Change, ChangeMeta
from pillowtop.feed.mock import RandomChangeFeed
@@ -298,17 +299,22 @@ def test_all_pillows_handle_errors(self):

def _test_error_logging_for_pillow(self, pillow_config):
pillow = _pillow_instance_from_config_with_mock_process_change(pillow_config)
if not pillow.retry_errors:
return

if pillow.retry_errors:
exc_class = Exception
exc_class_string = 'exceptions.Exception'
else:
exc_class = DocumentMissingError
exc_class_string = 'pillowtop.dao.exceptions.DocumentMissingError'

pillow.process_change = MagicMock(side_effect=exc_class(pillow.pillow_id))
doc = self._get_random_doc()
pillow.process_with_error_handling(Change(id=doc['id'], sequence_id='3', document=doc))

errors = PillowError.objects.filter(pillow=pillow.pillow_id).all()
self.assertEqual(1, len(errors), pillow_config)
error = errors[0]
self.assertEqual(error.doc_id, doc['id'], pillow_config)
self.assertEqual('exceptions.Exception', error.error_type)
self.assertEqual(exc_class_string, error.error_type)
self.assertIn(pillow.pillow_id, error.error_traceback)

def _get_random_doc(self):
@@ -327,7 +333,6 @@ def _pillow_instance_from_config_with_mock_process_change(pillow_config):
else:
instance = pillow_config.get_instance()

instance.process_change = MagicMock(side_effect=Exception(instance.pillow_id))
return instance


4 changes: 3 additions & 1 deletion corehq/ex-submodules/pillowtop/pillow/interface.py
@@ -8,6 +8,7 @@
from dimagi.utils.logging import notify_exception
from kafka.common import TopicAndPartition
from pillowtop.const import CHECKPOINT_MIN_WAIT
from pillowtop.dao.exceptions import DocumentMissingError
from pillowtop.utils import force_seq_int
from pillowtop.exceptions import PillowtopCheckpointReset
from pillowtop.logger import pillow_logging
@@ -264,7 +265,8 @@ def fire_change_processed_event(self, change, context):
def handle_pillow_error(pillow, change, exception):
from pillow_retry.models import PillowError
error_id = None
if pillow.retry_errors:
# always retry document missing errors, because the error is likely with couch
if pillow.retry_errors or isinstance(exception, DocumentMissingError):
error = PillowError.get_or_create(change, pillow)
error.add_attempt(exception, sys.exc_info()[2])
error.save()
2 changes: 1 addition & 1 deletion corehq/ex-submodules/soil/views.py
@@ -68,4 +68,4 @@ def retrieve_download(request, download_id, template="soil/file_download.html",
raise Http404
return download.toHttpResponse()

return render_to_response(template, context=context)
return render_to_response(template, context=context.flatten())
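The flatten() call is about the Django upgrade path: render_to_response() expects a plain dict, not a Context object, in Django 1.8 and later. A minimal illustration of what flatten() returns, assuming a stock Django install:

from django.template import Context

context = Context({'download_id': 'abc123'})
print context.flatten()   # a plain dict containing 'download_id' plus Django's builtin True/False/None entries
# render_to_response(template, context=context.flatten()) can then treat it as a normal mapping
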
2 changes: 1 addition & 1 deletion corehq/motech/repeaters/const.py
@@ -6,7 +6,7 @@
CHECK_REPEATERS_INTERVAL = timedelta(minutes=5)
CHECK_REPEATERS_KEY = 'check-repeaters-key'

POST_TIMEOUT = 45 # seconds
POST_TIMEOUT = 75 # seconds

RECORD_PENDING_STATE = 'PENDING'
RECORD_SUCCESS_STATE = 'SUCCESS'
8 changes: 1 addition & 7 deletions corehq/util/log.py
@@ -304,10 +304,4 @@ def draw(position):

def send_HTML_email(subject, recipient, html_content, *args, **kwargs):
kwargs['ga_track'] = kwargs.get('ga_track', False) and analytics_enabled_for_email(recipient)
unicode_html_content = html_content if isinstance(html_content, unicode) else html_content.decode('utf-8')
text_content = kwargs.get('text_content')
if text_content is not None:
kwargs['text_content'] = (
text_content if isinstance(text_content, unicode) else text_content.decode('utf-8')
)
return _send_HTML_email(subject, recipient, unicode_html_content, *args, **kwargs)
return _send_HTML_email(subject, recipient, html_content, *args, **kwargs)
7 changes: 7 additions & 0 deletions corehq/util/string_utils.py
@@ -5,3 +5,10 @@
def random_string(n=6):
# http://stackoverflow.com/a/23728630/835696
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(n))


def ensure_unicode(s):
if not s or isinstance(s, unicode):
return s
else:
return s.decode('utf-8')
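Given the definition above, the helper's behaviour on the boundary cases looks like this (Python 2; the values are illustrative):

ensure_unicode('caf\xc3\xa9')   # -> u'caf\xe9'   (UTF-8 bytes decoded)
ensure_unicode(u'caf\xe9')      # -> u'caf\xe9'   (unicode passes through untouched)
ensure_unicode('')              # -> ''           (falsy values short-circuit)
ensure_unicode(None)            # -> None
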
32 changes: 16 additions & 16 deletions custom/enikshay/ucr/data_sources/episode_2b_v4.json
@@ -567,7 +567,7 @@
"type": "conditional",
"test": {
"type": "boolean_expression",
"operator": "eq",
"operator": "in",
"expression": {
"value_expression": {
"datatype": "string",
@@ -582,24 +582,14 @@
"name": "occurrence_id"
}
},
"property_value": "Other"
"property_value": ["Other", "other"]
},
"expression_if_true": {
"separator": ", ",
"expressions": [
{
"value_expression": {
"datatype": "string",
"type": "property_name",
"property_name": "site_choice"
},
"type": "related_doc",
"related_doc_type": "CommCareCase",
"doc_id_expression": {
"datatype": null,
"type": "named",
"name": "occurrence_id"
}
"type": "constant",
"constant": "Other"
},
{
"value_expression": {
@@ -980,8 +970,18 @@
"name": "other_test"
},
"value_expression": {
"type": "property_name",
"property_name": "result_summary_display"
"type": "concatenate_strings",
"expressions": [
{
"type": "property_name",
"property_name": "test_type_label"
},
{
"type": "property_name",
"property_name": "result_summary_display"
}
],
"separator": ", "
}
},
"transform": {},
@@ -1,11 +1,9 @@
{
"domains": [
"enikshay-reports-qa",
"sheel-enikshay"
"enikshay"
],
"server_environment": [
"enikshay",
"softlayer"
"enikshay"
],
"config": {
"referenced_doc_type": "CommCareCase",
@@ -28,6 +26,15 @@
},
"type": "boolean_expression",
"property_value": "episode"
},
{
"operator": "eq",
"expression": {
"type": "property_name",
"property_name": "enrolled_in_private"
},
"type": "boolean_expression",
"property_value": "true"
}
]
},
@@ -1,17 +1,15 @@
{
"domains": [
"enikshay-reports-qa",
"sheel-enikshay"
"enikshay"
],
"server_environment": [
"enikshay",
"softlayer"
"enikshay"
],
"data_source_table": "episode_for_adherence_report",
"report_id": "adherence",
"config": {
"description": "",
"title": "Adherence (QA)",
"title": "Adherence",
"sort_expression": [],
"visible": false,
"configured_charts": [],
6 changes: 4 additions & 2 deletions custom/enikshay/ucr/tests/test_episode_2b.py
@@ -743,6 +743,7 @@ def test_not_microbiological_result(self):
'date_tested': '2017-08-10',
'date_reported': '2017-08-10',
'test_type_value': 'cytopathology',
'test_type_label': 'Cytopathology',
'result_grade': 'result_grade',
'result_recorded': 'yes',
'result_summary_display': 'result_cytopathology'
@@ -767,6 +768,7 @@
'test_requested_date': '2017-10-10',
'date_reported': '2017-10-10',
'test_type_value': 'other_clinical',
'test_type_label': 'Other clinical',
'result_summary_display': 'result_other_clinical'
}
]
@@ -787,7 +789,7 @@ def test_not_microbiological_result(self):
with mock.patch.object(SubcasesExpressionSpec, '__call__', lambda *args: subcases):
self.assertEqual(
not_microbiological_result_expression(episode_case, EvaluationContext(episode_case, 0)),
'result_cytopathology'
'Cytopathology, result_cytopathology'
)

def test_disease_classification_pulmonary(self):
Expand Down Expand Up @@ -894,7 +896,7 @@ def test_disease_classification_extra_pulmonary_site_choice_other(self):
'_id': 'occurrence_case_id',
'domain': 'enikshay-test',
'disease_classification': 'extra_pulmonary',
'site_choice': 'Other',
'site_choice': 'other',
'site_detail': 'test detail',
'indices': [
{'referenced_id': 'person_case_id'}
2 changes: 1 addition & 1 deletion custom/icds_reports/sqldata.py
@@ -1330,7 +1330,7 @@ def columns(self):
percent,
[
SumColumn('cases_person_has_aadhaar'),
SumColumn('cases_person_beneficiary')
AliasColumn('cases_person_beneficiary')
],
slug='num_people_with_aadhar'
),
