Skip to content
This repository has been archived by the owner on Dec 15, 2018. It is now read-only.

Commit

Permalink
Increase default of max artifact size limit and make it configurable.…
Browse files Browse the repository at this point in the history
… Set a tighter default for analytics json handler.

Reviewers: andrewhe, anupc

Reviewed By: anupc

Subscribers: stacey, changesbot, kylec

Differential Revision: https://tails.corp.dropbox.com/D221214
  • Loading branch information
paulruan committed Aug 18, 2016
1 parent 0245707 commit 9189b8c
Show file tree
Hide file tree
Showing 5 changed files with 29 additions and 21 deletions.
4 changes: 4 additions & 0 deletions changes/artifacts/analytics_json.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,10 @@ class AnalyticsJsonHandler(ArtifactHandler):
"""
FILENAMES = ('CHANGES_ANALYTICS.json', '*.CHANGES_ANALYTICS.json')

def __init__(self, step):
    """Initialize the handler for *step*, applying the analytics-json size cap.

    Overrides the generic ``max_artifact_bytes`` set by the base class
    with the tighter ``MAX_ARTIFACT_BYTES_ANALYTICS_JSON`` config value.
    """
    super(AnalyticsJsonHandler, self).__init__(step)
    limit = current_app.config['MAX_ARTIFACT_BYTES_ANALYTICS_JSON']
    self.max_artifact_bytes = limit

def process(self, fp, artifact):
allowed_tables = current_app.config.get('ANALYTICS_PROJECT_TABLES', [])
try:
Expand Down
4 changes: 3 additions & 1 deletion changes/artifacts/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import logging
import os

from flask import current_app

from changes.config import db
from changes.db.utils import try_create
from changes.models.failurereason import FailureReason
Expand All @@ -17,11 +19,11 @@ class ArtifactParseError(Exception):

class ArtifactHandler(object):
FILENAMES = ()
MAX_ARTIFACT_BYTES = 50 * 1024 * 1024
logger = logging.getLogger('artifacts')

def __init__(self, step):
    """Bind the handler to a jobstep and load the default artifact size cap.

    ``max_artifact_bytes`` comes from the app config (``MAX_ARTIFACT_BYTES``);
    subclasses may override it with a handler-specific limit.
    """
    self.step = step
    self.max_artifact_bytes = current_app.config['MAX_ARTIFACT_BYTES']

@staticmethod
def _sanitize_path(artifact_path):
Expand Down
2 changes: 1 addition & 1 deletion changes/artifacts/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ def process(self, artifact, fp=None):
if cls.can_process(artifact_name):
handler = cls(step)
size = artifact.file.get_size()
if size > cls.MAX_ARTIFACT_BYTES:
if size > handler.max_artifact_bytes:
handler.report_malformed()
continue
if not fp:
Expand Down
5 changes: 5 additions & 0 deletions changes/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,11 @@ def create_app(_read_config=True, **config):
# without being overridden. This value is referenced in test code.
app.config['ARTIFACTS_SERVER'] = 'http://localhost:1234'

# The default max artifact size handlers should be capable of processing.
app.config['MAX_ARTIFACT_BYTES'] = 200 * 1024 * 1024
# The max artifact size the analytics json handler should be capable of processing.
app.config['MAX_ARTIFACT_BYTES_ANALYTICS_JSON'] = 70 * 1024 * 1024

# the binary to use for running changes-client. Default is just
# "changes-client", but can also be specified as e.g. a full path.
app.config['CHANGES_CLIENT_BINARY'] = 'changes-client'
Expand Down
35 changes: 16 additions & 19 deletions tests/changes/artifacts/test_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,9 @@ class ManagerTest(TestCase):
def test_process_behavior(self, process):
class _CovHandler(ArtifactHandler):
FILENAMES = ('coverage.xml',)
MAX_ARTIFACT_SIZE = 200

class _OtherHandler(ArtifactHandler):
FILENAMES = ('/other.xml',)
MAX_ARTIFACT_SIZE = 200

manager = Manager([_CovHandler, _OtherHandler])
jobstep = self.create_any_jobstep()
Expand Down Expand Up @@ -70,39 +68,38 @@ class _OtherHandler(ArtifactHandler):
@mock.patch.object(ArtifactHandler, 'process')
@mock.patch.object(ArtifactHandler, 'report_malformed')
def test_process_too_large(self, report_malformed, process):
    """Artifacts above a handler's max_artifact_bytes are reported as
    malformed and never processed; artifacts at or below the limit are
    processed normally.

    NOTE(review): this span was diff residue mixing the pre- and
    post-change versions of the test; reconstructed here as the
    post-change version (per-handler instance limit instead of the
    removed MAX_ARTIFACT_BYTES class attribute).
    """
    max_artifact_bytes = 500

    # artifact name => (size in bytes, expected to be processed)
    file_sizes = {
        'very_large/size.xml': (max_artifact_bytes * 2, False),
        'large/size.xml': (max_artifact_bytes, True),
        'small/size.xml': (max_artifact_bytes / 2, True),
    }

    class _Handler(ArtifactHandler):
        FILENAMES = ('size.xml',)

        def __init__(self, step):
            super(_Handler, self).__init__(step)
            # Override the config-derived limit with a small test value.
            self.max_artifact_bytes = max_artifact_bytes

    manager = Manager([_Handler])
    jobstep = self.create_any_jobstep()

    for name, (size, expected_process) in file_sizes.iteritems():
        process.reset_mock()
        report_malformed.reset_mock()
        artifact = self.create_artifact(
            step=jobstep,
            name=name,
        )
        artifact.file.save(StringIO('a' * size), artifact.name)
        manager.process(artifact)
        # Exactly one of process/report_malformed should fire per artifact.
        assert expected_process == process.called, \
            "Incorrectly handled %s." % (artifact.name,)
        assert expected_process != report_malformed.called, \
            "Incorrectly handled %s." % (artifact.name,)

def test_can_process(self):
class _CovHandler(ArtifactHandler):
Expand Down

0 comments on commit 9189b8c

Please sign in to comment.