Skip to content

Commit

Permalink
bug 803209 - code review fixes
Browse files Browse the repository at this point in the history
  • Loading branch information
selenamarie committed Jun 12, 2013
1 parent 459939e commit 6c0e607
Show file tree
Hide file tree
Showing 6 changed files with 53 additions and 70 deletions.
7 changes: 5 additions & 2 deletions Makefile
Expand Up @@ -102,5 +102,8 @@ analysis:
rsync akela/target/*.jar analysis/
rsync -a socorro-toolbox/src/main/pig/ analysis/

json: virtualenv
if [ ! -f `pg_config --pkglibdir`/json_enhancements.so ]; then sudo ./socorro-virtualenv/bin/pgxn install json_enhancements ; fi
json_enhancements_pg_extension: virtualenv
# This is only run manually, as it is a one-time operation
# to be performed at system installation time, rather than
# every time Socorro is built
if [ ! -f `pg_config --pkglibdir`/json_enhancements.so ]; then sudo $(VIRTUALENV)/bin/pgxn install json_enhancements ; fi
16 changes: 4 additions & 12 deletions alembic/script.py.mako
Expand Up @@ -24,14 +24,10 @@ class CITEXT(types.UserDefinedType):
return 'CITEXT'

def bind_processor(self, dialect):
def process(value):
return value
return process
return lambda value: value

def result_processor(self, dialect, coltype):
def process(value):
return value
return process
return lambda value: value

def __repr__(self):
return "citext"
Expand All @@ -43,14 +39,10 @@ class JSON(types.UserDefinedType):
return 'JSON'

def bind_processor(self, dialect):
def process(value):
return value
return process
return lambda value: value

def result_processor(self, dialect, coltype):
def process(value):
return value
return process
return lambda value: value

def __repr__(self):
return "json"
Expand Down
20 changes: 6 additions & 14 deletions alembic/versions/2b285e76f71d_bug_803209_add_garag.py
Expand Up @@ -25,14 +25,10 @@ def get_col_spec(self):
return 'CITEXT'

def bind_processor(self, dialect):
def process(value):
return value
return process
return lambda value: value

def result_processor(self, dialect, coltype):
def process(value):
return value
return process
return lambda value: value

def __repr__(self):
return "citext"
Expand All @@ -44,14 +40,10 @@ def get_col_spec(self):
return 'JSON'

def bind_processor(self, dialect):
def process(value):
return value
return process
return lambda value: value

def result_processor(self, dialect, coltype):
def process(value):
return value
return process
return lambda value: value

def __repr__(self):
return "json"
Expand All @@ -65,8 +57,8 @@ def upgrade():
'update_tcbs.sql'
]
for myfile in [app_path + '/socorro/external/postgresql/raw_sql/procs/' + line for line in procs]:
proc = open(myfile, 'r').read()
op.execute(proc)
with open(myfile, 'r') as file:
op.execute(file.read())

def downgrade():
op.drop_column(u'tcbs_build', u'is_gc_count')
Expand Down
2 changes: 1 addition & 1 deletion docs/installation.rst
Expand Up @@ -180,7 +180,7 @@ Install json_extensions for use with PostgreSQL
```````````````````````````````````````````````
From inside the Socorro checkout
::
make json
make json_enhancements_pg_extension

Run unit/functional tests
`````````````````````````
Expand Down
8 changes: 4 additions & 4 deletions socorro/external/postgresql/fakedata.py
Expand Up @@ -12,7 +12,7 @@
import csv
import os

CRASHIDS = []
crash_ids = []

def date_range(start_date, end_date, delta=None):
if delta is None:
Expand Down Expand Up @@ -335,7 +335,7 @@ def generate_crashid(self, timestamp):
depth = 0
final_crashid = "%s%d%02d%02d%02d" % (crashid[:-7], depth, timestamp.year % 100,
timestamp.month, timestamp.day)
CRASHIDS.append( (final_crashid, timestamp) )
crash_ids.append( (final_crashid, timestamp) )
return final_crashid

def buildid(self, fragment, format='%Y%m%d', days=None):
Expand Down Expand Up @@ -684,8 +684,8 @@ class RawCrashes(BaseTable):
columns = ['uuid', 'raw_crash', 'date_processed']

def generate_rows(self):
for crashid, date_processed, in CRASHIDS:
raw_crash = '{ "uuid": "%s", "IsGarbageCollecting": "1" }'
for crashid, date_processed, in crash_ids:
raw_crash = '{ "uuid": "%s", "IsGarbageCollecting": "1" }' % crashid
row = [crashid, raw_crash, date_processed]
yield row

Expand Down
70 changes: 33 additions & 37 deletions socorro/external/postgresql/raw_sql/procs/update_tcbs.sql
Expand Up @@ -11,31 +11,31 @@ BEGIN
-- check that it hasn't already been run

IF checkdata THEN
PERFORM 1 FROM tcbs
WHERE report_date = updateday LIMIT 1;
IF FOUND THEN
RAISE NOTICE 'TCBS has already been run for the day %.',updateday;
RETURN FALSE;
END IF;
PERFORM 1 FROM tcbs
WHERE report_date = updateday LIMIT 1;
IF FOUND THEN
RAISE NOTICE 'TCBS has already been run for the day %.',updateday;
RETURN FALSE;
END IF;
END IF;

-- check if reports_clean is complete
IF NOT reports_clean_done(updateday, check_period) THEN
IF checkdata THEN
RAISE NOTICE 'Reports_clean has not been updated to the end of %',updateday;
IF checkdata THEN
RAISE NOTICE 'Reports_clean has not been updated to the end of %',updateday;
RETURN FALSE;
ELSE
RETURN FALSE;
ELSE
RETURN FALSE;
END IF;
END IF;
END IF;

-- populate the matview for regular releases

INSERT INTO tcbs (
signature_id, report_date, product_version_id,
process_type, release_channel,
report_count, win_count, mac_count, lin_count, hang_count,
startup_count, is_gc_count
signature_id, report_date, product_version_id,
process_type, release_channel,
report_count, win_count, mac_count, lin_count, hang_count,
startup_count, is_gc_count
)
WITH raw_crash_filtered AS (
SELECT
Expand All @@ -48,36 +48,32 @@ WITH raw_crash_filtered AS (
)
SELECT signature_id
, updateday
, product_version_id
, process_type
, product_version_id
, process_type
, release_channel
, count(*)
, sum(case when os_name = 'Windows' THEN 1 else 0 END)
, sum(case when os_name = 'Mac OS X' THEN 1 else 0 END)
, sum(case when os_name = 'Linux' THEN 1 else 0 END)
, count(*)
, sum(case when os_name = 'Windows' THEN 1 else 0 END)
, sum(case when os_name = 'Mac OS X' THEN 1 else 0 END)
, sum(case when os_name = 'Linux' THEN 1 else 0 END)
, count(hang_id)
, sum(case when uptime < INTERVAL '1 minute' THEN 1 else 0 END)
, sum(CASE WHEN r.is_garbage_collecting = '1' THEN 1 ELSE 0 END) as gc_count
FROM reports_clean
JOIN product_versions USING (product_version_id)
JOIN product_versions USING (product_version_id)
JOIN signatures USING (signature_id)
JOIN raw_crash_filtered r ON r.uuid::text = reports_clean.uuid
LEFT JOIN raw_crash_filtered r ON r.uuid::text = reports_clean.uuid
WHERE utc_day_is(date_processed, updateday)
AND tstz_between(date_processed, build_date, sunset_date)
AND tstz_between(date_processed, build_date, sunset_date)
GROUP BY signature_id, updateday, product_version_id,
process_type, release_channel;


RAISE WARNING 'got here';
RETURN TRUE;
process_type, release_channel;

-- populate summary statistics for rapid beta parent records

INSERT INTO tcbs (
signature_id, report_date, product_version_id,
process_type, release_channel,
report_count, win_count, mac_count, lin_count, hang_count,
startup_count, is_gc_count )
signature_id, report_date, product_version_id,
process_type, release_channel,
report_count, win_count, mac_count, lin_count, hang_count,
startup_count, is_gc_count )
SELECT signature_id
, updateday
, rapid_beta_id
Expand All @@ -91,12 +87,12 @@ SELECT signature_id
, sum(startup_count)
, sum(is_gc_count)
FROM tcbs
JOIN product_versions USING (product_version_id)
JOIN product_versions USING (product_version_id)
WHERE report_date = updateday
AND build_type = 'beta'
AND rapid_beta_id is not null
AND build_type = 'beta'
AND rapid_beta_id is not null
GROUP BY signature_id, updateday, rapid_beta_id,
process_type, release_channel;
process_type, release_channel;

-- tcbs_ranking removed until it's being used

Expand Down

0 comments on commit 6c0e607

Please sign in to comment.