Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixes Bug 716679 - removal of 'build_date' #372

Merged
merged 1 commit on Feb 22, 2012
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
8 changes: 3 additions & 5 deletions socorro/database/schema.py
Expand Up @@ -307,7 +307,6 @@ def __init__ (self, logger, **kwargs):
os_name character varying(100),
os_version character varying(100),
email character varying(100),
build_date timestamp with time zone,
user_id character varying(50),
started_datetime timestamp with time zone,
completed_datetime timestamp with time zone,
Expand Down Expand Up @@ -345,10 +344,10 @@ def __init__ (self, logger, **kwargs):
CREATE INDEX %(partitionName)s_reason ON %(partitionName)s (reason);
"""
)
self.columns = ("uuid", "client_crash_date", "date_processed", "product", "version", "build", "url", "install_age", "last_crash", "uptime", "email", "build_date", "user_id", "user_comments", "app_notes", "distributor", "distributor_version", "topmost_filenames", "addons_checked", "flash_version", "hangid", "process_type", "release_channel")
self.columns = ("uuid", "client_crash_date", "date_processed", "product", "version", "build", "url", "install_age", "last_crash", "uptime", "email", "user_id", "user_comments", "app_notes", "distributor", "distributor_version", "topmost_filenames", "addons_checked", "flash_version", "hangid", "process_type", "release_channel")
self.insertSql = """insert into TABLENAME
(uuid, client_crash_date, date_processed, product, version, build, url, install_age, last_crash, uptime, email, build_date, user_id, user_comments, app_notes, distributor, distributor_version, topmost_filenames, addons_checked, flash_version, hangid, process_type, release_channel) values
(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
(uuid, client_crash_date, date_processed, product, version, build, url, install_age, last_crash, uptime, email, user_id, user_comments, app_notes, distributor, distributor_version, topmost_filenames, addons_checked, flash_version, hangid, process_type, release_channel) values
(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"""
#-----------------------------------------------------------------------------------------------------------------
def additionalCreationProcedures(self, databaseCursor):
pass
Expand Down Expand Up @@ -865,7 +864,6 @@ def __init__(self, logger, **kwargs):
# cpu_info TEXT, -- varchar(100)
# reason TEXT, -- varchar(255)
# address TEXT, -- varchar(20)
# build_date TIMESTAMP without time zone,
# started_datetime TIMESTAMP without time zone,
# completed_datetime TIMESTAMP without time zone,
# date_processed TIMESTAMP without time zone,
Expand Down
10 changes: 1 addition & 9 deletions socorro/processor/processor.py
Expand Up @@ -703,22 +703,14 @@ def insertReportIntoDatabase(self, threadLocalCursor, uuid, jsonDocument, date_p
logger.warning("no 'crash_time' calculated in %s: Using date_processed", uuid)
#sutil.reportExceptionAndContinue(logger, logging.WARNING)
processorErrorMessages.append("WARNING: No 'client_crash_date' could be determined from the Json file")
build_date = None
if buildID:
try:
build_date = datetime.datetime(*[int(x) for x in Processor.buildDatePattern.match(str(buildID)).groups()], tzinfo=UTC)
except (AttributeError, ValueError, KeyError):
logger.warning("no 'build_date' calculated in %s", uuid)
processorErrorMessages.append("WARNING: No 'build_date' could be determined from the Json file")
sutil.reportExceptionAndContinue(logger, logging.WARNING)
try:
last_crash = int(jsonDocument['SecondsSinceLastCrash'])
except:
last_crash = None

release_channel = jsonDocument.get('ReleaseChannel','unknown')

newReportRecordAsTuple = (uuid, crash_date, date_processed, product, version, buildID, url, install_age, last_crash, uptime, email, build_date, user_id, user_comments, app_notes, distributor, distributor_version,None,None,None,hangid,process_type,release_channel)
newReportRecordAsTuple = (uuid, crash_date, date_processed, product, version, buildID, url, install_age, last_crash, uptime, email, user_id, user_comments, app_notes, distributor, distributor_version,None,None,None,hangid,process_type,release_channel)
newReportRecordAsDict = dict(x for x in zip(self.reportsTable.columns, newReportRecordAsTuple))
if not product or not version:
msgTemplate = "Skipping report: Missing product&version: ["+", ".join(["%s:%%s"%x for x in self.reportsTable.columns])+"]"
Expand Down
12 changes: 6 additions & 6 deletions socorro/unittest/cron/testBugzilla.py
Expand Up @@ -29,12 +29,12 @@
def makeBogusReports (connection, cursor, logger):
# make some bogus data in the reports table
reportsTable = sch.ReportsTable(logger)
# ( uuid, client_crash_date, date_processed, product, version, build, url, install_age, last_crash, uptime, email, build_date, user_id, user_comments, app_notes, distributor, distributor_version, topmost_filenames, addons_checked, flash_version, hangid, process_type) values
fakeReportData = [ (( "uuid1", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "BogusClass::bogus_signature (const char**, void *)"),
(( "uuid2", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "js3250.dll@0x6cb96"),
(( "uuid3", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "libobjc.A.dylib@0x1568c"),
(( "uuid4", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "nanojit::LIns::isTramp()"),
(( "uuid5", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "libobjc.A.dylib@0x1568c"),
# ( uuid, client_crash_date, date_processed, product, version, build, url, install_age, last_crash, uptime, email, user_id, user_comments, app_notes, distributor, distributor_version, topmost_filenames, addons_checked, flash_version, hangid, process_type) values
fakeReportData = [ (( "uuid1", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "BogusClass::bogus_signature (const char**, void *)"),
(( "uuid2", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "js3250.dll@0x6cb96"),
(( "uuid3", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "libobjc.A.dylib@0x1568c"),
(( "uuid4", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "nanojit::LIns::isTramp()"),
(( "uuid5", None, dt.datetime(2009, 05, 04, tzinfo=UTC), "bogus", "1.0", "xxx", "http://cnn.com", 100, 14, 10, None, None, "bogus", "", "", ",", None, None, None, None, None, 'release'), "libobjc.A.dylib@0x1568c"),
]
try:
#altconn = psycopg2.connect(me.dsn)
Expand Down
6 changes: 3 additions & 3 deletions socorro/unittest/cron/testNamedCursor.py
Expand Up @@ -37,12 +37,12 @@ def addReportData(cursor, dataToAdd):
# dataToAdd is [{},...] for dictionaries of values as shown in sql below
sql = """INSERT INTO reports
(uuid, client_crash_date, date_processed, product, version, build, url, install_age, last_crash, uptime,
email, os_name, os_version, build_date,
email, os_name, os_version,
user_id, -- ignored (no longer collected)
user_comments,
app_notes, distributor, distributor_version) VALUES -- These are ignored for testing purposes
(%(uuid)s,%(client_crash_date)s,%(date_processed)s,%(product)s,%(version)s,%(build)s,%(url)s,%(install_age)s,%(last_crash)s,%(uptime)s,
%(email)s,%(os_name)s,%(os_version)s,%(build_date)s,
%(email)s,%(os_name)s,%(os_version)s,
0,
%(user_comments)s,
%(app_notes)s, %(distributor)s, %(distributor_version)s)"""
Expand Down Expand Up @@ -129,7 +129,7 @@ def reportDataGenerator(self,sizePerDay,numDays):
'email':None,
'os_name': os_name,
'os_version': os_version,
'build_date': buildDates[count%len(buildDates)],
#'build_date': buildDates[count%len(buildDates)],
'user_comments': 'oh help',
'app_notes':"",
'distributor':"",
Expand Down
5 changes: 3 additions & 2 deletions socorro/unittest/database/testSchemaPartitionedTable.py
Expand Up @@ -184,9 +184,10 @@ def testPartitionInsert(self):
cursor = self.connection.cursor()
me.logger.debug("DEBUG before createDB")
# test in this order, because other things depend on reports
insertRows = [ # uuid, client_crash_date, date_processed, install_age,last_crash,uptime,user_comments, app_notes, distributor, distributor_version,productdims_id,urldims_id
insertRows = [
#[schema.CrashReportsTable,['0bba61c5-dfc3-43e7-dead-8afd20071025',dt.datetime(2007,12,25,5,4,3,21,tz),dt.datetime(2007,12,25,5,4,3,33,tz),10000,100,110,"","","","",1,1]],
[schema.ReportsTable, ['0bba61c5-dfc3-43e7-dead-8afd20071025',dt.datetime(2007,12,25,5,4,3,21,tz),dt.datetime(2007,12,25,5,4,3,33,tz),'Firefox','1.0b4', '200403041354','http://www.a.com', 10000, 100, 110, "", dt.datetime(2004,3,4,13,54,tzinfo=tz),"", "", "", "", "",None,None,None,'bogus_hangid',None,'some_chonnel']],
#"uuid", "client_crash_date", "date_processed", "product", "version", "build", "url", "install_age", "last_crash", "uptime", "email", "user_id", "user_comments", "app_notes", "distributor", "distributor_version", "topmost_filenames", "addons_checked", "flash_version", "hangid", "process_type", "release_channel"
[schema.ReportsTable, ['0bba61c5-dfc3-43e7-dead-8afd20071025',dt.datetime(2007,12,25,5,4,3,21,tz),dt.datetime(2007,12,25,5,4,3,33,tz),'Firefox', '1.0b4', '200403041354','http://www.a.com', 10000, 100, 110, "", "", "", "", "", "", None, None, None, 'bogus_hangid', None, 'some_chonnel']],
[schema.ExtensionsTable,[1,dt.datetime(2007,12,25,5,4,3,33,tz),1,'extensionid','version']],
[schema.FramesTable,[1,2,dt.datetime(2007,12,25,5,4,3,33,tz),'somesignature']],
#[schema.DumpsTable,[1,dt.datetime(2007,12,25,5,4,3,33,tz),"data"]],
Expand Down
2 changes: 0 additions & 2 deletions socorro/unittest/processor/testProcessor.py
Expand Up @@ -1107,7 +1107,6 @@ def testGetJsonOrWarn():
439,
2,
'nobody@mozilla.com',
dt.datetime(2010, 6, 25, 23, 0, tzinfo=UTC),
'',
None,
None,
Expand All @@ -1128,7 +1127,6 @@ def testGetJsonOrWarn():
'topmost_filenames': None,
'id': 234,
'user_comments': None,
'build_date': dt.datetime(2010, 6, 25, 23, 0, tzinfo=UTC),
'uptime': 2,
'user_id': '',
'uuid': 'ooid1',
Expand Down