
fix bug 1398200, 1457484: remove unused tables
This removes a bunch of tables that aren't used anymore, plus some upkeep
stored procedures for rank_compare.
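
For context (not part of this commit): one way to double-check that a table really is unused before dropping it is to look at PostgreSQL's per-table activity statistics in pg_stat_user_tables. A minimal sketch, assuming a psycopg2 connection; the DSN and the table list below are placeholders:

import psycopg2

# Hypothetical subset of the tables this commit drops.
CANDIDATE_TABLES = ('plugins', 'transform_rules', 'rank_compare')

conn = psycopg2.connect('dbname=breakpad')  # placeholder DSN
with conn, conn.cursor() as cursor:
    cursor.execute(
        "SELECT relname, seq_scan, idx_scan, n_tup_ins, n_tup_upd "
        "FROM pg_stat_user_tables "
        "WHERE relname IN %s",
        (CANDIDATE_TABLES,)
    )
    for relname, seq_scan, idx_scan, n_tup_ins, n_tup_upd in cursor.fetchall():
        # Near-zero counters (since the last stats reset) suggest the table
        # is no longer read or written; it's a hint, not proof.
        print(relname, seq_scan, idx_scan, n_tup_ins, n_tup_upd)
conn.close()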
willkg committed Apr 30, 2018
1 parent 0b39fb9 commit 7f3f35f
Showing 9 changed files with 52 additions and 409 deletions.
@@ -0,0 +1,44 @@
"""bug 1398200, 1457484 remove unused tables
Revision ID: 1e188109fc6b
Revises: 8e8390138426
Create Date: 2018-04-27 14:12:16.709146
"""

from alembic import op

from socorro.lib.migrations import load_stored_proc


# revision identifiers, used by Alembic.
revision = '1e188109fc6b'
down_revision = '8e8390138426'


def upgrade():
    op.execute('DROP TABLE IF EXISTS plugins')
    op.execute('DROP TABLE IF EXISTS release_channel_matches')
    op.execute('DROP TABLE IF EXISTS replication_test')
    op.execute('DROP TABLE IF EXISTS sessions')
    op.execute('DROP TABLE IF EXISTS socorro_db_version')
    op.execute('DROP TABLE IF EXISTS socorro_db_version_history')
    op.execute('DROP TABLE IF EXISTS transform_rules')
    op.execute('DROP TABLE IF EXISTS crashes_by_user')
    op.execute('DROP TABLE IF EXISTS crashes_by_user_build')
    op.execute('DROP TABLE IF EXISTS uptime_levels')
    op.execute('DROP TABLE IF EXISTS modules')
    op.execute('DROP TABLE IF EXISTS crash_types')
    op.execute('DROP TABLE IF EXISTS process_types')
    op.execute('DROP TABLE IF EXISTS rank_compare')

    op.execute('DROP FUNCTION IF EXISTS backfill_rank_compare(date)')
    op.execute('DROP FUNCTION IF EXISTS update_rank_compare(date, boolean, interval)')

    # Load the new version of backfill_matviews
    load_stored_proc(op, ['backfill_matviews.sql'])


def downgrade():
    # There is no going back
    pass
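
The final step of upgrade() reloads backfill_matviews.sql through load_stored_proc, imported above from socorro.lib.migrations. As a rough sketch only (the procs directory path below is an assumption, not the actual implementation), a helper like that reads each named SQL file and executes it so the stored procedure is recreated:

import os


def load_stored_proc(op, filenames,
                     procs_dir='socorro/external/postgresql/raw_sql/procs'):
    # Illustrative sketch: read each stored-procedure definition and run it
    # via the Alembic operations object. The directory layout is assumed.
    for filename in filenames:
        path = os.path.join(procs_dir, filename)
        with open(path) as fp:
            op.execute(fp.read())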
45 changes: 1 addition & 44 deletions socorro/external/postgresql/crashstorage.py
@@ -103,8 +103,7 @@ def save_processed(self, processed_crash):
        self.transaction(self._save_processed_transaction, processed_crash)

    def _save_processed_transaction(self, connection, processed_crash):
        report_id = self._save_processed_report(connection, processed_crash)
        self._save_plugins(connection, processed_crash, report_id)
        self._save_processed_report(connection, processed_crash)

    def _save_processed_report(self, connection, processed_crash):
        """Here we INSERT or UPDATE a row in the reports table.
@@ -199,48 +198,6 @@ def print_as(a, b):
        report_id = single_value_sql(connection, upsert_sql, value_list)
        return report_id

    def _save_plugins(self, connection, processed_crash, report_id):
        """ Electrolysis Support - Optional - processed_crash may contain a
        ProcessType of plugin. In the future this value would be default,
        content, maybe even Jetpack... This indicates which process was the
        crashing process.
        plugin - When set to plugin, the jsonDocument MUST also contain
        PluginFilename and PluginName
        """
        process_type = processed_crash['process_type']
        if not process_type:
            return

        if process_type == "plugin":

            # Bug#543776 We actually are relaxing the non-null policy...
            # a null filename, name, and version is OK. We'll use empty strings
            try:
                plugin_filename = processed_crash['PluginFilename']
                plugin_name = processed_crash['PluginName']
            except KeyError as x:
                self.config.logger.error(
                    'the crash is missing a required field: %s', str(x)
                )
                return
            find_plugin_sql = ('select id from plugins '
                               'where filename = %s '
                               'and name = %s')
            try:
                single_value_sql(
                    connection,
                    find_plugin_sql,
                    (plugin_filename, plugin_name)
                )
            except SQLDidNotReturnSingleValue:
                insert_plugsins_sql = ("insert into plugins (filename, name) "
                                       "values (%s, %s) returning id")
                execute_no_results(
                    connection,
                    insert_plugsins_sql,
                    (plugin_filename, plugin_name)
                )

    @staticmethod
    def _table_suffix_for_crash_id(crash_id):
        """given a crash_id, return the name of its storage table"""
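
The _save_processed_report docstring above describes an INSERT-or-UPDATE on the reports table, with single_value_sql running upsert_sql and handing back the report id. Purely as a generic illustration (PostgreSQL 9.5+ syntax, simplified columns, an assumed unique constraint on uuid, and not the actual upsert_sql this module builds), the same pattern can be written with INSERT ... ON CONFLICT ... RETURNING:

# Generic sketch only; table and column names are simplified placeholders.
UPSERT_SQL = """
    INSERT INTO reports (uuid, signature, date_processed)
    VALUES (%s, %s, %s)
    ON CONFLICT (uuid) DO UPDATE
        SET signature = EXCLUDED.signature,
            date_processed = EXCLUDED.date_processed
    RETURNING id
"""


def save_report(cursor, uuid, signature, date_processed):
    # Insert the report, or update the existing row with the same uuid,
    # and return the id of whichever row was written.
    cursor.execute(UPSERT_SQL, (uuid, signature, date_processed))
    return cursor.fetchone()[0]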
