Skip to content

Commit

Permalink
Support the new Firebase Realtime Database structure #104
Browse files Browse the repository at this point in the history
  • Loading branch information
Matthias committed Mar 25, 2019
1 parent 391ae49 commit c90f262
Show file tree
Hide file tree
Showing 4 changed files with 98 additions and 81 deletions.
145 changes: 81 additions & 64 deletions mapswipe_workers/basic/BaseImport.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,8 @@ def __init__(self, project_draft):

logging.warning(f'{submission_key} - __init__ - start init')

# TODO: remove project_draft_id
# (there should be one id for project and projectDraft)
self.project_draft_id = project_draft['project_draft_id']
self.project_type = project_draft['projectType'],
self.name = project_draft['name']
Expand All @@ -77,8 +79,7 @@ def __init__(self, project_draft):
]:
self.info[key] = project_draft[key]


def create_project(self, fb_db):
def create_project(self, fb_db):
"""
The function to import a new project in firebase and postgres.
Expand All @@ -87,20 +88,23 @@ def create_project(self, fb_db):
tuple
project_id and project_type
"""

# psql_db = auth.psqlDB()

try:
logging.warning(
f'{self.project_draft_id}'
f'- import_project - start importing'
)

projects_ref = fb_db.reference('projects/')
# create a new empty project in firebase
new_project_ref = projects_ref.push()
# TODO: remove if project_draft_id and project_id are the same
# projects_ref = fb_db.reference('projects/')
# # create a new empty project in firebase
# new_project_ref = projects_ref.push()
# # get the project id of new created project
# project_id = new_project_ref.key

project_id = self.project_draft_id
new_project_ref = fb_db.reference(f'projects/{project_id}')
# get the project id of new created project
project_id = new_project_ref.key

# create groups and tasks for this project.
# this function is defined by the respective type of this project
Expand Down Expand Up @@ -141,7 +145,9 @@ def create_project(self, fb_db):

# upload data to firebase
new_project_ref.set(project)
logging.warning('%s - uploaded project in firebase' % project['id'])
logging.warning(
'%s - uploaded project in firebase' % project['id']
)

new_groups_ref = fb_db.reference(f'groups/{project_id}/')
new_groups_ref.set(groups)
Expand All @@ -151,23 +157,37 @@ def create_project(self, fb_db):
new_tasks_ref.set(tasks)
logging.warning('%s - uploaded tasks in firebase' % project_id)

# TODO
# # set import complete in firebase
# self.set_import_complete(firebase)
# logging.warning('%s - import_project - import finished' % self.import_key)
# logging.warning('%s - import_project - imported new project with id: %s' % (self.import_key, project_id))
# upload data to firebase
project_draft_ref = fb_db.reference(
f'projectDrafts/{self.project_draft_id}/complete'
)
project_draft_ref.set(True)

logging.warning(
f'{self.project_draft_id} '
f'- set_import_complete - set import complete'
)
logging.warning(
'%s - import_project - import finished' % self.import_key
)
logging.warning(
f'{self.project_draft_id}'
f' - import_project - '
f'imported new project with id: {project_id}'
)
return project_id

except Exception as e:
logging.warning('%s - import_project - could not import project' % self.project_draft_id)
logging.warning("%s - import_project - %s" % (self.project_draft_id, e))
logging.warning(
f'{self.project_draft_id}'
f' - import_project - '
f'could not import project'
)
logging.warning(
"%s - import_project - %s" % (self.project_draft_id, e))
error_handling.log_error(e, logging)

return (None, None)


def get_new_project_id(self, firebase):
"""
The function to get a project id which is not used in firebase
Expand Down Expand Up @@ -196,20 +216,26 @@ def get_new_project_id(self, firebase):

project_keys = fb_db.child('projects').shallow().get().val()
if not project_keys:
# set minimum project id to 1000, if no project has been imported yet
# set minimum project id to 1000,
# if no project has been imported yet
project_keys = [1000]

project_ids = list(map(int, list(project_keys)))
project_ids.sort()
highest_project_id = project_ids[-1]

logging.warning('ALL - get_new_project_id - highest existing project id: %s' % highest_project_id)
logging.warning(
f'ALL - get_new_project_id - '
f'highest existing project id: {highest_project_id}'
)
new_project_id = highest_project_id + 2

logging.warning('ALL - get_new_project_id - returned new project id: %s' % new_project_id)
logging.warning(
f'ALL - get_new_project_id - '
f'returned new project id: {new_project_id}'
)
return new_project_id


def execute_import_queries(self, project_id, project, groups):
'''
Defines SQL queries and data for import a project into postgres.
Expand All @@ -230,7 +256,6 @@ def execute_import_queries(self, project_id, project, groups):
VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s);
'''


data_project = [
int(project_dict['contributors']),
int(project_dict['groupAverage']),
Expand Down Expand Up @@ -345,10 +370,10 @@ def execute_import_queries(self, project_id, project, groups):
os.remove(groups_txt_filename)
os.remove(tasks_txt_filename)


def create_groups_txt_file(self, project_id, groups):
"""
Creates a text file containing groups information for a specific project.
Creates a text file containing groups information
for a specific project.
The text file is temporary and used only by BaseImport module.
Parameters
Expand All @@ -367,8 +392,9 @@ def create_groups_txt_file(self, project_id, groups):
if not os.path.isdir('{}/tmp'.format(DATA_PATH)):
os.mkdir('{}/tmp'.format(DATA_PATH))

# create txt file with header for later import with copy function into postgres
groups_txt_filename = '{}/tmp/raw_groups_{}.txt'.format(DATA_PATH, project_id)
# create txt file with header for later
# import with copy function into postgres
groups_txt_filename = f'{DATA_PATH}/tmp/raw_groups_{project_id}.txt'
groups_txt_file = open(groups_txt_filename, 'w', newline='')
fieldnames = (
'project_id',
Expand All @@ -378,7 +404,12 @@ def create_groups_txt_file(self, project_id, groups):
'verificationCount',
'info'
)
w = csv.DictWriter(groups_txt_file, fieldnames=fieldnames, delimiter='\t', quotechar="'")
w = csv.DictWriter(
groups_txt_file,
fieldnames=fieldnames,
delimiter='\t',
quotechar="'",
)

for group in groups:
try:
Expand All @@ -387,7 +418,9 @@ def create_groups_txt_file(self, project_id, groups):
"group_id": int(groups[group]['id']),
"count": int(groups[group]['count']),
"completedCount": int(groups[group]['completedCount']),
"verificationCount": int(groups[group]['verificationCount']),
"verificationCount": int(
groups[group]['verificationCount']
),
"info": {}
}

Expand All @@ -406,17 +439,21 @@ def create_groups_txt_file(self, project_id, groups):
w.writerow(output_dict)

except Exception as e:
logging.warning('%s - set_groups_postgres - groups missed critical information: %s' % (project_id, e))
logging.warning(
f'{project_id}'
f' - set_groups_postgres - '
f'groups missed critical information: {e}'
)
error_handling.log_error(e, logging)

groups_txt_file.close()

return groups_txt_filename


def create_tasks_txt_file(self, project_id, tasks):
"""
Creates a text file containing tasks information for a specific project.
Creates a text file containing tasks information
for a specific project.
It iterates over groups and extracts tasks.
The text file is temporary and used only by BaseImport module.
Expand All @@ -437,11 +474,16 @@ def create_tasks_txt_file(self, project_id, tasks):
os.mkdir('{}/tmp'.format(DATA_PATH))

# save tasks in txt file
tasks_txt_filename = '{}/tmp/raw_tasks_{}.txt'.format(DATA_PATH, project_id)
tasks_txt_filename = f'{DATA_PATH}/tmp/raw_tasks_{project_id}.txt'
tasks_txt_file = open(tasks_txt_filename, 'w', newline='')

fieldnames = ('task_id', 'project_id', 'group_id', 'info')
w = csv.DictWriter(tasks_txt_file, fieldnames=fieldnames, delimiter='\t', quotechar="'")
w = csv.DictWriter(
tasks_txt_file,
fieldnames=fieldnames,
delimiter='\t',
quotechar="'",
)

for group in tasks:
group_id = int(group)
Expand All @@ -460,36 +502,11 @@ def create_tasks_txt_file(self, project_id, tasks):

w.writerow(output_dict)
except Exception as e:
logging.warning('%s - set_tasks_postgres - tasks missed critical information: %s' % (project_id, e))
logging.warning(
f'{project_id}'
f' - set_tasks_postgres - '
f'tasks missed critical information: {e}'
)

tasks_txt_file.close()
return tasks_txt_filename


def set_import_complete(self, firebase):
    """
    Mark this import as complete in firebase.

    Writes True to imports/<project_draft_id>/complete so the import
    is not picked up again.

    Parameters
    ----------
    firebase : pyrebase firebase object
        initialized firebase app with admin authentication

    Returns
    -------
    bool
        True (always; errors propagate as exceptions)
    """
    # TODO: Do we need this function?
    # NOTE(review): this writes to the legacy 'imports/' path via the
    # pyrebase API, while create_project() now marks completion under
    # 'projectDrafts/<id>/complete' via firebase_admin — presumably
    # this method is obsolete after the restructure; confirm before
    # keeping it.

    fb_db = firebase.database()
    fb_db.child("imports").child(self.project_draft_id).child('complete').set(True)

    logging.warning(
        f'{self.project_draft_id} '
        f'- set_import_complete - set import complete')
    return True
10 changes: 5 additions & 5 deletions tests/new_test_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,17 @@


def test_import_process():
imported_project_ids = BaseFunctions.run_create_project()
created_project_ids = BaseFunctions.run_create_project()

# save all keys to disk
filename = 'imported_project_ids.pickle'
filename = 'created_project_ids.pickle'
if os.path.isfile(filename):
with open(filename, 'rb') as f:
already_imported_project_ids = pickle.load(f)
imported_project_ids = imported_project_ids + already_imported_project_ids
already_created_project_ids = pickle.load(f)
created_project_ids = created_project_ids + already_created_project_ids

with open(filename, 'wb') as f:
pickle.dump(imported_project_ids, f)
pickle.dump(created_project_ids, f)


if __name__ == '__main__':
Expand Down
16 changes: 8 additions & 8 deletions tests/new_test_initialize.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,13 +11,13 @@ def create_project_drafts_in_firebase(ref):
sample_project_drafts = json.load(f)

# upload sample data to firebaseio.com/imports
project_draft_keys = []
project_draft_ids = []
for project in sample_project_drafts:
project_draft_keys.append(
project_draft_ids.append(
ref.push(sample_project_drafts[project]).key
)

save_project_draft_keys_to_disk(project_draft_keys)
save_project_draft_ids_to_disk(project_draft_ids)

# for import_key in uploaded_project_keys:
# fb_db.update(
Expand All @@ -27,15 +27,15 @@ def create_project_drafts_in_firebase(ref):
# )


def save_project_draft_keys_to_disk(project_draft_keys):
filename = 'project_draft_keys.pickle'
def save_project_draft_ids_to_disk(project_draft_ids):
filename = 'project_draft_ids.pickle'
if os.path.isfile(filename):
with open(filename, 'rb') as f:
existing_project_draft_keys = pickle.load(f)
project_draft_keys = existing_project_draft_keys + project_draft_keys
existing_project_draft_ids = pickle.load(f)
project_draft_ids = existing_project_draft_ids + project_draft_ids

with open(filename, 'wb') as f:
pickle.dump(project_draft_keys, f)
pickle.dump(project_draft_ids, f)


if __name__ == '__main__':
Expand Down
8 changes: 4 additions & 4 deletions tests/new_test_terminate.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,22 +64,22 @@ def delete_local_files(project_id, import_key):
#pg_db = auth.postgresDB()
fb_db = auth.firebaseDB()

filename = 'firebase_project_ids.pickle'
filename = 'created_project_ids.pickle'
if os.path.isfile(filename):
with open(filename, 'rb') as f:
project_ids = pickle.load(f)
for project_id, in projects_ids:
delete_sample_data_from_firebase(fb_db, project_id)
os.remove('firebase_project_ids.pickle')
os.remove('created_project_ids.pickle')

filename = 'project_draft_keys.pickle'
filename = 'project_draft_ids.pickle'
if os.path.isfile(filename):
with open(filename, 'rb') as f:
project_draft_ids = pickle.load(f)
for project_draft_id in project_draft_ids:
ref = fb_db.reference(f'projectDrafts/{project_draft_id}')
ref.set({})
os.remove('project_draft_keys.pickle')
os.remove('project_draft_ids.pickle')


# delete_local_files(project_id, import_key)
Expand Down

0 comments on commit c90f262

Please sign in to comment.