This repository has been archived by the owner on Aug 25, 2023. It is now read-only.

Commit

Merge d8fa829 into 08fbada
radkomateusz committed Oct 16, 2018
2 parents 08fbada + d8fa829 commit f5ccc8d
Showing 6 changed files with 144 additions and 1 deletion.
39 changes: 39 additions & 0 deletions backup-entity-migrator-service.yaml
@@ -0,0 +1,39 @@
runtime: python27
api_version: 1
threadsafe: true
service: backup-entity-migration

instance_class: F2

libraries:
- name: ssl
  version: 2.7.11

skip_files:
- ^(.*/)?#.*#$
- ^(.*/)?.*~$
- ^(.*/)?.*\.py[co]$
- ^(.*/)?.*/RCS/.*$
- ^(.*/)?\..*$
- google_appengine.*
- gitlab-bbq
- docs
- tests
- src/datastore_export
- src/restore
- src/retention

handlers:
- url: /favicon.ico
  static_files: static/images/favicon.ico
  upload: static/images/favicon.ico
  application_readable: true
- url: /backup_entity/migrate
  script: src.backup_entity_migration.backup_entity_migration_handlers.app
  secure: always
  login: admin
- url: /backup_entity/migrate_backup_for_table
  script: src.backup_entity_migration.backup_entity_migration_handlers.app
  secure: always
  login: admin
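
The file above defines a separate App Engine service, backup-entity-migration, whose migration endpoints are locked down with secure: always and login: admin. Push-queue and cron requests still reach such handlers, because App Engine issues them with the X-AppEngine-QueueName header, which satisfies the admin restriction. As a hedged illustration only (the repository's own Tasks helper is not shown in this diff), kicking off the migration with the stock taskqueue API might look like this:

# Hypothetical, not part of the commit: enqueue a task that App Engine
# routes to the backup-entity-migration service declared above.
from google.appengine.api import taskqueue

def start_backup_entity_migration():
    taskqueue.add(
        queue_name='entity-backup-migration-scheduler',
        url='/backup_entity/migrate',
        method='GET',
        target='backup-entity-migration')  # dispatch to this service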

18 changes: 17 additions & 1 deletion config/queue.yaml
@@ -84,4 +84,20 @@ queue:
  bucket_size: 1
  retry_parameters:
    task_retry_limit: 5
    task_age_limit: 1h

- name: entity-backup-migration-scheduler
  rate: 10/s
  max_concurrent_requests: 20
  bucket_size: 20
  retry_parameters:
    task_retry_limit: 5
    task_age_limit: 3h

- name: entity-backup-migration-worker
  rate: 100/s
  max_concurrent_requests: 100
  bucket_size: 100
  retry_parameters:
    task_retry_limit: 5
    task_age_limit: 6h
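
The two new queues bound the fan-out of the migration: the scheduler queue dispatches at most 10 tasks per second (bucket of 20, at most 20 concurrent requests) and the worker queue 100 per second (bucket of 100, 100 concurrent). Because a single push-queue add call accepts at most 100 tasks, while the scheduler handler below works in pages of 1000 tables, batch scheduling has to be chunked. A minimal stand-in for such a helper, assuming the stock taskqueue API (the project's Tasks.schedule is not part of this diff):

# Illustrative sketch only, not the project's Tasks.schedule.
from google.appengine.api import taskqueue

def schedule_in_batches(queue_name, tasks, batch_size=100):
    # Queue.add accepts at most 100 tasks per call, so split the batch.
    queue = taskqueue.Queue(queue_name)
    for start in range(0, len(tasks), batch_size):
        queue.add(tasks[start:start + batch_size])
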
7 changes: 7 additions & 0 deletions src/backup/datastore/Backup.py
@@ -64,6 +64,13 @@ def get_all_backups_sorted(cls, ancestor_key):
            .order(-Backup.created) \
            .fetch()

    # nopep8 pylint: disable=C0121
    # TODO remove after migration
    @classmethod
    @retry(Exception, tries=5, delay=1, backoff=2)
    def get_all_backups_entities(cls, ancestor_key):
        return Backup.query(ancestor=ancestor_key).fetch()

    @classmethod
    def sort_backups_by_create_time_desc(cls, backups):
        copy = list(backups)
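
The new get_all_backups_entities classmethod is a plain ancestor query wrapped in a retry decorator: up to five attempts with an exponentially growing pause (1s, 2s, 4s, ...) between them, so transient datastore errors do not fail a migration task outright. The decorator's import sits outside the visible hunk; a minimal, self-contained sketch of an equivalent decorator (an assumption, not the library actually used here):

# Hypothetical equivalent of @retry(Exception, tries=5, delay=1, backoff=2).
import functools
import time

def retry(exceptions, tries=5, delay=1, backoff=2):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            wait = delay
            for attempt in range(tries):
                try:
                    return func(*args, **kwargs)
                except exceptions:
                    if attempt == tries - 1:
                        raise           # attempts exhausted, re-raise
                    time.sleep(wait)
                    wait *= backoff     # 1s, 2s, 4s, 8s
        return wrapper
    return decorator
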
11 changes: 11 additions & 0 deletions src/backup/datastore/Table.py
@@ -15,6 +15,17 @@ class Table(ndb.Model):
    last_checked = ndb.DateTimeProperty(indexed=True)
    partition_id = ndb.StringProperty(indexed=True)


    # TODO remove after migration
    @classmethod
    def get_all_with_cursor(cls, cursor):
        ctx = ndb.get_context()
        ctx.set_cache_policy(False)
        query = cls.query()
        tables, cursor, more = query.fetch_page(1000, start_cursor=cursor)
        return tables, cursor, more


    @classmethod
    def get_table_from_backup(cls, backup):
        parent = backup.key.parent()
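
get_all_with_cursor pages through every Table entity 1000 at a time and switches off the NDB in-context cache, which would otherwise keep each fetched entity in request memory. The handlers in this commit chain tasks rather than loop, but the same method could also be drained directly; a short sketch, assuming it runs inside an App Engine (or remote_api) context:

# Illustrative only: drain all pages of Table entities with the cursor API.
from src.backup.datastore.Table import Table

def iter_all_tables():
    cursor = None
    more = True
    while more:
        tables, cursor, more = Table.get_all_with_cursor(cursor)
        for table in tables:
            yield table
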
Empty file.
70 changes: 70 additions & 0 deletions src/backup_entity_migration/backup_entity_migration_handlers.py
@@ -0,0 +1,70 @@
import logging

import webapp2
from google.appengine.ext import ndb
from google.appengine.ext.ndb import Cursor

from src.backup.datastore.Backup import Backup
from src.backup.datastore.Table import Table
from src.commons.config.configuration import configuration
from src.commons.tasks import Tasks


class BackupEntityMigrationHandler(webapp2.RequestHandler):

    def get(self):

        table_cursor_value = self.request.get('cursor', None)
        if table_cursor_value:
            table_cursor = Cursor(urlsafe=table_cursor_value)
        else:
            table_cursor = None
        logging.info("Migration of Backup entities was started. Table_cursor: %s",
                     table_cursor)

        self.schedule_backup_entities(table_cursor)

    def schedule_backup_entities(self, table_cursor):
        tables, next_cursor, more = Table.get_all_with_cursor(table_cursor)

        Tasks.schedule('entity-backup-migration-worker',
                       self.__create_table_backups_migration_tasks(tables))

        if more and next_cursor:
            Tasks.schedule('entity-backup-migration-scheduler', Tasks.create(
                method='GET',
                url='/backup_entity/migrate',
                params={'cursor': next_cursor.urlsafe()},
                name='migrationSchedule_' + str(next_cursor.urlsafe()).replace(
                    '=', '_').replace('+', '__').replace('/', '___')))

    def __create_table_backups_migration_tasks(self, tables):
        return [
            Tasks.create(
                method='GET',
                url='/backup_entity/migrate_backup_for_table',
                params={'table_key': table.key.urlsafe()},
                name='migrate_backup_for_table_' + str(
                    table.key.urlsafe()).replace(
                    '=', '_').replace('+', '__').replace('/', '___'))
            for table in tables
        ]


class BackupEntityMigrationWorkerHandler(webapp2.RequestHandler):

    def get(self):
        table_key = self.request.get('table_key', None)
        self.migrate_backup_entities(ndb.Key(urlsafe=table_key))

    @ndb.transactional
    def migrate_backup_entities(self, table_key):
        backups = Backup.get_all_backups_entities(table_key)
        logging.info("Migrating backups: %s", backups)
        ndb.put_multi(backups, use_cache=False)


app = webapp2.WSGIApplication([
    ('/backup_entity/migrate', BackupEntityMigrationHandler),
    ('/backup_entity/migrate_backup_for_table', BackupEntityMigrationWorkerHandler)
], debug=configuration.debug_mode)
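
Both handlers derive push-task names from urlsafe keys and cursors, replacing '=', '+' and '/' because task names may only contain letters, digits, underscores and hyphens. Naming the tasks also means App Engine rejects a second task with the same name, so accidental double scheduling of a table or cursor page is harmless. A small hypothetical helper equivalent to the inline .replace() chains above (not a refactoring that exists in the commit):

def task_name_safe(urlsafe_value):
    # Task names are restricted to [a-zA-Z0-9_-]; the handlers defensively
    # replace '=', '+' and '/'.
    return str(urlsafe_value).replace('=', '_') \
                             .replace('+', '__') \
                             .replace('/', '___')

# e.g. name='migrate_backup_for_table_' + task_name_safe(table.key.urlsafe())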
