| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,160 @@ | ||
| import re | ||
| import json | ||
| import datetime | ||
|
|
||
class DefaultMangler(json.JSONDecoder):
    """Base JSON decoder for restoring fixtures into an existing GeoNode DB.

    Decodes a fixture dump (a JSON list of objects) and shifts every
    object's primary key by ``basepk`` so restored rows do not collide
    with rows already present in the database.

    Custom keyword arguments (consumed here, NOT passed to JSONDecoder):
        basepk    -- integer offset added to every object's 'pk' (default -1)
        owner     -- username assigned as the new owner (default 'admin')
        datastore -- target datastore name (default '')
        siteurl   -- base URL of the target GeoNode site (default '')
    """

    def __init__(self, *args, **kwargs):
        # Pop our custom options out of kwargs so any remaining keyword
        # arguments (e.g. object_hook) are forwarded to the stock
        # JSONDecoder instead of being silently dropped.
        self.basepk = kwargs.pop('basepk', -1)
        self.owner = kwargs.pop('owner', 'admin')
        self.datastore = kwargs.pop('datastore', '')
        self.siteurl = kwargs.pop('siteurl', '')

        super(DefaultMangler, self).__init__(*args, **kwargs)

    def default(self, obj):
        # Let the base class default method raise the TypeError.
        # NOTE(review): 'default' is an encoder hook; json never calls it
        # on a decoder, so this is effectively dead code kept for parity.
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode *json_string* and offset every object's 'pk' by basepk.

        json_string is the string you would normally give to json.loads.
        Returns the decoded list with mangled pks.
        """
        default_obj = super(DefaultMangler, self).decode(json_string)

        for obj in default_obj:
            obj['pk'] = obj['pk'] + self.basepk

        return default_obj
|
|
||
|
|
||
class ResourceBaseMangler(DefaultMangler):
    """Mangle base.resourcebase fixtures for restore.

    On top of the pk shift done by DefaultMangler.decode, this reassigns
    ownership, rewrites 'detail_url' from the dumped 'distribution_url',
    and appends one layers.uploadsession fixture per resource.
    """

    def default(self, obj):
        # Let the base class default method raise the TypeError.
        # NOTE(review): encoder hook on a decoder subclass; json never
        # calls it during decoding, kept only for parity.
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode *json_string*, remap owner/urls and add upload sessions.

        json_string is the string you would normally give to json.loads.
        """
        default_obj = super(ResourceBaseMangler, self).decode(json_string)

        upload_sessions = []
        for obj in default_obj:
            # NOTE: 'pk' was already shifted by the parent decode();
            # shifting it again here would double the offset and
            # desynchronize it from the foreign keys rewritten by the
            # other manglers.
            obj['fields']['owner'] = [self.owner]

            if 'distribution_url' in obj['fields']:
                distribution_url = obj['fields']['distribution_url']
                if distribution_url is not None and 'layers' in distribution_url:
                    try:
                        p = r'(?P<protocol>http.*://)?(?P<host>[^:/ ]+).?(?P<port>[0-9]*)(?P<details_url>.*)'
                        m = re.search(p, distribution_url)
                        if 'http' in m.group('protocol'):
                            # Strip the dumped host and graft the path
                            # onto the target site url.
                            obj['fields']['detail_url'] = self.siteurl + m.group('details_url')
                        else:
                            obj['fields']['detail_url'] = self.siteurl + distribution_url
                    except Exception:
                        # No usable protocol/host match (m is None or the
                        # protocol group is empty): keep the original url.
                        obj['fields']['detail_url'] = distribution_url

            upload_sessions.append(self.add_upload_session(obj['pk'], obj['fields']['owner']))

        default_obj.extend(upload_sessions)

        return default_obj

    def add_upload_session(self, pk, owner):
        """Build a layers.uploadsession fixture dict for resource *pk*.

        *owner* is stored as-is in the 'user' field (the caller passes the
        single-element owner list it just wrote on the resource).
        """
        obj = dict()

        obj['pk'] = pk
        obj['model'] = 'layers.uploadsession'

        obj['fields'] = dict()
        obj['fields']['user'] = owner
        obj['fields']['traceback'] = None
        obj['fields']['context'] = None
        obj['fields']['error'] = None
        obj['fields']['processed'] = True
        # Naive local timestamp in Django's fixture datetime format.
        obj['fields']['date'] = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")

        return obj
|
|
||
|
|
||
class LayerMangler(DefaultMangler):
    """Mangle layers.layer fixtures for restore into an existing site.

    On top of the pk shift done by DefaultMangler.decode, this links each
    layer to its upload session, clears its service and picks the target
    store.
    """

    def default(self, obj):
        # Let the base class default method raise the TypeError.
        # NOTE(review): encoder hook on a decoder subclass; json never
        # calls it during decoding, kept only for parity.
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode *json_string* and rewrite per-layer restore fields.

        json_string is the string you would normally give to json.loads.
        """
        default_obj = super(LayerMangler, self).decode(json_string)

        for obj in default_obj:
            # NOTE: 'pk' was already shifted by the parent decode();
            # shifting it again would break the 1:1 link with the upload
            # sessions created by ResourceBaseMangler.
            obj['fields']['upload_session'] = obj['pk']
            # Restored layers are local, never cascaded services.
            obj['fields']['service'] = None

            # Prefer the configured datastore; fall back to a store named
            # after the layer itself.
            if self.datastore:
                obj['fields']['store'] = self.datastore
            else:
                obj['fields']['store'] = obj['fields']['name']

        return default_obj
|
|
||
|
|
||
class LayerAttributesMangler(DefaultMangler):
    """Mangle layers.attribute fixtures so they follow their parent layers.

    DefaultMangler.decode already shifts each attribute's own 'pk' by
    basepk; this class additionally shifts the 'layer' foreign key so it
    keeps pointing at the (equally shifted) restored layer.
    """

    def default(self, obj):
        # Let the base class default method raise the TypeError.
        # NOTE(review): encoder hook on a decoder subclass; json never
        # calls it during decoding, kept only for parity.
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode *json_string*, remapping the 'layer' foreign keys.

        json_string is the string you would normally give to json.loads.
        """
        default_obj = super(LayerAttributesMangler, self).decode(json_string)

        # The parent decode() already applied the basepk offset to 'pk';
        # re-adding it here would double-shift attribute pks while the
        # layer FK is shifted once, so only the FK is adjusted.
        for obj in default_obj:
            obj['fields']['layer'] = obj['fields']['layer'] + self.basepk

        return default_obj
|
|
||
|
|
||
class MapLayersMangler(DefaultMangler):
    """Mangle maps.maplayer fixtures so they follow their parent maps.

    DefaultMangler.decode already shifts each map-layer's own 'pk' by
    basepk; this class additionally shifts the 'map' foreign key so it
    keeps pointing at the (equally shifted) restored map.
    """

    def default(self, obj):
        # Let the base class default method raise the TypeError.
        # NOTE(review): encoder hook on a decoder subclass; json never
        # calls it during decoding, kept only for parity.
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """Decode *json_string*, remapping the 'map' foreign keys.

        json_string is the string you would normally give to json.loads.
        """
        default_obj = super(MapLayersMangler, self).decode(json_string)

        # The parent decode() already applied the basepk offset to 'pk';
        # re-adding it here would double-shift map-layer pks while the
        # map FK is shifted once, so only the FK is adjusted.
        for obj in default_obj:
            obj['fields']['map'] = obj['fields']['map'] + self.basepk

        return default_obj
|
|
||
|
|
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,111 @@ | ||
| ######################################################################### | ||
| # | ||
| # Copyright (C) 2016 OpenPlans | ||
| # | ||
| # This program is free software: you can redistribute it and/or modify | ||
| # it under the terms of the GNU General Public License as published by | ||
| # the Free Software Foundation, either version 3 of the License, or | ||
| # (at your option) any later version. | ||
| # | ||
| # This program is distributed in the hope that it will be useful, | ||
| # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | ||
| # GNU General Public License for more details. | ||
| # | ||
| # You should have received a copy of the GNU General Public License | ||
| # along with this program. If not, see <http://www.gnu.org/licenses/>. | ||
| # | ||
| ######################################################################### | ||
|
|
||
| import traceback | ||
| import os, sys | ||
| import shutil | ||
| import helpers | ||
| import tempfile | ||
| import json | ||
|
|
||
| from optparse import make_option | ||
|
|
||
| from django.conf import settings | ||
| from django.core.management import call_command | ||
| from django.core.management.base import BaseCommand, CommandError | ||
| from django.db import ( | ||
| DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections, router, | ||
| transaction, | ||
| ) | ||
|
|
||
| class Command(BaseCommand): | ||
|
|
||
| help = 'Migrate existing Layers and Maps on GeoNode' | ||
|
|
||
| option_list = BaseCommand.option_list + ( | ||
| make_option( | ||
| '-i', | ||
| '--ignore-errors', | ||
| action='store_true', | ||
| dest='ignore_errors', | ||
| default=False, | ||
| help='Stop after any errors are encountered.'), | ||
| make_option( | ||
| '--backup-file', | ||
| dest='backup_file', | ||
| type="string", | ||
| help='Backup archive containing GeoNode data to restore.'), | ||
| make_option( | ||
| '--owner', | ||
| dest='owner', | ||
| type="string", | ||
| help='New owner of the GeoNode Layers/Maps.')) | ||
|
|
||
| def handle(self, **options): | ||
| ignore_errors = options.get('ignore_errors') | ||
| backup_file = options.get('backup_file') | ||
| owner = options.get('owner') | ||
|
|
||
| if not backup_file or len(backup_file) == 0: | ||
| raise CommandError("Backup archive '--backup-file' is mandatory") | ||
|
|
||
| if not owner or len(owner) == 0: | ||
| raise CommandError("Owner '--owner' is mandatory") | ||
|
|
||
| if helpers.confirm(prompt='WARNING: The migration may break some of your GeoNode existing Layers. Are you sure you want to proceed?', resp=False): | ||
| """Migrate existing Layers on GeoNode DB""" | ||
| try: | ||
| # Create Target Folder | ||
| restore_folder = os.path.join(tempfile.gettempdir(), 'restore') | ||
| if not os.path.exists(restore_folder): | ||
| os.makedirs(restore_folder) | ||
|
|
||
| # Extract ZIP Archive to Target Folder | ||
| target_folder = helpers.unzip_file(backup_file, restore_folder) | ||
|
|
||
| # Retrieve the max Primary Key from the DB | ||
| from geonode.base.models import ResourceBase | ||
| try: | ||
| higher_pk = ResourceBase.objects.all().order_by("-id")[0].pk | ||
| except: | ||
| higher_pk = 0 | ||
|
|
||
| # Restore Fixtures | ||
| for app_name, dump_name in zip(helpers.app_names, helpers.dump_names): | ||
| for mig_name, mangler in zip(helpers.migrations, helpers.manglers): | ||
| if app_name == mig_name: | ||
| fixture_file = os.path.join(target_folder, dump_name+'.json') | ||
|
|
||
| print "Deserializing "+fixture_file | ||
| mangler = helpers.load_class(mangler) | ||
|
|
||
| obj = helpers.load_fixture(app_name, fixture_file, mangler=mangler, basepk=higher_pk, owner=owner, datastore=settings.OGC_SERVER['default']['DATASTORE'], siteurl=settings.SITEURL) | ||
|
|
||
| from django.core import serializers | ||
|
|
||
| objects = serializers.deserialize('json', json.dumps(obj), ignorenonexistent=True) | ||
| for obj in objects: | ||
| obj.save(using=DEFAULT_DB_ALIAS) | ||
|
|
||
| print "Restore finished. Please find restored files and dumps into: '"+target_folder+"'." | ||
|
|
||
| except Exception, err: | ||
| traceback.print_exc() | ||
|
|
||
|
|