160 changes: 160 additions & 0 deletions scripts/backup-restore/gn24_to_24.py
@@ -0,0 +1,160 @@
import re
import json
import datetime

class DefaultMangler(json.JSONDecoder):
    """JSON decoder that offsets every deserialized object's primary key by basepk."""
    def __init__(self, *args, **kwargs):

        self.basepk = kwargs.get('basepk', -1)
        self.owner = kwargs.get('owner', 'admin')
        self.datastore = kwargs.get('datastore', '')
        self.siteurl = kwargs.get('siteurl', '')

        super(DefaultMangler, self).__init__(*args)

    def default(self, obj):
        # Encoder-style hook; json.JSONDecoder never calls it.
        return json.JSONEncoder.default(self, obj)

    def decode(self, json_string):
        """
        json_string is the raw JSON string passed to json.load / json.loads.
        """
        default_obj = super(DefaultMangler, self).decode(json_string)

        # Shift every primary key above the highest pk already in the target DB.
        for obj in default_obj:
            obj['pk'] = obj['pk'] + self.basepk

        return default_obj


class ResourceBaseMangler(DefaultMangler):
    """Mangler for base.resourcebase fixtures: offsets pks, reassigns the owner,
    rebuilds detail_url against siteurl and appends matching upload sessions."""
    def decode(self, json_string):
        """
        json_string is the raw JSON string passed to json.load / json.loads.
        """
        default_obj = super(ResourceBaseMangler, self).decode(json_string)

        upload_sessions = []
        for obj in default_obj:
            obj['pk'] = obj['pk'] + self.basepk

            obj['fields']['owner'] = [self.owner]

            if 'distribution_url' in obj['fields']:
                if obj['fields']['distribution_url'] is not None and 'layers' in obj['fields']['distribution_url']:
                    try:
                        p = '(?P<protocol>http.*://)?(?P<host>[^:/ ]+).?(?P<port>[0-9]*)(?P<details_url>.*)'
                        m = re.search(p, obj['fields']['distribution_url'])
                        if 'http' in m.group('protocol'):
                            obj['fields']['detail_url'] = self.siteurl + m.group('details_url')
                        else:
                            obj['fields']['detail_url'] = self.siteurl + obj['fields']['distribution_url']
                    except Exception:
                        obj['fields']['detail_url'] = obj['fields']['distribution_url']

                    upload_sessions.append(self.add_upload_session(obj['pk'], obj['fields']['owner']))

        default_obj.extend(upload_sessions)

        return default_obj

    def add_upload_session(self, pk, owner):
        obj = dict()

        obj['pk'] = pk
        obj['model'] = 'layers.uploadsession'

        obj['fields'] = dict()
        obj['fields']['user'] = owner
        obj['fields']['traceback'] = None
        obj['fields']['context'] = None
        obj['fields']['error'] = None
        obj['fields']['processed'] = True
        obj['fields']['date'] = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")

        return obj


class LayerMangler(DefaultMangler):
    """Mangler for layers.layer fixtures: offsets pks, links the upload session and sets the target store."""
    def decode(self, json_string):
        """
        json_string is the raw JSON string passed to json.load / json.loads.
        """
        default_obj = super(LayerMangler, self).decode(json_string)

        for obj in default_obj:
            obj['pk'] = obj['pk'] + self.basepk

            # ResourceBaseMangler emits an upload session with this same pk.
            obj['fields']['upload_session'] = obj['pk']
            obj['fields']['service'] = None

            if self.datastore:
                obj['fields']['store'] = self.datastore
            else:
                obj['fields']['store'] = obj['fields']['name']

        return default_obj


class LayerAttributesMangler(DefaultMangler):
    """Mangler for layers.attribute fixtures: offsets pks and the referenced layer pk."""
    def decode(self, json_string):
        """
        json_string is the raw JSON string passed to json.load / json.loads.
        """
        default_obj = super(LayerAttributesMangler, self).decode(json_string)

        for obj in default_obj:
            obj['pk'] = obj['pk'] + self.basepk

            obj['fields']['layer'] = obj['fields']['layer'] + self.basepk

        return default_obj


class MapLayersMangler(DefaultMangler):
    """Mangler for maps.maplayer fixtures: offsets pks and the referenced map pk."""
    def decode(self, json_string):
        """
        json_string is the raw JSON string passed to json.load / json.loads.
        """
        default_obj = super(MapLayersMangler, self).decode(json_string)

        for obj in default_obj:
            obj['pk'] = obj['pk'] + self.basepk

            obj['fields']['map'] = obj['fields']['map'] + self.basepk

        return default_obj
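A minimal sketch of how these manglers are meant to be driven (run from scripts/backup-restore so gn24_to_24 is importable; the one-object fixture string is made up for illustration). json.loads instantiates the cls with the extra keyword arguments and calls its decode(), so the pk offset is applied while the fixture is still a list of plain dicts:

import json
from gn24_to_24 import DefaultMangler

fixture = '[{"pk": 1, "model": "base.resourcebase", "fields": {}}]'
objs = json.loads(fixture, cls=DefaultMangler, basepk=1000)
assert objs[0]['pk'] == 1001  # pk shifted by basepk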


30 changes: 25 additions & 5 deletions scripts/backup-restore/helpers.py
100644 → 100755
@@ -9,6 +9,9 @@
import time
import shutil

import json
import re

MEDIA_ROOT = 'uploaded'
STATIC_ROOT = 'static_root'
STATICFILES_DIRS = 'static_dirs'
@@ -26,6 +29,9 @@

app_names = config.get('fixtures', 'apps').split(',')
dump_names = config.get('fixtures', 'dumps').split(',')
migrations = config.get('fixtures', 'migrations').split(',')
manglers = config.get('fixtures', 'manglers').split(',')


def get_db_conn():
"""Get db conn (GeoNode)"""
@@ -73,17 +79,19 @@ def cleanup_db():
    conn.commit()


def load_fixture(apps, fixture_file):
    from django.core import serializers
def load_fixture(apps, fixture_file, mangler=None, basepk=-1, owner="admin", datastore='', siteurl=''):

    fixture = open(fixture_file, 'rb')

    objects = serializers.deserialize('json', fixture, ignorenonexistent=True)
    for obj in objects:
        obj.save()
    if mangler:
        objects = json.load(fixture, cls=mangler, basepk=basepk, owner=owner, datastore=datastore, siteurl=siteurl)
    else:
        objects = json.load(fixture)

    fixture.close()

    return objects


def get_dir_time_suffix():
"""Returns the name of a folder with the 'now' time as suffix"""
@@ -177,3 +185,15 @@ def confirm(prompt=None, resp=False):
            return False


def load_class(name):
    """Import a class from a dotted path such as 'gn24_to_24.DefaultMangler'."""
    components = name.split('.')
    mod = __import__(components[0])

    for comp in components[1:]:
        mod = getattr(mod, comp)

    return mod
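For instance, migrate_layers.py below resolves the dotted mangler paths from settings.ini with load_class and hands the class to load_fixture. A minimal sketch, with a hypothetical fixture path and site URL:

mangler = load_class('gn24_to_24.ResourceBaseMangler')
objects = load_fixture('base.resourcebase', 'backup/resourcebases.json', mangler=mangler,
                       basepk=1000, owner='admin', siteurl='http://localhost:8000/')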



74 changes: 74 additions & 0 deletions scripts/backup-restore/migrate_layers.py
@@ -0,0 +1,74 @@
import traceback
import os, sys
import shutil
import helpers
import json

from django.conf import settings
from django.core.management import call_command
from django.db import (
    DEFAULT_DB_ALIAS, DatabaseError, IntegrityError, connections, router,
    transaction,
)

def migrate_layers(archive, owner):
    """Migrate existing Layers on GeoNode DB"""
    try:
        # Create Target Folder
        restore_folder = 'restore'
        if not os.path.exists(restore_folder):
            os.makedirs(restore_folder)

        # Extract ZIP Archive to Target Folder
        target_folder = helpers.unzip_file(archive, restore_folder)

        # Retrieve the max Primary Key from the DB
        from geonode.base.models import ResourceBase
        try:
            higher_pk = ResourceBase.objects.all().order_by("-id")[0].pk
        except IndexError:
            higher_pk = 0

        # Restore Fixtures
        for app_name, dump_name in zip(helpers.app_names, helpers.dump_names):
            for mig_name, mangler in zip(helpers.migrations, helpers.manglers):
                if app_name == mig_name:
                    fixture_file = os.path.join(target_folder, dump_name + '.json')

                    print "Deserializing " + fixture_file
                    mangler_class = helpers.load_class(mangler)

                    fixture_objs = helpers.load_fixture(
                        app_name, fixture_file, mangler=mangler_class, basepk=higher_pk, owner=owner,
                        datastore=settings.OGC_SERVER['default']['DATASTORE'], siteurl=settings.SITEURL)

                    from django.core import serializers

                    objects = serializers.deserialize('json', json.dumps(fixture_objs), ignorenonexistent=True)
                    for obj in objects:
                        obj.save(using=DEFAULT_DB_ALIAS)

    except Exception as err:
        # print str(err)
        traceback.print_exc()

    print "Restore finished. Please find the restored files and dumps in: '" + target_folder + "'."


if __name__ == '__main__':
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geonode.settings")

    restore_file = None
    owner = None
    try:
        restore_file = sys.argv[1]
        owner = sys.argv[2]
    except IndexError:
        pass

    if restore_file and owner:
        if helpers.confirm(prompt='WARNING: The migration may break some of your existing GeoNode Layers. Are you sure you want to proceed?', resp=False):
            migrate_layers(restore_file, owner)
    else:
        print "Please, provide the full path to the ZIP archive to Restore AND the Owner of the imported Layers.\n"
        print "Usage example: python migrate_layers.py backup/geonode_backup_test.zip admin\n"


3 changes: 2 additions & 1 deletion scripts/backup-restore/restore.py
100644 → 100755
@@ -149,6 +149,7 @@ def restore_full(archive):
        if helpers.confirm(prompt='WARNING: The restore will overwrite all your GeoNode data and files. Are you sure you want to proceed?', resp=False):
            restore_full(restore_file)
    else:
        print "Please, provide the full path to the ZIP archive to Restore."
        print "Please, provide the full path to the ZIP archive to Restore.\n"
        print "Usage example: python restore.py backup/geonode_backup_test.zip\n"


8 changes: 8 additions & 0 deletions scripts/backup-restore/settings.ini
100644 → 100755
@@ -11,3 +11,11 @@ apps = people,account,avatar.avatar,base.license,base.topiccategory,base.regio

dumps = people,accounts,avatars,licenses,topiccategories,regions,resourcebases,contactroles,links,restrictioncodetypes,spatialrepresentationtypes,useropermissions,groupopermissions,uploadsessions,layers,attributes,maps,maplayers,documents,tags

# Migrate from GN 2.0 to GN 2.4
#migrations = base.resourcebase,layers.layer,layers.attribute,maps.map,maps.maplayer
#manglers = gn20_to_24.ResourceBaseMangler,gn20_to_24.LayerMangler,gn20_to_24.LayerAttributesMangler,gn20_to_24.MapMangler,gn20_to_24.MapLayersMangler

# Migrate from GN 2.4 to GN 2.4
migrations = base.resourcebase,layers.layer,layers.attribute,maps.map,maps.maplayer
manglers = gn24_to_24.ResourceBaseMangler,gn24_to_24.LayerMangler,gn24_to_24.LayerAttributesMangler,gn24_to_24.DefaultMangler,gn24_to_24.MapLayersMangler
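migrate_layers.py consumes these two lists pairwise via zip(helpers.migrations, helpers.manglers), so they must stay the same length and in the same order. A quick sanity check, as a sketch run from scripts/backup-restore:

import helpers

assert len(helpers.migrations) == len(helpers.manglers)
for app, mangler in zip(helpers.migrations, helpers.manglers):
    print app, '->', mangler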