From 76a882b64471a12128a278006cc8f9a9fffa3776 Mon Sep 17 00:00:00 2001
From: Oliver Standeven
Date: Mon, 11 Jan 2016 14:05:57 +0000
Subject: [PATCH] removed cachescripts, and small modifications

---
 .gitignore                            |   1 +
 cachescripts/parse.py                 |  80 -------
 cachescripts/requirements.txt         |   3 -
 cachescripts/updateConfig.js          |  98 --------
 cachescripts/utils.py                 | 212 -----------------
 cachescripts/wfsCapabilities.py       | 122 ----------
 cachescripts/wmsCapabilities.py       | 329 --------------------------
 cachescripts/wmsCapabilities_2_6_6.py | 212 -----------------
 collaboration/views/authorised.jade   |   2 +-
 collaboration/views/index.jade        |   2 +-
 middleware/portalflask/views/proxy.py |   3 +-
 package.json                          |   3 +
 src/js/addLayersForm.js               |   3 +-
 src/js/configure.js                   |   2 +-
 src/js/gisportal.js                   |   2 +-
 15 files changed, 10 insertions(+), 1064 deletions(-)
 delete mode 100644 cachescripts/parse.py
 delete mode 100644 cachescripts/requirements.txt
 delete mode 100644 cachescripts/updateConfig.js
 delete mode 100644 cachescripts/utils.py
 delete mode 100644 cachescripts/wfsCapabilities.py
 delete mode 100644 cachescripts/wmsCapabilities.py
 delete mode 100644 cachescripts/wmsCapabilities_2_6_6.py

diff --git a/.gitignore b/.gitignore
index 14c8b148..57abf940 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,4 +17,5 @@ node_modules
 html/index.html
 collaboration/config/config.json
 middleware/nodejs/
+middleware/nodejs/*
 middleware/python-flask.log
\ No newline at end of file
diff --git a/cachescripts/parse.py b/cachescripts/parse.py
deleted file mode 100644
index 2e388016..00000000
--- a/cachescripts/parse.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python
-
-import re
-import logging
-logger = logging.getLogger("portal_proxy")
-
-#from lxml import etree
-from BeautifulSoup import BeautifulStoneSoup
-
-def check_dupes(tag,data,res):
-   """Check whether an element/attribute already exists in a given parent node;
-   if not, add it to the parent; otherwise, create a list containing both.
-
-   @param tag: name of element or attribute to check for.
-   @param data: data contained in the element or attribute.
-   @param res: parent node to check/modify."""
-
-   tag = re.sub('^.*:','',tag)
-
-   if tag in res:
-      # this tag already exists. don't overwrite it!
-      if not isinstance(res[tag], list):
-         # not a list, turn it into one
-         res[tag] = [res[tag]]
-      # then append
-      res[tag].append(data)
-   else:
-      res[tag] = data
-
-def _process(el,depth=0):
-   """Recursively turn an lxml tree into a datastructure that should
-   more-or-less accurately represent it.
-
-   @param el: the lxml Element to process.
-   @param depth: number of recursions. Not currently checked.
-   @return: a datastructure representing the element."""
-
-   res = {}
-   for i in el.contents:
-      if 'name' in dir(i) and i.name:
-         data = _process(i,depth+1)
-         if hasattr(i, 'string') and i.string is not None:
-            text = i.string.strip().encode('utf-8')
-            if len(text) > 0:
-               if data == {}:
-                  try:
-                     data = text
-                  except:
-                     pass
-               else:
-                  data.update({'text': text})
-
-         check_dupes(i.name,data,res)
-
-   if 'name' in dir(el):
-      for key,data in el._getAttrMap().iteritems():
-         check_dupes(key,data,res)
-
-   return res
-
-def process(file, tag='Layer'):
-   """Take a file and search for a given tag, returning a data structure representing it.
-
-   @param file: string containing xml to process.
-   @param tag: tagname for lxml to search for.
-   @return: list of dictionaries, one per tag found."""
-
-   logger.debug("parse.process: tag=%s" % tag)
-   selfClosingTags = ['boundingbox']
-
-   root = BeautifulStoneSoup(file, selfClosingTags=selfClosingTags)
-   logger.debug(root.findAll(tag))
-   obj = [_process(i) for i in root.findAll(tag)]
-   return obj
-
-if __name__ == '__main__':
-   from sys import argv
-   import yaml
-
-   print yaml.dump(process(open(argv[1]).read(),argv[2]),default_flow_style=False)
diff --git a/cachescripts/requirements.txt b/cachescripts/requirements.txt
deleted file mode 100644
index f091f144..00000000
--- a/cachescripts/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-isodate==0.4.9
-BeautifulSoup==3.2.1
-minify==0.1.3
\ No newline at end of file
diff --git a/cachescripts/updateConfig.js b/cachescripts/updateConfig.js
deleted file mode 100644
index b59ee2d0..00000000
--- a/cachescripts/updateConfig.js
+++ /dev/null
@@ -1,98 +0,0 @@
-#!/usr/bin/env node
-
-/*
-* Updates and combies the old wmsLayersTags.py and wmsServers.py into the new layers.py
-*
-*/
-
-var currentDir = __dirname + '/../config/';
-
-var newConfigPath = currentDir + 'wmsLayers.py';
-
-var fs = require('fs');
-__dirname = "./";
-var serverPython = fs.readFileSync( currentDir + 'wmsServers.py').toString().replace('#!/usr/bin/env python', '');
-var layersPython = fs.readFileSync( currentDir + 'wmsLayerTags.py').toString().replace('#!/usr/bin/env python', '');
-
-try{
-   eval( serverPython );
-}catch( e ){
-   throw " wmsServers.py wasnt a valid file ";
-}
-
-try{
-   eval( layersPython );
-}catch( e ){
-   throw " wmsLayerTags.py wasnt a valid file ";
-}
-
-var newList = [];
-
-var map = {};
-
-Object.keys( servers ).forEach(function( serverName ){
-   var oldConfig = servers[serverName];
-
-   var newServerConfig = {
-      name: oldConfig.name,
-
-      options: oldConfig.options,
-
-      services: {
-         wms: {
-            url: oldConfig.url,
-            params: {
-               GetCapabilities: {
-                  'SERVICE': 'WMS',
-                  'request': 'GetCapabilities',
-                  'version': '1.3.0'
-               }
-
-            }
-         },
-         wcs: {
-            url: oldConfig.wcsurl,
-            params: {
-               DescribeCoverage: {
-                  'SERVICE': 'WCS',
-                  'request': 'describeCoverage',
-                  'version': '1.0.0'
-               }
-
-            }
-         }
-      },
-      indicators: {}
-
-   };
-
-   map[ oldConfig.name ] = newServerConfig;
-
-   newList.push(newServerConfig);
-
-});
-
-
-Object.keys( layers ).forEach(function( providerName ){
-
-   var provider = layers[ providerName ];
-   console.log("Updating provider:" + providerName)
-   var serverConfig = map[ providerName ];
-
-   if( ! serverConfig || ! provider )
-      return;
-
-   Object.keys( provider ).forEach(function( indicatorName ){
-      var indicator = provider[ indicatorName ];
-      //indicator.name = indicatorName;
-
-      serverConfig.indicators[ indicatorName ] = indicator ;
-   })
-
-});
-
-var newConfig = "layers = " + JSON.stringify( newList , void(0), 3 );
-
-if( fs.existsSync( newConfigPath ) )
-   fs.unlinkSync( newConfigPath );
-fs.writeFileSync( newConfigPath , newConfig );
\ No newline at end of file
diff --git a/cachescripts/utils.py b/cachescripts/utils.py
deleted file mode 100644
index 6c7c5be0..00000000
--- a/cachescripts/utils.py
+++ /dev/null
@@ -1,212 +0,0 @@
-#!/usr/bin/env python
-
-FILEEXTENSIONJSON = ".json"
-FILEEXTENSIONXML = ".xml"
-LAYERCACHEPATH = "../html/cache/layers/"
-
-import json
-def updateCaches(createCache, dirtyCaches, serverList, cachePath, masterCachePath, cacheLife):
-   import urllib, urllib2
-
-   from providers import providers
-
-   saveFile(LAYERCACHEPATH + '/../providers' + FILEEXTENSIONJSON, json.dumps(providers))
-
-
-
-   print 'Starting cache generation'
-   #servers = csvToList(serverList)
-
-   change = False
-
-   # Go through each server
-   for server in serverList:
-
-      # Check if we are just passing a layer
-      if 'passthrough' in server['options'] and server['options']['passthrough']:
-         # Check if cache is valid
-         if not checkCacheValid(cachePath + server['name'] + FILEEXTENSIONJSON, cacheLife):
-            createCache(server, None)
-
-         continue
-
-      # Check if cache is valid
-      if not checkCacheValid(cachePath + server['name'] + FILEEXTENSIONXML, cacheLife):
-         oldCapabilitiesXML = None
-         newCapabilitiesXML = None
-         oldCoverageXML = None
-         newCoverageXML = None
-
-
-         try:
-            url = server['services']['wms']['url'] + urllib.urlencode(server['services']['wms']['params']['GetCapabilities'])
-            print 'Getting: ' + url
-            resp = urllib2.urlopen(url, timeout=30)
-            newCapabilitiesXML = resp.read()
-
-            if set(('wcs')).issubset(server['services']): # Confirms that WCS is actually provided.
-               url = server['services']['wcs']['url'] + urllib.urlencode(server['services']['wcs']['params']['DescribeCoverage'])
-               print 'Getting: ' + url
-               resp = urllib2.urlopen(url, timeout=30)
-               newCoverageXML = resp.read()
-
-         except urllib2.URLError as e:
-            print 'Failed to open url to ' + url
-            print e
-            # If we can't contact the server, skip to the next server
-         except IOError as e:
-            print 'Failed to open url to ' + url
-            print e
-
-         # Check that we have the xml file
-         if newCapabilitiesXML == None or newCoverageXML == None:
-            dirtyCaches.append(server)
-            continue
-
-         try:
-            oldCapabilitiesXML = getFile(cachePath + server['name'] + '-GetCapabilities' + FILEEXTENSIONXML)
-            oldCoverageXML = getFile(cachePath + server['name'] + '-DescribeCoverage' + FILEEXTENSIONXML)
-         except IOError as e:
-            print 'Failed to open xml file at "' + cachePath + server['name'] + FILEEXTENSIONXML + '"'
-            print e
-            # We don't have the oldXML so we want to skip the md5 check
-            createCache(server, newCapabilitiesXML, newCoverageXML)
-            change = True
-            continue
-
-         # This shouldn't be needed
-         if oldXML == None:
-            oldXML = "old"
-
-         # Check the md5s
-         if checkMD5(oldCapabilitiesXML, newCapabilitiesXML) or checkMD5(oldCoverageXML, newCoverageXML):
-            print 'md5 check failed...'
-            # Create the caches for this server
-            createCache(server, newCapabilitiesXML, newCoverageXML )
-            change = True
-            continue
-         else:
-            print 'md5 check passed'
-
-   dirtyCachesCopy = dirtyCaches[:]
-   print "Checking for dirty caches..."
-   for dirtyServer in dirtyCachesCopy:
-      print "server name: " + dirtyServer['name']
-      regenerateCache(dirtyServer, dirtyCaches, createCache)
-   print "Dirty caches regenerated"
-
-   if change:
-      createMasterCache(serverList, cachePath, masterCachePath)
-
-   print 'Finished generating caches'
-
-def createMasterCache(servers, cachePath, masterCachePath):
-   import json
-   masterCache = []
-   for server in servers:
-      file = None
-      try:
-         print 'Reading : ' + cachePath + server['name'] + FILEEXTENSIONJSON;
-         file = getFile(cachePath + server['name'] + FILEEXTENSIONJSON)
-      except IOError as e:
-         print 'Failed to open json file at "' + cachePath + server['name'] + FILEEXTENSIONJSON + '"'
-         print e
-
-      if file != None:
-         masterCache.append(json.loads(file))
-
-   print "Saving mastercache..."
-   saveFile(masterCachePath + FILEEXTENSIONJSON, json.dumps(masterCache))
-   print "Mastercache saved"
-
-def regenerateCache(dirtyServer, dirtyCaches, createCache):
-   import urllib, urllib2, time
-   for i in range(10):
-      if dirtyServer in dirtyCaches:
-         dirtyCaches.remove(dirtyServer)
-      if i < 10:
-         try:
-            url = dirtyServer['services']['wms']['url'] + urllib.urlencode(dirtyServer['services']['wms']['params']['GetCapabilities'])
-            resp = urllib2.urlopen(url, timeout=30)
-            newXML = resp.read()
-            createCache(dirtyServer, newXML)
-            if dirtyServer not in dirtyCaches:
-               return
-            else:
-               time.sleep(30)
-         except urllib2.URLError as e:
-            print 'Failed to open url to ' + url
-            print e
-         except IOError as e:
-            print 'Failed to open url to ' + url
-            print e
-         # We don't have the oldXML so we need to skip the md5 check
-
-def checkMD5(oldXML, newXML):
-   import hashlib
-   newMD5 = hashlib.md5(newXML)
-   oldMD5 = hashlib.md5(oldXML)
-
-   print 'Checking md5...'
-   print newMD5.hexdigest()
-   print oldMD5.hexdigest()
-
-   return newMD5.hexdigest() != oldMD5.hexdigest()
-
-def csvToList(file):
-   import csv
-   data = []
-   try:
-      with open(file, 'rb') as csvfile:
-         reader = csv.reader(csvfile, delimiter=",")
-         titles = reader.next()
-         reader = csv.DictReader(csvfile, titles)
-         for row in reader:
-            data.append(row)
-   except IOError as e:
-      print 'Could not open csv file at "' + file + '"'
-      print e
-      return []
-
-   return data
-
-def checkCacheValid(file, life):
-   import os.path, time
-   try:
-      cDate = os.path.getctime(file)
-      if time.time() - cDate < life:
-         print '%s valid' % file
-         return True
-      else:
-         print '%s expired' % file
-         return False
-   except OSError as e:
-      print 'Failed to open %s' % file
-      print e
-      return False
-
-def getFile(filepath):
-   data = None
-   with open(filepath) as file:
-      data = file.read()
-
-   return data
-
-def saveFile(path, data):
-   with open(path, 'wb') as file:
-      file.write(data)
-
-   return data
-
-def replaceAll(text, dic):
-   for i, j in dic.iteritems():
-      text = text.replace(i, j)
-   return text
-
-def blackfilter(stringToTest, filterList):
-   if len(filterList) != 0:
-      for v in filterList:
-         if stringToTest.find(v['name']) != -1:
-            return False
-
-   return True
\ No newline at end of file
diff --git a/cachescripts/wfsCapabilities.py b/cachescripts/wfsCapabilities.py
deleted file mode 100644
index bae0493b..00000000
--- a/cachescripts/wfsCapabilities.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import utils
-import parse
-import sys
-
-sys.path.append(os.path.join(sys.path[0],'..','config'))
-print sys.path
-# WFS Server list
-import wfsServers
-
-# Extra layer info
-import wfsLayerTags
-
-# Change the python working directory to be where this script is located
-abspath = os.path.abspath(__file__)
-dname = os.path.dirname(abspath)
-os.chdir(dname)
-
-CACHELIFE = 3600 #3600 # cache time in seconds, 1 hour cache
-LAYERCACHEPATH = "../html/cache/layers/"
-SERVERCACHEPATH = "../html/cache/"
-MASTERCACHEPATH = "../html/cache/wfsMasterCache"
-FILEEXTENSIONJSON = ".json"
-FILEEXTENSIONXML = ".xml"
-GET_CAPABILITES_PARAMS = "SERVICE=WFS&REQUEST=GetCapabilities&VERSION=1.0.0"
-SERVERLIST = "wfsServerList.csv"
-NAMESPACE = '{http://www.opengis.net/wfs}'
-XLINKNAMESPACE = '{http://www.w3.org/1999/xlink}'
-
-dirtyCaches = [] # List of caches that may need recreating if they don't get created the first time
-extraInfo = wfsLayerTags.layers
-
-
-def touch(fname, times=None):
-   import os
-   with file(fname, 'a'):
-      os.utime(fname, times)
-
-# Touch master cache so that it doesn't 404 if no data
-touch(MASTERCACHEPATH + '.json')
-
-def createCache(server, xml):
-   import json
-   import urllib
-
-   print 'Creating caches...'
-   subMasterCache = {}
-   subMasterCache['layers'] = []
-   tags = None
-
-   cleanServerName = server['name'].replace('/', '-')
-   cleanLayerName = server['name']
-
-   if server['params'] and server['params']['TypeName']:
-      cleanLayerName = utils.replaceAll(server['params']['TypeName'], {':': '-', '\\': '-'})
-
-   if server['name'] in extraInfo:
-      tags = extraInfo[server['name']]
-
-   # Layer iter
-
-   if 'passthrough' in server['options'] and server['options']['passthrough']:
-      if server['params']:
-         encodedParams = urllib.urlencode(server['params'])
-         subMasterCache['url'] = server['url'] + encodedParams
-      else:
-         subMasterCache['url'] = server['url']
-
-      layer = {
-         'name': cleanLayerName,
-         'options': server['options']
-      }
-
-      if tags:
-         layer['tags'] = tags
-
-      subMasterCache['layers'].append(layer)
-
-   elif xml != None:
-      # Save out the xml file for later
-      utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONXML, xml)
-      times = processTimes(server, xml)
-
-      layer = {
-         'name': cleanLayerName,
-         'options': server['options'],
-         'times': times
-      }
-
-      if tags:
-         layer['tags'] = tags
-
-      # Save out layer cache
-      utils.saveFile(LAYERCACHEPATH + cleanServerName + "_" + cleanLayerName + FILEEXTENSIONJSON, json.dumps(layer))
-      subMasterCache['layers'].append(layer)
-      subMasterCache['url'] = server['url']
-
-   subMasterCache['serverName'] = server['name']
-
-   print 'Cache creation complete...'
-
-   # Return and save out the cache for this server
-   return utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONJSON, json.dumps(subMasterCache))
-
-
-def processTimes(server, xml):
-   from time import mktime, strptime
-   data = parse.process(xml,tag=server['options']['tag'])
-
-   if (len(data) == 0):
-      return None
-
-   sname = server['params']['propertyName']
-
-   data = data[0][data[0].keys()[0]]
-   [int(mktime(strptime(x[sname.lower()].split('T')[0],"%Y-%m-%d"))) for x in data]
-
-   return data
-
-utils.updateCaches(createCache, dirtyCaches, wfsServers.servers, SERVERCACHEPATH, MASTERCACHEPATH, CACHELIFE)
diff --git a/cachescripts/wmsCapabilities.py b/cachescripts/wmsCapabilities.py
deleted file mode 100644
index bd8e3fd8..00000000
--- a/cachescripts/wmsCapabilities.py
+++ /dev/null
@@ -1,329 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import utils
-import sys
-import re
-import dateutil.parser
-import calendar
-import json
-
-sys.path.append(os.path.join(sys.path[0],'..','config'))
-sys.path.append(os.path.join(sys.path[0],'..','config/user_layers'))
-# server list
-import PML_RSG_THREDDS_Data_Server as wmsLayers
-from providers import providers
-from legendSettings import legendSettings as defaultLegendSettings
-
-# Change the python working directory to be where this script is located
-abspath = os.path.abspath(__file__)
-dname = os.path.dirname(abspath)
-os.chdir(dname)
-
-CACHELIFE = 3600 #3600 # cache time in seconds, 1 hour cache
-LAYERCACHEPATH = "../html/cache/layers/"
-SERVERCACHEPATH = "../html/cache/"
-MASTERCACHEPATH = "../html/cache/mastercache"
-FILEEXTENSIONJSON = ".json"
-FILEEXTENSIONXML = ".xml"
-
-WMS_NAMESPACE = '{http://www.opengis.net/wms}'
-WCS_NAMESPACE = '{http://www.opengis.net/wcs}'
-GML_NAMESPACE = '{http://www.opengis.net/gml}'
-XLINKNAMESPACE = '{http://www.w3.org/1999/xlink}'
-
-MARKDOWN_DIR = '../markdown'
-MARKDOWN_SUFFIX = '.md'
-
-PRODUCTFILTER = "productFilter.csv"
-LAYERFILTER = "layerFilter.csv"
-
-dirtyCaches = [] # List of caches that may need recreating
-#extraInfo = wmsLayerTags.layers
-
-
-
-def findCoverageNode( coverageRoot, name ):
-   possibleNodes = coverageRoot.findall('./%sCoverageOffering' % (WCS_NAMESPACE))
-   for node in possibleNodes:
-      coverageName = node.find('./%sname' % (WCS_NAMESPACE)).text
-      if( coverageName == name ):
-         return node
-   return None
-
-def removeNonUTF8( text ):
-
-   # Horrible way to remove non UTF-8 characters but if you can find a better way please go head
-   invalidCharacters = re.sub( '([\x00-\x7F]|[\xC2-\xDF][\x80-\xBF]|\xE0[\xA0-\xBF][\x80-\xBF]|[\xE1-\xEC][\x80-\xBF]{2}|\xED[\x80-\x9F][\x80-\xBF]|[\xEE-\xEF][\x80-\xBF]{2}|\xF0[\x90-\xBF][\x80-\xBF]{2}|[\xF1-\xF3][\x80-\xBF]{3}|\xF4[\x80-\x8F][\x80-\xBF]{2})', '', text )
-   if( len( invalidCharacters ) == 0 ):
-      return text
-
-   invalidCharacters = set( invalidCharacters )
-   invalidCharacters = "".join(invalidCharacters)
-   invalidRegex = "[" + invalidCharacters + "]"
-   return re.sub( invalidRegex, '', text )
-
-
-def createCache(server, capabilitiesXML, coverageXML):
-
-   #import xml.etree.ElementTree as ET
-   #from xml.etree.ElementTree import XMLParser
-   from lxml import etree as ET
-
-   import json
-
-   # Save out the xml file for later
-   utils.saveFile(SERVERCACHEPATH + server['name'] + '-GetCapabilities' + FILEEXTENSIONXML, capabilitiesXML)
-   utils.saveFile(SERVERCACHEPATH + server['name'] + '-DescribeCoverage' + FILEEXTENSIONXML, coverageXML)
-
-   print 'Creating caches...'
-   subMasterCache = {}
-   subMasterCache['server'] = {}
-
-   #parse = XMLParser( encoding="UTF-8" )
-
-   # Parse the GetCapabilities XML
-   #root = ET.XML(capabilitiesXML, parser=parse)
-   root = ET.fromstring( removeNonUTF8(capabilitiesXML) )
-
-   # Parse the DescribeCoverage XML
-   coverageRoot = ET.fromstring( removeNonUTF8(coverageXML) )
-
-   if root.find('./%sCapability/%sLayer/%sLayer' % (WMS_NAMESPACE,WMS_NAMESPACE,WMS_NAMESPACE)) == None:
-      dirtyCaches.append(server)
-      return
-
-   for service in root.findall('./%sService' % (WMS_NAMESPACE)):
-      serverTitle = service.find('./%sTitle' % (WMS_NAMESPACE)).text
-      serverAbstract = service.find('./%sAbstract' % (WMS_NAMESPACE)).text if service.find('./%sAbstract' % (WMS_NAMESPACE)) is not None else None
-
-   for product in root.findall('./%sCapability/%sLayer/%sLayer' % (WMS_NAMESPACE,WMS_NAMESPACE,WMS_NAMESPACE)):
-      sensorName = product.find('./%sTitle' % (WMS_NAMESPACE)).text
-
-      if utils.blackfilter(sensorName, productBlackList):
-         sensorName = utils.replaceAll(sensorName, {' ':'_', '(':'_', ')':'_', '/':'_'})
-         print sensorName
-         layers = []
-
-         for layer in product.findall('./%sLayer' % (WMS_NAMESPACE)):
-            name = layer.find('./%sName' % (WMS_NAMESPACE)).text
-            title = layer.find('./%sTitle' % (WMS_NAMESPACE)).text
-            abstract = layer.find('./%sAbstract' % (WMS_NAMESPACE)).text
-            temporal = False
-
-
-            if name not in server['indicators']:
-               print "NOTICE: Indicator '" + name + "' found on WMS server but not in local config file, ignoring."
-               continue
-
-            #Find the CoverageOffering from DescribeCoverage
-
-
-            coverage = findCoverageNode( coverageRoot, name )
-            if coverage == None:
-               print serverTitle + " " + name + " could not be found in DescribeCoverage. Not including."
-               continue
-
-            offsetVectorsArray = coverage.findall( './/%soffsetVector' % (GML_NAMESPACE) )
-            offsetVectors = []
-            for i in range( 0 , len( offsetVectorsArray )):
-               offsetVectors.append(float(offsetVectorsArray[i].text.split(" ")[i]))
-
-            exGeographicBoundingBox = {"WestBoundLongitude": layer.find('./%sEX_GeographicBoundingBox/%swestBoundLongitude' % (WMS_NAMESPACE,WMS_NAMESPACE)).text,
-                                       "EastBoundLongitude": layer.find('./%sEX_GeographicBoundingBox/%seastBoundLongitude' % (WMS_NAMESPACE,WMS_NAMESPACE)).text,
-                                       "SouthBoundLatitude": layer.find('./%sEX_GeographicBoundingBox/%ssouthBoundLatitude' % (WMS_NAMESPACE,WMS_NAMESPACE)).text,
-                                       "NorthBoundLatitude": layer.find('./%sEX_GeographicBoundingBox/%snorthBoundLatitude' % (WMS_NAMESPACE,WMS_NAMESPACE)).text}
-
-            boundingBox = {"CRS": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('CRS'),
-                           "MinX": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('minx'),
-                           "MaxX": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('maxx'),
-                           "MinY": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('miny'),
-                           "MaxY": layer.find('./%sBoundingBox' % (WMS_NAMESPACE)).get('maxy')}
-
-            dimensions = createDimensionsArray(layer, server)
-            temporal = dimensions['temporal']
-            styles = createStylesArray(layer)
-
-            if server['options']['providerShortTag'] not in providers:
-               raise Exception("Provider shortTag " + server['options']['providerShortTag'] + " was not in the 'providers.py' file")
-
-            # Get the default details for the provider
-            providerDetails = providers[ server['options']['providerShortTag'] ]
-            if (layerHasMoreInfo(server['options']['providerShortTag'])):
-               moreProviderInfo = True
-            else:
-               moreProviderInfo = False
-
-            if 'providerDetails' in server['indicators'][name]:
-               # Overwrite any details with the indicator specific details
-               for i in server['indicators'][name]['providerDetails']:
-                  providerDetails[ i ] = server['indicators'][name]['providerDetails'][ i ]
-
-            if 'LegendSettings' in server['indicators'][name]:
-               legendSettings = server['indicators'][name]['LegendSettings']
-            else:
-               legendSettings = defaultLegendSettings
-
-            #import pprint
-            #pprint.pprint(server['indicators'][name])
-            #print '-'*40
-
-            if utils.blackfilter(name, layerBlackList):
-               if layerHasMoreInfo(server['indicators'][name]['niceName']):
-                  moreIndicatorInfo = True
-               else:
-                  moreIndicatorInfo = False
-               masterLayer = {"Name": name,
-                              "Title": title,
-                              "Abstract": abstract,
-                              "FirstDate": dimensions['firstDate'],
-                              "LastDate": dimensions['lastDate'],
-                              "OffsetVectors": offsetVectors,
-                              "ProviderDetails": providerDetails,
-                              "EX_GeographicBoundingBox": exGeographicBoundingBox,
-                              "MoreIndicatorInfo" : moreIndicatorInfo,
-                              "MoreProviderInfo" : moreProviderInfo,
-                              "LegendSettings": legendSettings }
-
-               if name in server['indicators']:
-                  masterLayer['tags'] = server['indicators'][name]
-
-               # Data to be sent in the mastercache
-               layers.append(masterLayer)
-
-               # Data to be saved out
-               layer = {#"Name": name,
-                        #"wmsURL": server['wmsURL'],
-                        #"wcsURL": server['wcsURL'],
-                        #"Title": title,
-                        #"Abstract": abstract,
-                        "FirstDate": dimensions['firstDate'],
-                        "LastDate": dimensions['lastDate'],
-                        "OffsetVectors": offsetVectors,
-                        #"EX_GeographicBoundingBox": exGeographicBoundingBox,
-                        "BoundingBox": boundingBox,
-                        "Dimensions": dimensions['dimensions'],
-                        "Styles": styles }
-
-               cleanServerName = server['name'].replace('/', '-')
-               cleanLayerName = name.replace('/', '-')
-
-               # Save out layer cache
-               utils.saveFile(LAYERCACHEPATH + cleanServerName + "_" + cleanLayerName + FILEEXTENSIONJSON, json.dumps(layer))
-
-         subMasterCache['server'][sensorName] = layers
-
-   subMasterCache['options'] = server['options']
-   subMasterCache['wmsURL'] = server['services']['wms']['url']
-   if set(('wcs')).issubset(server['services']): # Confirms that the WCS information has been given.
-      subMasterCache['wcsURL'] = server['services']['wcs']['url']
-   subMasterCache['serverName'] = server['name']
-
-   print 'Cache creation complete...'
-
-   # Return and save out the cache for this server
-   return utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONJSON, json.dumps(subMasterCache))
-
-def layerHasMoreInfo( layerNiceName ):
-   print os.getcwd()
-   print "testing %s for more info" % layerNiceName
-   for root, dirs, files in os.walk(MARKDOWN_DIR):
-      for _file in files:
-         #print _file
-         if _file.lower() == '%s%s' % (layerNiceName.lower(), MARKDOWN_SUFFIX):
-            print 'found %s file' % layerNiceName
-            return True
-   return False
-
-
-def isoToTimestamp( strDate ):
-   dt = dateutil.parser.parse(strDate)
-   return calendar.timegm(dt.utctimetuple())
-
-
-def compareDateStrings( a, b ):
-   if isoToTimestamp(a) > isoToTimestamp(b):
-      return 1
-   else:
-      return -1
-
-def createDimensionsArray(layer, server):
-   import string
-   dimensions = {}
-   dimensions['dimensions'] = []
-   dimensions['temporal'] = False
-   dimensions['firstDate'] = None
-   dimensions['lastDate'] = None
-
-   # Iterate over each dimension
-   for dimension in layer.findall('./%sDimension' % (WMS_NAMESPACE)):
-      dimensionList = dimension.text.split(',')
-      dimensionValue = dimension.text.strip()
-
-      # Tidy up temporal layer date-time values
-      if dimension.get('name') == 'time':
-         dimensions['temporal'] = True
-         # The following array will be built up with modified values as needed
-         newDates = []
-         # Iterate through the date-time dimension array looking for errors and/or ISO8601 date ranges
-         for v in dimensionList:
-            dateTime = v.strip()
-            newDates.append(dateTime)
-            # Is there a date range present? - usually datetime/datetime/interval
-            if dateTime.find('/'):
-               debugString = "Date range found [" + dateTime + "] for layer " + server['name']
-               range = dateTime.split('/')
-               # Check for corrupted or unexpected data range format and remove it if found
-               if len(range) == 3:
-                  dateTimeRange = genDateRange(range[0], range[1], range[2])
-                  newDates.pop()
-                  newDates = newDates + dateTimeRange
-
-            # Is there a corrupted date present - if so, remove it
-            if dateTime.find('-') != 4:
-               newDates.pop()
-
-         newDates.sort( compareDateStrings )
-
-         if len(newDates) > 0:
-            dimensions['firstDate'] = newDates[0].strip()[:10]
-            dimensions['lastDate'] = newDates[len(newDates) - 1].strip()[:10]
-            dimensionValue = string.join(newDates, ',').strip()
-
-      dimensions['dimensions'].append({'Name': dimension.get('name'),
-                                       'Units': dimension.get('units'),
-                                       'Default': dimension.get('default'),
-                                       'Value': dimensionValue})
-   return dimensions
-
-def createStylesArray(layer):
-   styles = []
-
-   for style in layer.findall('./%sStyle' % (WMS_NAMESPACE)):
-      styles.append({"Name": style.find('./%sName' % (WMS_NAMESPACE)).text,
-                     "Abstract": style.find('./%sAbstract' % (WMS_NAMESPACE)).text,
-                     "LegendURL": style.find('./%sLegendURL/%sOnlineResource' % (WMS_NAMESPACE,WMS_NAMESPACE)).get('%shref' % (XLINKNAMESPACE)),
-                     "Width": style.find('./%sLegendURL' % (WMS_NAMESPACE)).get('width'),
-                     "Height": style.find('./%sLegendURL' % (WMS_NAMESPACE)).get('height')})
-
-   return styles
-
-def genDateRange(startDate, endDate, interval):
-   import isodate # https://github.com/gweis/isodate
-
-   dates = []
-   dateFrom = isodate.parse_datetime(startDate)
-   dateTo = isodate.parse_datetime(endDate)
-   dateInterval = isodate.parse_duration(interval)
-   currentDate = dateFrom
-
-   while currentDate <= dateTo:
-      datetime = isodate.datetime_isoformat(currentDate)
-      dates.append(datetime)
-      currentDate = currentDate + dateInterval
-
-   return dates
-
-layerBlackList = utils.csvToList(LAYERFILTER)
-productBlackList = utils.csvToList(PRODUCTFILTER)
-utils.updateCaches(createCache, dirtyCaches, wmsLayers.layers, SERVERCACHEPATH, MASTERCACHEPATH, CACHELIFE)
diff --git a/cachescripts/wmsCapabilities_2_6_6.py b/cachescripts/wmsCapabilities_2_6_6.py
deleted file mode 100644
index 6843d4f3..00000000
--- a/cachescripts/wmsCapabilities_2_6_6.py
+++ /dev/null
@@ -1,212 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import utils
-import sys
-
-sys.path.append(os.path.join(sys.path[0],'..','config'))
-# server list
-import wmsServers
-# extra info for layers
-import wmsLayerTags
-
-# Change the python working directory to be where this script is located
-abspath = os.path.abspath(__file__)
-dname = os.path.dirname(abspath)
-os.chdir(dname)
-
-CACHELIFE = 3600 #3600 # cache time in seconds, 1 hour cache
-LAYERCACHEPATH = "../html/cache/layers/"
-SERVERCACHEPATH = "../html/cache/"
-MASTERCACHEPATH = "../html/cache/mastercache"
-FILEEXTENSIONJSON = ".json"
-FILEEXTENSIONXML = ".xml"
-GET_CAPABILITES_PARAMS = "SERVICE=WMS&REQUEST=GetCapabilities&VERSION=1.3.0"
-
-NAMESPACE = '{http://www.opengis.net/wms}'
-XLINKNAMESPACE = '{http://www.w3.org/1999/xlink}'
-
-PRODUCTFILTER = "productFilter.csv"
-LAYERFILTER = "layerFilter.csv"
-
-dirtyCaches = [] # List of caches that may need recreating
-extraInfo = wmsLayerTags.layers
-
-def createCache(server, xml):
-   import xml.etree.ElementTree as ET
-   import json
-
-   # Save out the xml file for later
-   utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONXML, xml)
-
-   print 'Creating caches...'
-   subMasterCache = {}
-   subMasterCache['server'] = {}
-
-   #ET.register_namespace(NAMESPACE, NAMESPACE)
-   root = ET.fromstring(xml)
-
-   if root.find('./%sCapability/%sLayer/%sLayer' % (NAMESPACE,NAMESPACE,NAMESPACE)) == None:
-      dirtyCaches.append(server)
-      return
-
-   for service in root.findall('./%sService' % (NAMESPACE)):
-      serverTitle = service.find('./%sTitle' % (NAMESPACE)).text
-      serverAbstract = service.find('./%sAbstract' % (NAMESPACE)).text if service.find('./%sAbstract' % (NAMESPACE)) is not None else None
-
-   for product in root.findall('./%sCapability/%sLayer/%sLayer' % (NAMESPACE,NAMESPACE,NAMESPACE)):
-      sensorName = product.find('./%sTitle' % (NAMESPACE)).text
-
-      if utils.blackfilter(sensorName, productBlackList):
-         sensorName = utils.replaceAll(sensorName, {' ':'_', '(':'_', ')':'_', '/':'_'})
-         print sensorName
-         layers = []
-
-         for layer in product.findall('./%sLayer' % (NAMESPACE)):
-            name = layer.find('./%sName' % (NAMESPACE)).text
-            title = layer.find('./%sTitle' % (NAMESPACE)).text
-            abstract = layer.find('./%sAbstract' % (NAMESPACE)).text
-            temporal = False
-
-            exGeographicBoundingBox = {"WestBoundLongitude": layer.find('./%sEX_GeographicBoundingBox/%swestBoundLongitude' % (NAMESPACE,NAMESPACE)).text,
-                                       "EastBoundLongitude": layer.find('./%sEX_GeographicBoundingBox/%seastBoundLongitude' % (NAMESPACE,NAMESPACE)).text,
-                                       "SouthBoundLatitude": layer.find('./%sEX_GeographicBoundingBox/%ssouthBoundLatitude' % (NAMESPACE,NAMESPACE)).text,
-                                       "NorthBoundLatitude": layer.find('./%sEX_GeographicBoundingBox/%snorthBoundLatitude' % (NAMESPACE,NAMESPACE)).text}
-
-            boundingBox = {"CRS": layer.find('./%sBoundingBox' % (NAMESPACE)).get('CRS'),
-                           "MinX": layer.find('./%sBoundingBox' % (NAMESPACE)).get('minx'),
-                           "MaxX": layer.find('./%sBoundingBox' % (NAMESPACE)).get('maxx'),
-                           "MinY": layer.find('./%sBoundingBox' % (NAMESPACE)).get('miny'),
-                           "MaxY": layer.find('./%sBoundingBox' % (NAMESPACE)).get('maxy')}
-
-            dimensions = createDimensionsArray(layer, server)
-            temporal = dimensions['temporal']
-            styles = createStylesArray(layer)
-
-
-
-            if utils.blackfilter(name, layerBlackList):
-
-               masterLayer = {"Name": name,
-                              "Title": title,
-                              "Abstract": abstract,
-                              "FirstDate": dimensions['firstDate'],
-                              "LastDate": dimensions['lastDate'],
-                              "EX_GeographicBoundingBox": exGeographicBoundingBox }
-
-               if server['name'] in extraInfo:
-                  if name in extraInfo[server['name']]:
-                     masterLayer['tags'] = extraInfo[server['name']][name]
-
-               # Data to be sent in the mastercache
-               layers.append(masterLayer)
-
-               # Data to be saved out
-               layer = {#"Name": name,
-                        #"wmsURL": server['wmsURL'],
-                        #"wcsURL": server['wcsURL'],
-                        #"Title": title,
-                        #"Abstract": abstract,
-                        "FirstDate": dimensions['firstDate'],
-                        "LastDate": dimensions['lastDate'],
-                        #"EX_GeographicBoundingBox": exGeographicBoundingBox,
-                        "BoundingBox": boundingBox,
-                        "Dimensions": dimensions['dimensions'],
-                        "Styles": styles }
-
-               cleanServerName = server['name'].replace('/', '-')
-               cleanLayerName = name.replace('/', '-')
-
-               # Save out layer cache
-               utils.saveFile(LAYERCACHEPATH + cleanServerName + "_" + cleanLayerName + FILEEXTENSIONJSON, json.dumps(layer))
-
-         subMasterCache['server'][sensorName] = layers
-
-   subMasterCache['options'] = server['options']
-   subMasterCache['wmsURL'] = server['url']
-   subMasterCache['wcsURL'] = server['wcsurl']
-   subMasterCache['serverName'] = server['name']
-
-   print 'Cache creation complete...'
-
-   # Return and save out the cache for this server
-   return utils.saveFile(SERVERCACHEPATH + server['name'] + FILEEXTENSIONJSON, json.dumps(subMasterCache))
-
-def createDimensionsArray(layer, server):
-   import string
-   dimensions = {}
-   dimensions['dimensions'] = []
-   dimensions['temporal'] = False
-   dimensions['firstDate'] = None
-   dimensions['lastDate'] = None
-
-   # Iterate over each dimension
-   for dimension in layer.findall('./%sDimension' % (NAMESPACE)):
-      dimensionList = dimension.text.split(',')
-      dimensionValue = dimension.text.strip()
-
-      # Tidy up temporal layer date-time values
-      if dimension.get('name') == 'time':
-         dimensions['temporal'] = True
-         # The following array will be built up with modified values as needed
-         newDates = []
-         # Iterate through the date-time dimension array looking for errors and/or ISO8601 date ranges
-         for v in dimensionList:
-            dateTime = v.strip()
-            newDates.append(dateTime)
-            # Is there a date range present? - usually datetime/datetime/interval
-            if dateTime.find('/'):
-               debugString = "Date range found [" + dateTime + "] for layer " + server['name']
-               range = dateTime.split('/')
-               # Check for corrupted or unexpected data range format and remove it if found
-               if len(range) == 3:
-                  dateTimeRange = genDateRange(range[0], range[1], range[2])
-                  newDates.pop()
-                  newDates = newDates + dateTimeRange
-
-            # Is there a corrupted date present - if so, remove it
-            if dateTime.find('-') != 4:
-               newDates.pop()
-
-         if len(newDates) > 0:
-            dimensions['firstDate'] = newDates[0].strip()[:10]
-            dimensions['lastDate'] = newDates[len(newDates) - 1].strip()[:10]
-            dimensionValue = string.join(newDates, ',').strip()
-
-      dimensions['dimensions'].append({'Name': dimension.get('name'),
-                                       'Units': dimension.get('units'),
-                                       'Default': dimension.get('default'),
-                                       'Value': dimensionValue})
-   return dimensions
-
-def createStylesArray(layer):
-   styles = []
-
-   for style in layer.findall('./%sStyle' % (NAMESPACE)):
-      styles.append({"Name": style.find('./%sName' % (NAMESPACE)).text,
-                     "Abstract": style.find('./%sAbstract' % (NAMESPACE)).text,
-                     "LegendURL": style.find('./%sLegendURL/%sOnlineResource' % (NAMESPACE,NAMESPACE)).get('%shref' % (XLINKNAMESPACE)),
-                     "Width": style.find('./%sLegendURL' % (NAMESPACE)).get('width'),
-                     "Height": style.find('./%sLegendURL' % (NAMESPACE)).get('height')})
-
-   return styles
-
-def genDateRange(startDate, endDate, interval):
-   import isodate # https://github.com/gweis/isodate
-
-   dates = []
-   dateFrom = isodate.parse_datetime(startDate)
-   dateTo = isodate.parse_datetime(endDate)
-   dateInterval = isodate.parse_duration(interval)
-   currentDate = dateFrom
-
-   while currentDate <= dateTo:
-      datetime = isodate.datetime_isoformat(currentDate)
-      dates.append(datetime)
-      currentDate = currentDate + dateInterval
-
-   return dates
-
-layerBlackList = utils.csvToList(LAYERFILTER)
-productBlackList = utils.csvToList(PRODUCTFILTER)
-utils.updateCaches(createCache, dirtyCaches, wmsServers.servers, SERVERCACHEPATH, MASTERCACHEPATH, CACHELIFE)
diff --git a/collaboration/views/authorised.jade b/collaboration/views/authorised.jade
index a6e01f99..531c42bc 100644
--- a/collaboration/views/authorised.jade
+++ b/collaboration/views/authorised.jade
@@ -2,7 +2,7 @@ doctype html
 html(lang="en")
    head
       title= pageTitle
-      link(rel='stylesheet', href='/css/GISPortal.css')
+      link(rel='stylesheet', href='/css/GISportal.css')
       script(type='text/javascript' src='//ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js')
       script(type='text/javascript')
         | var p = window.opener.top;
diff --git a/collaboration/views/index.jade b/collaboration/views/index.jade
index b8a4c2f8..28024aaa 100644
--- a/collaboration/views/index.jade
+++ b/collaboration/views/index.jade
@@ -2,7 +2,7 @@ doctype html
 html(lang="en")
    head
      title= pageTitle
-      link(rel='stylesheet', href='/css/GISPortal.css')
+      link(rel='stylesheet', href='/css/GISportal.css')
      script(type='text/javascript' src='//ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js')
      script(type='text/javascript')
        |$( document ).ready(function() {
diff --git a/middleware/portalflask/views/proxy.py b/middleware/portalflask/views/proxy.py
index 9d5ad160..67900654 100644
--- a/middleware/portalflask/views/proxy.py
+++ b/middleware/portalflask/views/proxy.py
@@ -16,7 +16,6 @@
 LAYERCACHEPATH = "../../../html/cache/layers/"
 USERCACHEPREFIX = "user_" #DO NOT CHANGE YET, USE CONFIG IN THE END!
 MASTERCACHEPATH = "../../../html/cache"
-BASEUSERCACHEPATH = MASTERCACHEPATH +"/" + USERCACHEPREFIX
 FILEEXTENSIONJSON = ".json"
 FILEEXTENSIONXML = ".xml"
 
@@ -666,7 +665,7 @@ def remove_server_cache():
       os.makedirs(deleted_cache_path) #if the user_deleted_cache path does not exist it is created.
    os.rename(original_path, new_path)
-   return clean_filename
+   return CURRENT_PATH#clean_filename
 """
diff --git a/package.json b/package.json
index 830d95be..8cdf393d 100644
--- a/package.json
+++ b/package.json
@@ -12,5 +12,8 @@
     "autoprefixer": "~6.1.2",
     "pixrem": "~3.0.0",
     "grunt-contrib-cssmin": "~0.14.0"
+  },
+  "dependencies": {
+    "express": "~4.13.3"
   }
 }
diff --git a/src/js/addLayersForm.js b/src/js/addLayersForm.js
index 6e23774f..f964bd67 100644
--- a/src/js/addLayersForm.js
+++ b/src/js/addLayersForm.js
@@ -288,7 +288,7 @@ gisportal.addLayersForm.displayForm = function(total_pages, current_page, form_d
          }
       }
    }
-   // Adds each layer to the
+   // Adds each layer to the cache
   for(layer in gisportal.addLayersForm.layers_list){
      // As long as it is to be included
      if(gisportal.addLayersForm.layers_list[layer]['include']){
@@ -327,7 +327,6 @@ gisportal.addLayersForm.displayForm = function(total_pages, current_page, form_d
      },
      error: function(e){
        $.notify("Error submitting this information, please try again", "error");
-        console.log("Error " + e.Message);
      }
   });
   // Returns so that they are only sent once.
diff --git a/src/js/configure.js b/src/js/configure.js
index 87ac743c..5d27d2ef 100644
--- a/src/js/configure.js
+++ b/src/js/configure.js
@@ -377,7 +377,7 @@ gisportal.configurePanel.renderTagsAsSelectlist = function() {
   // set the index to 0, or if a defaultCategory is set use that instead; setting the value triggers the rendering of the drop down lists to filter by
   var defaultValue = { index: 0 };
   var defaultCategory = gisportal.config.defaultCategory
-   if (typeof(defaultCategory) !== 'undefined' && defaultCategory && defaultCategory in gisportal.config.browseCategories) {
+   if (typeof(defaultCategory) !== 'undefined' && defaultCategory && defaultCategory in gisportal.browseCategories) {
      defaultValue = { value: defaultCategory };
   }
   $('#js-category-filter-select').ddslick('select', defaultValue);
diff --git a/src/js/gisportal.js b/src/js/gisportal.js
index c15257f9..63a61eca 100644
--- a/src/js/gisportal.js
+++ b/src/js/gisportal.js
@@ -111,7 +111,7 @@ gisportal.loadLayers = function() {
   // Get WMS cache
   var user_info = gisportal.userPermissions.this_user_info
   $.ajax({
-      url: '/service/get_cache?username=' + user_info.username + '&permission=' + user_info.permission + '&domain=' + gisportal.userPermissions.domainName,
+      url: 'http://127.0.0.1:1310/get_cache?username=' + user_info.username + '&permission=' + user_info.permission + '&domain=' + gisportal.userPermissions.domainName,
      dataType: 'json',
      success: gisportal.initWMSlayers,
      error: function(e){