From 8cf5de4094ae08648b9a985fa22f0c4a906adec0 Mon Sep 17 00:00:00 2001
From: adam-collins
Date: Mon, 20 Feb 2017 09:22:42 +1000
Subject: [PATCH] Add disk caching for faster restart. Do not remove cached
 data when a service is unavailable during a cache refresh.

---
 .../au/org/ala/biocache/config/AppConfig.java      |   8 +
 .../au/org/ala/biocache/dao/SearchDAOImpl.java     |  49 +++--
 .../au/org/ala/biocache/dto/FacetThemes.java       |   5 +-
 .../org/ala/biocache/service/AlaLayersService.java |  31 ++-
 .../au/org/ala/biocache/service/AuthService.java   |  45 +++--
 .../au/org/ala/biocache/service/ListsService.java  |  18 +-
 .../ala/biocache/service/LoggerRestService.java    |  30 ++-
 .../ala/biocache/service/RestartDataService.java   | 181 ++++++++++++++++++
 .../ala/biocache/service/SpeciesImageService.java  |  20 +-
 .../au/org/ala/biocache/util/CollectionsCache.java |  67 +++++--
 .../au/org/ala/biocache/util/DownloadFields.java   |  31 +--
 .../org/ala/biocache/web/OccurrenceController.java |   2 -
 src/main/resources/log4j.xml                       |   3 +
 13 files changed, 397 insertions(+), 93 deletions(-)
 create mode 100644 src/main/java/au/org/ala/biocache/service/RestartDataService.java

diff --git a/src/main/java/au/org/ala/biocache/config/AppConfig.java b/src/main/java/au/org/ala/biocache/config/AppConfig.java
index 6c37773ed..295108889 100644
--- a/src/main/java/au/org/ala/biocache/config/AppConfig.java
+++ b/src/main/java/au/org/ala/biocache/config/AppConfig.java
@@ -1,5 +1,6 @@
 package au.org.ala.biocache.config;
 
+import au.org.ala.biocache.service.RestartDataService;
 import au.org.ala.biocache.service.SpeciesLookupIndexService;
 import au.org.ala.biocache.service.SpeciesLookupRestService;
 import au.org.ala.biocache.service.SpeciesLookupService;
@@ -53,6 +54,13 @@ public class AppConfig {
     protected Boolean facetDefault;
 
+    @Value("${restart.data.dir:/tmp}")
+    public void setDatabase(String dir) {
+        RestartDataService.dir = dir;
+    }
+
     protected SpeciesLookupService getSpeciesLookupRestService() {
         logger.info("Initialising rest-based species lookup services.");
         SpeciesLookupRestService service = new SpeciesLookupRestService();
diff --git a/src/main/java/au/org/ala/biocache/dao/SearchDAOImpl.java b/src/main/java/au/org/ala/biocache/dao/SearchDAOImpl.java
index eb73a721f..6d73242a9 100644
--- a/src/main/java/au/org/ala/biocache/dao/SearchDAOImpl.java
+++ b/src/main/java/au/org/ala/biocache/dao/SearchDAOImpl.java
@@ -29,6 +29,7 @@
 import au.org.ala.biocache.util.thread.EndemicCallable;
 import au.org.ala.biocache.vocab.ErrorCode;
 import au.org.ala.biocache.writer.*;
+import com.fasterxml.jackson.core.type.TypeReference;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.googlecode.ehcache.annotations.Cacheable;
 import org.apache.commons.io.output.ByteArrayOutputStream;
@@ -59,6 +60,7 @@
 import org.springframework.stereotype.Component;
 import org.springframework.util.CollectionUtils;
 
+import javax.annotation.PostConstruct;
 import javax.inject.Inject;
 import javax.servlet.ServletOutputStream;
 import java.io.IOException;
@@ -85,6 +87,7 @@
  * @author "Nick dos Remedios"
  * @see au.org.ala.biocache.dao.SearchDAO
  */
+
 @Component("searchDao")
 public class SearchDAOImpl implements SearchDAO {
 
@@ -291,12 +294,11 @@ public class SearchDAOImpl implements SearchDAO {
     @Value("${media.dir:/data/biocache-media/}")
     public static String biocacheMediaDir = "/data/biocache-media/";
 
-    private volatile Set<IndexFieldDTO> indexFields = null;
-    private volatile Map<String, IndexFieldDTO> indexFieldMap = null;
-
+    private volatile Set<IndexFieldDTO> indexFields = RestartDataService.get(this, "indexFields", new TypeReference<TreeSet<IndexFieldDTO>>(){}, TreeSet.class);
+    private volatile Map<String, IndexFieldDTO> indexFieldMap = RestartDataService.get(this, "indexFieldMap", new TypeReference<HashMap<String, IndexFieldDTO>>(){}, HashMap.class);
     private final Map<String, StatsIndexFieldDTO> rangeFieldCache = new HashMap<String, StatsIndexFieldDTO>();
 
-    private Set<String> authIndexFields = null;
+    private Set<String> authIndexFields = new HashSet<String>();
 
     /**
      * SOLR index version for client app caching use.
@@ -367,7 +369,11 @@ private SolrServer initServer() {
             }
             // TODO: There was a note about possible issues with the following two lines
             Set<IndexFieldDTO> indexedFields = getIndexedFields();
-            downloadFields = new DownloadFields(indexedFields, messageSource, layersService);
+            if (downloadFields == null) {
+                downloadFields = new DownloadFields(indexedFields, messageSource, layersService);
+            } else {
+                downloadFields.update(indexedFields);
+            }
         } catch (Exception ex) {
             logger.error("Error initialising embedded SOLR server: " + ex.getMessage(), ex);
         }
@@ -377,25 +383,33 @@ private SolrServer initServer() {
         return result;
     }
 
+    @PostConstruct
+    public void init() {
+        initServer();
+    }
+
     public Set<String> getAuthIndexFields() {
-        if (authIndexFields == null) {
+        if (authIndexFields.size() == 0) {
             //set up the hash set of the fields that need to have the authentication service substitute
             if (logger.isDebugEnabled()) {
                 logger.debug("Auth substitution fields to use: " + authServiceFields);
             }
-            authIndexFields = new java.util.HashSet();
-            CollectionUtils.mergeArrayIntoCollection(authServiceFields.split(","), authIndexFields);
+            Set<String> set = new java.util.HashSet<String>();
+            CollectionUtils.mergeArrayIntoCollection(authServiceFields.split(","), set);
+            authIndexFields = set;
         }
         return authIndexFields;
     }
 
     public void refreshCaches() {
+        initServer();
+
         collectionCache.updateCache();
         //empties the range cache to allow the settings to be recalculated.
         rangeFieldCache.clear();
         try {
             //update indexed fields
-            downloadFields = new DownloadFields(getIndexedFields(true), messageSource, layersService);
+            downloadFields.update(getIndexedFields(true));
         } catch (Exception e) {
             logger.error("Unable to refresh cache.", e);
         }
@@ -3718,16 +3732,19 @@ public Set<IndexFieldDTO> getIndexedFields() throws Exception {
     @Cacheable(cacheName = "getIndexedFields")
     public Set<IndexFieldDTO> getIndexedFields(boolean update) throws Exception {
         Set<IndexFieldDTO> result = indexFields;
-        if (result == null || update) {
+        if (result.size() == 0 || update) {
             synchronized (solrIndexVersionLock) {
                 result = indexFields;
-                if (result == null || update) {
-                    result = indexFields = getIndexFieldDetails(null);
-                    Map<String, IndexFieldDTO> resultMap = new HashMap<String, IndexFieldDTO>();
-                    for (IndexFieldDTO field : result) {
-                        resultMap.put(field.getName(), field);
+                if (result.size() == 0 || update) {
+                    result = getIndexFieldDetails(null);
+                    if (result != null && result.size() > 0) {
+                        Map<String, IndexFieldDTO> resultMap = new HashMap<String, IndexFieldDTO>();
+                        for (IndexFieldDTO field : result) {
+                            resultMap.put(field.getName(), field);
+                        }
+                        indexFields = result;
+                        indexFieldMap = resultMap;
                     }
-                    indexFieldMap = resultMap;
                 }
             }
         }
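Note: the change above establishes the pattern the rest of this patch repeats: build a replacement collection, then swap the volatile reference only when the new data is non-empty, so a failed SOLR lookup keeps serving the previous snapshot. A minimal sketch of the idiom (class and field names here are illustrative, not from the patch):

    class FieldCache {
        // volatile so readers always see a fully built set after the swap
        private volatile Set<String> fields = new TreeSet<String>();

        void refresh(Set<String> fetched) {
            // keep the old snapshot when the refresh fails or returns nothing
            if (fetched != null && !fetched.isEmpty()) {
                fields = fetched;
            }
        }
    }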
diff --git a/src/main/java/au/org/ala/biocache/dto/FacetThemes.java b/src/main/java/au/org/ala/biocache/dto/FacetThemes.java
index 437c7bc05..4daf961a1 100644
--- a/src/main/java/au/org/ala/biocache/dto/FacetThemes.java
+++ b/src/main/java/au/org/ala/biocache/dto/FacetThemes.java
@@ -134,11 +134,12 @@ public static List<FacetTheme> getAllThemes() {
     }
 
     private void initAllFacets() {
-        facetsMap.clear();
+        LinkedHashMap<String, FacetDTO> map = new LinkedHashMap<String, FacetDTO>();
         for (FacetTheme theme : allThemes) {
             for(FacetDTO f : theme.getFacets()) {
-                facetsMap.put(f.getField(), f);
+                map.put(f.getField(), f);
             }
+            facetsMap = map;
             allFacets = facetsMap.keySet().toArray(new String[]{});
             allFacetsLimited = allFacets != null && allFacets.length > facetsDefaultMax ? Arrays.copyOfRange(allFacets, 0, facetsDefaultMax) : allFacets;
         }
     }
diff --git a/src/main/java/au/org/ala/biocache/service/AlaLayersService.java b/src/main/java/au/org/ala/biocache/service/AlaLayersService.java
index 62a59f00d..aa44b2ff0 100644
--- a/src/main/java/au/org/ala/biocache/service/AlaLayersService.java
+++ b/src/main/java/au/org/ala/biocache/service/AlaLayersService.java
@@ -14,6 +14,7 @@
  ***************************************************************************/
 package au.org.ala.biocache.service;
 
+import com.fasterxml.jackson.core.type.TypeReference;
 import org.apache.commons.lang3.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -41,8 +42,8 @@ public class AlaLayersService implements LayersService {
 
     private final static Logger logger = LoggerFactory.getLogger(AlaLayersService.class);
 
-    private Map<String, String> idToNameMap = new HashMap<String, String>();
-    private List<Map<String, Object>> layers = new ArrayList<Map<String, Object>>();
+    private Map<String, String> idToNameMap = RestartDataService.get(this, "idToNameMap", new TypeReference<HashMap<String, String>>(){}, HashMap.class);
+    private List<Map<String, Object>> layers = RestartDataService.get(this, "layers", new TypeReference<ArrayList<Map<String, Object>>>(){}, ArrayList.class);
     private Map<String, String> extraLayers = new HashMap<String, String>();
 
     //NC 20131018: Allow cache to be disabled via config (enabled by default)
@@ -58,9 +59,9 @@ public class AlaLayersService implements LayersService {
     @Value("${layers.service.url:http://spatial.ala.org.au/ws}")
     protected String layersServiceUrl;
 
-    protected Map<String, Integer> distributions = new HashMap<String, Integer>();
-    protected Map<String, Integer> checklists = new HashMap<String, Integer>();
-    protected Map<String, Integer> tracks = new HashMap<String, Integer>();
+    protected Map<String, Integer> distributions = RestartDataService.get(this, "distributions", new TypeReference<HashMap<String, Integer>>(){}, HashMap.class);
+    protected Map<String, Integer> checklists = RestartDataService.get(this, "checklists", new TypeReference<HashMap<String, Integer>>(){}, HashMap.class);
+    protected Map<String, Integer> tracks = RestartDataService.get(this, "tracks", new TypeReference<HashMap<String, Integer>>(){}, HashMap.class);
 
     @Inject
     private RestOperations restTemplate; // NB MappingJacksonHttpMessageConverter() injected by Spring
@@ -78,18 +79,28 @@ public Map<String, String> getLayerNameMap() {
 
     @Scheduled(fixedDelay = 43200000)// schedule to run every 12 hours
     public void refreshCache(){
+        if (layers.size() > 0) {
+            //data exists, no need to wait
+            wait.countDown();
+        }
+
         //initialise the cache based on the values at http://spatial.ala.org.au/ws/fields
         if(enabled){
             //create a tmp map
-            Map<String, String> tmpMap = new HashMap<String, String>();
-            layers = restTemplate.getForObject(spatialUrl, List.class);
+            Map tmpMap = new HashMap();
+            List list = restTemplate.getForObject(spatialUrl, List.class);
+            if (list != null && list.size() > 0) layers = list;
             for(Map values : layers){
                 tmpMap.put((String)values.get("id"), (String)values.get("desc"));
             }
-            idToNameMap = tmpMap;
-            distributions = initDistribution("distributions");
-            checklists = initDistribution("checklists");
+            if (tmpMap.size() > 0) idToNameMap = tmpMap;
+
+            tmpMap = initDistribution("distributions");
+            if (tmpMap.size() > 0) distributions = tmpMap;
+
+            tmpMap = initDistribution("checklists");
+            if (tmpMap.size() > 0) checklists = tmpMap;
 
             //TODO: initialize tracks only when webservices are available
             //tracks = initDistribution("tracks");
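Note: `wait` here is the service's existing CountDownLatch (not shown in this diff). With disk-cached data, the latch is now released before the remote call rather than after it, so callers blocked on the latch proceed immediately after a restart. Roughly, assuming that latch field:

    CountDownLatch wait = new CountDownLatch(1);

    public void refreshCache() {
        if (!layers.isEmpty()) {
            wait.countDown(); // disk-cached data is usable now; don't block readers
        }
        // ...remote fetch follows; fields are only replaced on non-empty results...
    }

    public Map<String, String> getLayerNameMap() throws InterruptedException {
        wait.await(); // returns immediately once any data, cached or fresh, exists
        return idToNameMap;
    }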
diff --git a/src/main/java/au/org/ala/biocache/service/AuthService.java b/src/main/java/au/org/ala/biocache/service/AuthService.java
index e60a61cdc..8ee262b48 100644
--- a/src/main/java/au/org/ala/biocache/service/AuthService.java
+++ b/src/main/java/au/org/ala/biocache/service/AuthService.java
@@ -14,7 +14,7 @@
  ***************************************************************************/
 package au.org.ala.biocache.service;
 
-import org.apache.commons.collections.MapUtils;
+import com.fasterxml.jackson.core.type.TypeReference;
 import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.scheduling.annotation.Scheduled;
@@ -59,9 +59,9 @@ public class AuthService {
     @Value("${caches.auth.enabled:true}")
     protected Boolean enabled = true;
     // Keep a reference to the output Map in case subsequent web service lookups fail
-    protected Map<String, String> userNamesById = new HashMap<String, String>();
-    protected Map<String, String> userNamesByNumericIds = new HashMap<String, String>();
-    protected Map<String, String> userEmailToId = new HashMap<String, String>();
+    protected Map<String, String> userNamesById = RestartDataService.get(this, "userNamesById", new TypeReference<HashMap<String, String>>(){}, HashMap.class);
+    protected Map<String, String> userNamesByNumericIds = RestartDataService.get(this, "userNamesByNumericIds", new TypeReference<HashMap<String, String>>(){}, HashMap.class);
+    protected Map<String, String> userEmailToId = RestartDataService.get(this, "userEmailToId", new TypeReference<HashMap<String, String>>(){}, HashMap.class);
 
     public AuthService() {
         logger.info("Instantiating AuthService: " + this);
@@ -113,7 +113,8 @@ private void loadMapOfAllUserNamesById() {
         final String jsonUri = userDetailsUrl + userNamesForIdPath;
         try {
             logger.info("authCache requesting: " + jsonUri);
-            userNamesById = restTemplate.postForObject(jsonUri, null, Map.class);
+            Map m = restTemplate.postForObject(jsonUri, null, Map.class);
+            if (m != null && m.size() > 0) userNamesById = m;
         } catch (Exception ex) {
             logger.error("RestTemplate error for " + jsonUri + ": " + ex.getMessage(), ex);
         }
@@ -123,7 +124,8 @@ private void loadMapOfAllUserNamesByNumericId() {
         final String jsonUri = userDetailsUrl + userNamesForNumericIdPath;
         try {
             logger.info("authCache requesting: " + jsonUri);
-            userNamesByNumericIds = restTemplate.postForObject(jsonUri, null, Map.class);
+            Map m = restTemplate.postForObject(jsonUri, null, Map.class);
+            if (m != null && m.size() > 0) userNamesByNumericIds = m;
         } catch (Exception ex) {
             logger.error("RestTemplate error for " + jsonUri + ": " + ex.getMessage(), ex);
         }
@@ -133,7 +135,8 @@ private void loadMapOfEmailToUserId() {
         final String jsonUri = userDetailsUrl + userNamesFullPath;
         try {
             logger.info("authCache requesting: " + jsonUri);
-            userEmailToId = restTemplate.postForObject(jsonUri, null, Map.class);
+            Map m = restTemplate.postForObject(jsonUri, null, Map.class);
+            if (m != null && m.size() > 0) userEmailToId = m;
             logger.info("authCache userEmail cache: " + userEmailToId.size());
             if(userEmailToId.size()>0){
                 String email = userEmailToId.keySet().iterator().next();
@@ -148,15 +151,27 @@ private void loadMapOfEmailToUserId() {
     @Scheduled(fixedDelay = 600000) // schedule to run every 10 min
     //@Async NC 2013-07-29: Disabled the Async so that we don't get bombarded with calls.
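Note on the reload rewrite just below: `Thread.start()` runs the body on a new thread, while calling `run()` directly executes it synchronously on the caller's thread. The patch uses this to make the reload non-blocking only when a remote user-details service is configured. A compact illustration (names are hypothetical):

    Thread reload = new Thread() {
        @Override
        public void run() {
            loadCaches(); // hypothetical reload body
        }
    };

    if (remoteServiceConfigured) {
        reload.start(); // asynchronous: startup continues while caches refresh
    } else {
        reload.run();   // synchronous: blocks until the reload finishes
    }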
     public void reloadCaches() {
-        if(enabled){
-            logger.info("Triggering reload of auth user names");
-            loadMapOfAllUserNamesById();
-            loadMapOfAllUserNamesByNumericId();
-            loadMapOfEmailToUserId();
-            logger.info("Finished reload of auth user names");
-        } else{
-            logger.info("Authentication Cache has been disabled");
+        Thread thread = new Thread() {
+            @Override
+            public void run() {
+                if(enabled){
+                    logger.info("Triggering reload of auth user names");
+                    loadMapOfAllUserNamesById();
+                    loadMapOfAllUserNamesByNumericId();
+                    loadMapOfEmailToUserId();
+                    logger.info("Finished reload of auth user names");
+                } else{
+                    logger.info("Authentication Cache has been disabled");
+                }
+            }
+        };
+
+        if (userDetailsPath.length() > 0) {
+            thread.start();
+        } else {
+            thread.run();
         }
+
     }
 
     public List<String> getUserRoles(String userId) {
diff --git a/src/main/java/au/org/ala/biocache/service/ListsService.java b/src/main/java/au/org/ala/biocache/service/ListsService.java
index 175bf0547..52bbfce6e 100644
--- a/src/main/java/au/org/ala/biocache/service/ListsService.java
+++ b/src/main/java/au/org/ala/biocache/service/ListsService.java
@@ -14,6 +14,7 @@
  ***************************************************************************/
 package au.org.ala.biocache.service;
 
+import com.fasterxml.jackson.core.type.TypeReference;
 import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.scheduling.annotation.Scheduled;
@@ -43,7 +44,7 @@ public class ListsService {
     @Value("${list.tool.url:http://lists.ala.org.au}")
     private String speciesListUrl;
 
-    private Map<String, Map<String, Set<String>>> data = new HashMap();
+    private Map<String, Map<String, Set<String>>> data = RestartDataService.get(this, "data", new TypeReference<HashMap<String, Map<String, Set<String>>>>(){}, HashMap.class);
 
     @PostConstruct
     private void init() {
@@ -62,14 +63,25 @@ public Map<String, Map<String, Set<String>>> getValues() {
 
     @Scheduled(fixedDelay = 43200000)// schedule to run every 12 hours
     public void refreshCache() {
+        if (data.size() > 0) {
+            //data exists, no need to wait
+            wait.countDown();
+        }
+
         if (enabled) {
             try {
+                HashMap map = new HashMap();
+
                 Map threatened = restTemplate.getForObject(new URI(speciesListUrl + "/ws/speciesList/?isThreatened=eq:true&isAuthoritative=eq:true"), Map.class);
                 Map invasive = restTemplate.getForObject(new URI(speciesListUrl + "/ws/speciesList/?isInvasive=eq:true&isAuthoritative=eq:true"), Map.class);
 
-                data.put("Conservation", getItemsMap(threatened));
-                data.put("Invasive", getItemsMap(invasive));
+                if ((threatened != null && threatened.size() > 0) ||
+                        (invasive != null && invasive.size() > 0)) {
+                    map.put("Conservation", getItemsMap(threatened));
+                    map.put("Invasive", getItemsMap(invasive));
+                    data = map;
+                }
             } catch (Exception e) {
                 logger.error("failed to get species lists for threatened or invasive species", e);
             }
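Note on the `TypeReference` arguments used throughout this patch: Java erases generics at runtime, so rebuilding a nested structure such as ListsService's `Map<String, Map<String, Set<String>>>` from a JSON file requires handing Jackson the full generic shape. A self-contained sketch (the file path is illustrative):

    ObjectMapper mapper = new ObjectMapper();
    File file = new File("/tmp/lists-data.json");
    Map<String, Map<String, Set<String>>> data = mapper.readValue(
            file, new TypeReference<Map<String, Map<String, Set<String>>>>() {});
    // with plain Map.class the inner values would come back as untyped maps and lists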
diff --git a/src/main/java/au/org/ala/biocache/service/LoggerRestService.java b/src/main/java/au/org/ala/biocache/service/LoggerRestService.java
index 458567d17..99bece889 100644
--- a/src/main/java/au/org/ala/biocache/service/LoggerRestService.java
+++ b/src/main/java/au/org/ala/biocache/service/LoggerRestService.java
@@ -14,6 +14,7 @@
  ***************************************************************************/
 package au.org.ala.biocache.service;
 
+import com.fasterxml.jackson.core.type.TypeReference;
 import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.scheduling.annotation.Scheduled;
@@ -39,10 +40,10 @@ public class LoggerRestService implements LoggerService {
 
     private final static Logger logger = Logger.getLogger(LoggerRestService.class);
 
-    private List<Map<String, Object>> loggerReasons;
-    private List<Map<String, Object>> loggerSources;
-    private List<Integer> reasonIds;
-    private List<Integer> sourceIds;
+    private List<Map<String, Object>> loggerReasons = RestartDataService.get(this, "loggerReasons", new TypeReference<ArrayList<Map<String, Object>>>(){}, ArrayList.class);
+    private List<Map<String, Object>> loggerSources = RestartDataService.get(this, "loggerSources", new TypeReference<ArrayList<Map<String, Object>>>(){}, ArrayList.class);
+    private List<Integer> reasonIds = RestartDataService.get(this, "reasonIds", new TypeReference<ArrayList<Integer>>(){}, ArrayList.class);
+    private List<Integer> sourceIds = RestartDataService.get(this, "sourceIds", new TypeReference<ArrayList<Integer>>(){}, ArrayList.class);
 
     //Used to wait for reloadCache() to complete
     private CountDownLatch initialised = new CountDownLatch(1);
@@ -100,13 +101,26 @@ private void init() {
      */
     @Scheduled(fixedDelay = 43200000)// schedule to run every 12 hours
     public void reloadCache() {
+        if (loggerReasons.size() > 0) {
+            //data exists, no need to wait
+            initialised.countDown();
+        }
+
         if (enabled) {
             logger.info("Refreshing the log sources and reasons");
 
-            loggerReasons = getEntities(LoggerType.reasons);
-            loggerSources = getEntities(LoggerType.sources);
+            List list;
+
+            list = getEntities(LoggerType.reasons);
+            if (list.size() > 0) loggerReasons = list;
+
+            list = getEntities(LoggerType.sources);
+            if (list.size() > 0) loggerSources = list;
+
             //now get the ids
-            reasonIds = getIdList(loggerReasons);
-            sourceIds = getIdList(loggerSources);
+            list = getIdList(loggerReasons);
+            if (list.size() > 0) reasonIds = list;
+
+            list = getIdList(loggerSources);
+            if (list.size() > 0) sourceIds = list;
         } else {
             if (reasonIds == null) {
                 logger.info("Providing some sensible default values for the log cache");
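Note: both properties introduced by this patch are optional and read via `@Value` defaults (`/tmp` and `true`). A configuration example, with an illustrative directory value:

    # where RestartDataService persists its JSON snapshots (default /tmp)
    restart.data.dir=/data/biocache/restart
    # set to false to disable disk caching entirely
    restart.data.enabled=true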
+ */
+@Component("restartDataService")
+public class RestartDataService {
+
+    protected static final Logger logger = Logger.getLogger(RestartDataService.class);
+
+    private static final ObjectMapper jsonMapper = new ObjectMapper();
+
+    private static Map<Object, List<String>> sources = new ConcurrentHashMap<Object, List<String>>();
+    private static Map<String, Object> values = new ConcurrentHashMap<>();
+
+    //dir is set by AppConfig
+    public static String dir;
+
+    @Value("${restart.data.enabled:true}")
+    public Boolean enabled;
+
+    private Thread loop;
+
+    @PostConstruct
+    public void init() {
+
+        if (enabled) {
+            new File(dir).mkdirs();
+
+            jsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
+            loop = new Thread() {
+                @Override
+                public void run() {
+                    try {
+                        while (true) {
+
+                            for (Object rc : sources.keySet()) {
+                                for (String field : sources.get(rc)) {
+                                    try {
+                                        Field f = rc.getClass().getDeclaredField(field);
+                                        if (!f.isAccessible()) {
+                                            f.setAccessible(true);
+                                        }
+                                        Object value = f.get(rc);
+
+                                        String key = rc.getClass().toString() + field;
+                                        if (value != values.get(key)) {
+                                            saveToDisk(key, value);
+                                            values.remove(key);
+                                            values.put(key, value);
+                                        }
+                                    } catch (Exception e) {
+                                        logger.error("error checking value: " + rc.getClass().toString() + field, e);
+                                    }
+                                }
+                            }
+
+                            sleep(10000);
+                        }
+                    } catch (InterruptedException e) {
+                    }
+                }
+            };
+
+            loop.setName("restart-data-service");
+            loop.start();
+        }
+    }
+
+    private static Object loadFromDisk(String key, TypeReference type) {
+        String path = dir + File.separator + key;
+        try {
+            synchronized (jsonMapper) {
+                //get value
+                File file = new File(path);
+                Object diskValue = null;
+                if (file.exists()) {
+                    jsonMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+                    jsonMapper.getTypeFactory().constructType(type);
+
+                    try {
+                        diskValue = jsonMapper.readValue(file, type);
+                    } catch (Exception e) {}
+
+                    if (diskValue == null) {
+                        //try backup
+                        file = new File(path + ".backup");
+                        if (file.exists()) {
+                            diskValue = jsonMapper.readValue(file, type);
+                        }
+                    }
+
+                    return diskValue;
+                }
+            }
+        } catch (Exception e) {
+            logger.error("failed to read: " + path + " into type:" + (type != null ? type.toString() : "null"), e);
+        }
+        return null;
+    }
+
+    private void saveToDisk(String key, Object value) {
+        String path = dir + File.separator + key;
+        try {
+            synchronized (jsonMapper) {
+                File file = new File(path);
+                if (file.exists()) {
+                    File backup = new File(path + ".backup");
+                    if (backup.exists()) backup.delete();
+                    FileUtils.moveFile(file, backup);
+                }
+                jsonMapper.writeValue(file, value);
+            }
+            logger.debug("writing " + path + " to disk");
+        } catch (Exception e) {
+            logger.error("failed to save to disk: " + path, e);
+        }
+    }
+
+    public static <T> T get(Object parent, String name, TypeReference typeRef, Class<T> defaultValue) {
+        if (typeRef == null) {
+            logger.error("typeRef cannot be null: " + parent.toString() + " " + name);
+        }
+        if (sources.containsKey(parent)) {
+            sources.get(parent).add(name);
+        } else {
+            List<String> list = new ArrayList<String>();
+            list.add(name);
+            sources.put(parent, list);
+        }
+
+        String key = parent.getClass().toString() + name;
+
+        T value = null;
+        try {
+            value = (T) loadFromDisk(key, typeRef);
+            if (value == null) {
+                value = defaultValue.newInstance();
+            } else {
+                logger.debug("reading " + parent.getClass().toString() + " " + name + " from disk cache");
+            }
+            values.put(key, value);
+        } catch (Exception e) {
+            logger.error("failed to instantiate: " + (defaultValue != null ? defaultValue.toString() : "null"), e);
+        }
+
+        return value;
+    }
+}
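Note: the persistence loop above detects changes by reference (`value != values.get(key)`), not by `equals()`. That is deliberate and cheap, but it only works because every registered service publishes updates by assigning a new collection rather than mutating the existing one. What a registered service must do, sketched with a hypothetical field:

    // detected: the field now points at a different object
    idToNameMap = rebuiltMap;

    // NOT detected: same object, mutated in place
    idToNameMap.put("el1", "layer name");

The write path is also worth noting: the previous snapshot is moved aside as <key>.backup before each write, so a crash mid-write still leaves a readable copy for loadFromDisk's fallback.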
diff --git a/src/main/java/au/org/ala/biocache/service/SpeciesImageService.java b/src/main/java/au/org/ala/biocache/service/SpeciesImageService.java
index 8386d9019..755d36198 100644
--- a/src/main/java/au/org/ala/biocache/service/SpeciesImageService.java
+++ b/src/main/java/au/org/ala/biocache/service/SpeciesImageService.java
@@ -16,10 +16,12 @@
 
 import au.org.ala.biocache.dao.SearchDAO;
 import au.org.ala.biocache.dto.*;
+import com.fasterxml.jackson.core.type.TypeReference;
 import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Component;
 
+import javax.annotation.PostConstruct;
 import javax.inject.Inject;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -46,7 +48,7 @@ public class SpeciesImageService {
     protected SearchDAO searchDAO;
 
     private Object cacheLock = new Object();
-    private SpeciesImagesDTO cache;
+    private SpeciesImagesDTO cache = RestartDataService.get(this, "cache", new TypeReference<SpeciesImagesDTO>(){}, SpeciesImagesDTO.class);
     private boolean updatingCache = false;
 
     Thread updateCacheThread = new CacheThread();
@@ -105,15 +107,23 @@ public void run() {
                 //store in map
                 synchronized (cacheLock) {
                     updatingCache = false;
-                    cache = speciesImages;
+                    if (speciesImages.getSpeciesImage().length > 0) {
+                        cache = speciesImages;
+                    }
                 }
 
                 logger.debug("time to refresh SpeciesImageService: " + (System.currentTimeMillis() - startTime) + "ms");
             } catch (Exception e) {
-                e.printStackTrace();
+                logger.error(e.getMessage(), e);
             }
         }
-    };
+    }
+
+    @PostConstruct
+    public void init() {
+        resetCache();
+    }
 
     /**
      * Permit disabling of cached species images
@@ -129,7 +139,7 @@ public void run() {
     public SpeciesImagesDTO getSpeciesImages() {
         if (!enabled) return null;
 
-        if (cache == null) {
+        if (cache.getSpeciesImage() == null) {
             synchronized (cacheLock) {
                 if (!updatingCache && cache == null) {
                     updatingCache = true;
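Note: `getSpeciesImages()` keeps its check-then-lock-then-recheck shape, but the outer trigger is now `cache.getSpeciesImage() == null` because `RestartDataService.get` effectively always returns an instance (an empty `SpeciesImagesDTO` when nothing is on disk). With a non-null `cache`, the inner `cache == null` recheck can no longer fire, so the cold-start rebuild is instead driven by the new `@PostConstruct init()` calling `resetCache()`. The general double-checked idiom, simplified (the patch itself keeps the original inner test):

    if (cache.getSpeciesImage() == null) {   // fast path, no lock taken
        synchronized (cacheLock) {
            if (!updatingCache) {            // recheck under the lock
                updatingCache = true;
                updateCacheThread.start();   // rebuild while readers use the old snapshot
            }
        }
    }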
diff --git a/src/main/java/au/org/ala/biocache/util/CollectionsCache.java b/src/main/java/au/org/ala/biocache/util/CollectionsCache.java
index d8f3c938e..584cd2a95 100644
--- a/src/main/java/au/org/ala/biocache/util/CollectionsCache.java
+++ b/src/main/java/au/org/ala/biocache/util/CollectionsCache.java
@@ -14,6 +14,8 @@
  ***************************************************************************/
 package au.org.ala.biocache.util;
 
+import au.org.ala.biocache.service.RestartDataService;
+import com.fasterxml.jackson.core.type.TypeReference;
 import org.apache.log4j.Logger;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.scheduling.annotation.Scheduled;
@@ -37,13 +39,13 @@
 @Component("collectionsCache")
 public class CollectionsCache {
 
-    protected LinkedHashMap<String, String> dataResources = new LinkedHashMap<String, String>();
-    protected LinkedHashMap<String, String> dataProviders = new LinkedHashMap<String, String>();
-    protected LinkedHashMap<String, String> tempDataResources = new LinkedHashMap<String, String>();
-    protected LinkedHashMap<String, Integer> downloadLimits = new LinkedHashMap<String, Integer>();
-    protected LinkedHashMap<String, String> institutions = new LinkedHashMap<String, String>();
-    protected LinkedHashMap<String, String> collections = new LinkedHashMap<String, String>();
-    protected LinkedHashMap<String, String> dataHubs = new LinkedHashMap<String, String>();
+    protected LinkedHashMap<String, String> dataResources = RestartDataService.get(this, "dataResources", new TypeReference<LinkedHashMap>(){}, LinkedHashMap.class);
+    protected LinkedHashMap<String, String> dataProviders = RestartDataService.get(this, "dataProviders", new TypeReference<LinkedHashMap>(){}, LinkedHashMap.class);
+    protected LinkedHashMap<String, String> tempDataResources = RestartDataService.get(this, "tempDataResources", new TypeReference<LinkedHashMap>(){}, LinkedHashMap.class);
+    protected LinkedHashMap<String, Integer> downloadLimits = RestartDataService.get(this, "downloadLimits", new TypeReference<LinkedHashMap>(){}, LinkedHashMap.class);
+    protected LinkedHashMap<String, String> institutions = RestartDataService.get(this, "institutions", new TypeReference<LinkedHashMap>(){}, LinkedHashMap.class);
+    protected LinkedHashMap<String, String> collections = RestartDataService.get(this, "collections", new TypeReference<LinkedHashMap>(){}, LinkedHashMap.class);
+    protected LinkedHashMap<String, String> dataHubs = RestartDataService.get(this, "dataHubs", new TypeReference<LinkedHashMap>(){}, LinkedHashMap.class);
     protected List<String> institution_uid = null;
     protected List<String> collection_uid = null;
     protected List<String> data_resource_uid = null;
@@ -55,7 +57,7 @@ public class CollectionsCache {
 
     //NC 20131018: Allow cache to be disabled via config (enabled by default)
     @Value("${caches.collections.enabled:true}")
-    protected Boolean enabled =null;
+    protected Boolean enabled = null;
     /** Spring injected RestTemplate object */
     @Inject
     private RestOperations restTemplate; // NB MappingJacksonHttpMessageConverter() injected by Spring
@@ -100,19 +102,44 @@ public LinkedHashMap<String, Integer> getDownloadLimits(){
      */
    @Scheduled(fixedDelay = 3600000L) //every hour
    public void updateCache() {
-        if(enabled){
-            logger.info("Updating collectory cache...");
-            this.collections = getCodesMap(ResourceType.COLLECTION, collection_uid);
-            this.institutions = getCodesMap(ResourceType.INSTITUTION, institution_uid);
-            this.dataResources = getCodesMap(ResourceType.DATA_RESOURCE,data_resource_uid);
-            this.dataProviders = getCodesMap(ResourceType.DATA_PROVIDER, data_provider_uid);
-            this.tempDataResources = getCodesMap(ResourceType.TEMP_DATA_RESOURCE, null);
-            this.dataHubs = getCodesMap(ResourceType.DATA_HUB, data_hub_uid);
-            this.dataResources.putAll(tempDataResources);
-        } else{
-            logger.info("Collectory cache has been disabled");
-        }
+        Thread thread = new Thread() {
+            @Override
+            public void run() {
+                if(enabled){
+                    logger.info("Updating collectory cache...");
+                    LinkedHashMap m;
+                    m = getCodesMap(ResourceType.COLLECTION, collection_uid);
+                    if (m != null && m.size() > 0) collections = m;
+
+                    m = getCodesMap(ResourceType.INSTITUTION, institution_uid);
+                    if (m != null && m.size() > 0) institutions = m;
+
+                    m = getCodesMap(ResourceType.DATA_RESOURCE, data_resource_uid);
+                    if (m != null && m.size() > 0) dataResources = m;
+
+                    m = getCodesMap(ResourceType.DATA_PROVIDER, data_provider_uid);
+                    if (m != null && m.size() > 0) dataProviders = m;
+
+                    m = getCodesMap(ResourceType.TEMP_DATA_RESOURCE, null);
+                    if (m != null && m.size() > 0) tempDataResources = m;
+
+                    m = getCodesMap(ResourceType.DATA_HUB, data_hub_uid);
+                    if (m != null && m.size() > 0) dataHubs = m;
+
+                    dataResources.putAll(tempDataResources);
+                } else{
+                    logger.info("Collectory cache has been disabled");
+                }
+            }
+        };
+
+        if (collections.size() > 0) {
+            //data already exists, do not wait
+            thread.start();
+        } else {
+            //wait
+            thread.run();
+        }
     }
 
     /**
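Note: the `TypeReference<LinkedHashMap>` tokens here are raw, which Jackson accepts: entries come back keyed and valued by whatever the JSON types map to, so the String-to-String uid/name maps survive a round trip, and `downloadLimits` (String to Integer) also works because JSON numbers deserialize to `Integer`. A runnable illustration (the uid and values are made up):

    ObjectMapper mapper = new ObjectMapper();
    LinkedHashMap m = mapper.readValue(
            "{\"dr376\":\"Example resource\",\"limit\":10}",
            new TypeReference<LinkedHashMap>() {});
    // m.get("dr376") is a String, m.get("limit") an Integer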
diff --git a/src/main/java/au/org/ala/biocache/util/DownloadFields.java b/src/main/java/au/org/ala/biocache/util/DownloadFields.java
index e7e33e33d..ea876621e 100644
--- a/src/main/java/au/org/ala/biocache/util/DownloadFields.java
+++ b/src/main/java/au/org/ala/biocache/util/DownloadFields.java
@@ -18,6 +18,8 @@
 import au.org.ala.biocache.Store;
 import au.org.ala.biocache.dto.IndexFieldDTO;
 import au.org.ala.biocache.service.LayersService;
+import au.org.ala.biocache.service.RestartDataService;
+import com.fasterxml.jackson.core.type.TypeReference;
 import org.apache.commons.lang.StringUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -39,7 +41,7 @@ public class DownloadFields {
 
     private LayersService layersService;
 
-    private Properties layerProperties = new Properties();
+    private Properties layerProperties = RestartDataService.get(this, "layerProperties", new TypeReference<Properties>(){}, Properties.class);
 
     private Map<String, IndexFieldDTO> indexFieldMaps;
 
     public DownloadFields(Set<IndexFieldDTO> indexFields, AbstractMessageSource messageSource, LayersService layersService){
@@ -47,17 +49,7 @@ public DownloadFields(Set indexFields, AbstractMessageSource mess
 
         this.layersService = layersService;
 
-        //initialise the properties
-        try {
-            indexFieldMaps = new TreeMap<String, IndexFieldDTO>();
-            for(IndexFieldDTO field: indexFields){
-                indexFieldMaps.put(field.getName(), field);
-            }
-
-            updateLayerNames();
-        } catch(Exception e) {
-            logger.error(e.getMessage(), e);
-        }
+        update(indexFields);
     }
 
     private void updateLayerNames() {
@@ -169,4 +161,19 @@ public List[] getIndexFields(String[] values, boolean dwcHeaders, String
     private boolean isSpatialField(String name) {
         return name.matches("((cl)|(el))[0-9]+");
     }
+
+    public void update(Set<IndexFieldDTO> indexedFields) {
+        //initialise the properties
+        try {
+            Map<String, IndexFieldDTO> map = new TreeMap<String, IndexFieldDTO>();
+            for(IndexFieldDTO field: indexedFields){
+                map.put(field.getName(), field);
+            }
+            indexFieldMaps = map;
+
+            updateLayerNames();
+        } catch(Exception e) {
+            logger.error(e.getMessage(), e);
+        }
+    }
 }
diff --git a/src/main/java/au/org/ala/biocache/web/OccurrenceController.java b/src/main/java/au/org/ala/biocache/web/OccurrenceController.java
index e30cfeaff..8c9872e70 100644
--- a/src/main/java/au/org/ala/biocache/web/OccurrenceController.java
+++ b/src/main/java/au/org/ala/biocache/web/OccurrenceController.java
@@ -159,8 +159,6 @@ public void run() {
                 try {
                     while (true) {
                         try {
-                            searchDAO.refreshCaches();
-
                             Set<IndexFieldDTO> indexedFields = searchDAO.getIndexedFields();
 
                             if (indexedFields != null) {
diff --git a/src/main/resources/log4j.xml b/src/main/resources/log4j.xml
index 4d3dd6180..74ac9774f 100644
--- a/src/main/resources/log4j.xml
+++ b/src/main/resources/log4j.xml
@@ -80,6 +80,9 @@
+    <logger name="au.org.ala.biocache.service.RestartDataService">
+        <level value="DEBUG"/>
+    </logger>