Skip to content

Commit

Permalink
#1278 Reduce code duplication
Browse files Browse the repository at this point in the history
  • Loading branch information
heikkidoeleman committed May 1, 2013
1 parent 49d508b commit 2f81619
Show file tree
Hide file tree
Showing 30 changed files with 225 additions and 782 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -641,17 +641,22 @@ protected void doDestroy(Dbms dbms) throws SQLException {
*/
protected abstract void doUpdate(Dbms dbms, String id, Element node) throws BadInputEx, SQLException;

/**
*
* @return
*/
protected abstract Element getResult();

/**
*
* @param node
*/
protected abstract void doAddInfo(Element node);
/**
 * Appends the details of the last harvest run to the "info" child of the
 * given node. Does nothing while no harvesting run has produced a result yet.
 *
 * @param node element whose "info" child receives the harvest result element
 */
protected void doAddInfo(Element node) {
    if (result != null) {
        //--- a run has completed: attach the detailed result element
        node.getChild("info").addContent(getResult());
    }
    //--- otherwise harvesting has not started yet and there is nothing to report
}

/**
*
Expand Down Expand Up @@ -736,11 +741,10 @@ protected void storePrivileges(Dbms dbms, AbstractParams params, String path) th

for (Privileges p : params.getPrivileges()) {
String groupId = settingMan.add(dbms, "id:"+ privId, "group", p.getGroupId());

for (int oper : p.getOperations()) {
settingMan.add(dbms, "id:"+ groupId, "operation", oper);
}
}
}
}
}

/**
Expand Down Expand Up @@ -805,6 +809,41 @@ public void setParams(AbstractParams params) {
this.params = params;
}

/**
 * Builds a "result" element listing the counters of the last harvest run.
 * When no run has completed yet ({@code result == null}) the returned
 * element has no children.
 *
 * @return a newly created "result" element, never null
 */
protected Element getResult() {
    Element resultEl = new Element("result");
    if (result == null) {
        //--- nothing harvested yet: report an empty result element
        return resultEl;
    }
    //--- children are appended in this fixed order
    add(resultEl, "added", result.addedMetadata);
    add(resultEl, "atomicDatasetRecords", result.atomicDatasetRecords);
    add(resultEl, "badFormat", result.badFormat);
    add(resultEl, "collectionDatasetRecords", result.collectionDatasetRecords);
    add(resultEl, "datasetUuidExist", result.datasetUuidExist);
    add(resultEl, "doesNotValidate", result.doesNotValidate);
    add(resultEl, "duplicatedResource", result.duplicatedResource);
    add(resultEl, "fragmentsMatched", result.fragmentsMatched);
    add(resultEl, "fragmentsReturned", result.fragmentsReturned);
    add(resultEl, "fragmentsUnknownSchema", result.fragmentsUnknownSchema);
    add(resultEl, "incompatible", result.incompatibleMetadata);
    add(resultEl, "recordsBuilt", result.recordsBuilt);
    add(resultEl, "recordsUpdated", result.recordsUpdated);
    add(resultEl, "removed", result.locallyRemoved);
    add(resultEl, "serviceRecords", result.serviceRecords);
    add(resultEl, "subtemplatesAdded", result.subtemplatesAdded);
    add(resultEl, "subtemplatesRemoved", result.subtemplatesRemoved);
    add(resultEl, "subtemplatesUpdated", result.subtemplatesUpdated);
    add(resultEl, "total", result.totalMetadata);
    add(resultEl, "unchanged", result.unchangedMetadata);
    add(resultEl, "unknownSchema", result.unknownSchema);
    add(resultEl, "unretrievable", result.unretrievable);
    add(resultEl, "updated", result.updatedMetadata);
    add(resultEl, "thumbnails", result.thumbnails);
    add(resultEl, "thumbnailsFailed", result.thumbnailsFailed);
    return resultEl;
}
//--------------------------------------------------------------------------
//---
//--- Variables
Expand All @@ -822,8 +861,10 @@ public void setParams(AbstractParams params) {
protected DataManager dataMan;

protected AbstractParams params;
protected HarvestResult result;

protected Logger log = Log.createLogger(Geonet.HARVESTER);

private static Map<String, Class<?>> hsHarvesters = new HashMap<String, Class<?>>();

}
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
package org.fao.geonet.kernel.harvest.harvester;

/**
 * Plain holder of counters describing the outcome of one harvester run.
 * Shared across all harvester implementations (it replaces the various
 * per-harvester result classes); harvesters increment the public fields
 * directly. Not every harvester uses every counter, so unused ones stay 0.
 */
public class HarvestResult {
public int addedMetadata; // = records newly added to the local catalogue
public int atomicDatasetRecords; // = md for atomic datasets
public int badFormat; // = records rejected because of bad format
public int collectionDatasetRecords; // = md for collection datasets
public int couldNotInsert; // = records that failed to insert
public int datasetUuidExist; // = uuid already in catalogue
public int doesNotValidate; // = records failing validation (0 when not validated)
public int duplicatedResource; // = records skipped as duplicated resources
public int fragmentsMatched; // = fragments matched in md templates
public int fragmentsReturned; // = fragments generated
public int fragmentsUnknownSchema; // = fragments with unknown schema
public int incompatibleMetadata; // = records incompatible with this harvester
public int layer; // = md for data
public int layerUuidExist; // = uuid already in catalogue
public int layerUsingMdUrl; // = md for data using metadata URL document if ok
public int locallyRemoved; // = md removed from the local catalogue
public int recordsBuilt; // = records built from fragments
public int recordsRemoved; // = records removed
public int recordsUpdated; // = records updated
public int schemaSkipped; // = records skipped because of their schema
public int serviceRecords; // = md for services
public String siteId; // identifier of the harvested site
public int subtemplatesAdded; // = subtemplates for collection datasets
public int subtemplatesRemoved; // = subtemplates removed (NOTE(review): original comment said "= fragments generated", likely copy-pasted from fragmentsReturned)
public int subtemplatesUpdated; // = subtemplates updated
public int totalMetadata; // = md for data and service (ie. data + 1)
public int unchangedMetadata; // = md left unchanged
public int unknownSchema; // = md with unknown schema (should be 0 if no layer loaded using md url)
public int unretrievable; // = http connection failed
public int updatedMetadata; // = md updated
public int uuidSkipped; // = records skipped because of their uuid
public int thumbnails; // = number of thumbnails generated
public int thumbnailsFailed; // = number of thumbnail creations that failed


}
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.fao.geonet.kernel.harvest.harvester.AbstractParams;
import org.fao.geonet.kernel.harvest.harvester.CategoryMapper;
import org.fao.geonet.kernel.harvest.harvester.GroupMapper;
import org.fao.geonet.kernel.harvest.harvester.HarvestResult;
import org.fao.geonet.lib.Lib;
import org.fao.geonet.resources.Resources;
import org.fao.geonet.util.ISODate;
Expand All @@ -59,7 +60,7 @@
public class ArcSDEHarvester extends AbstractHarvester {

private ArcSDEParams params;
private ArcSDEResult result;
private HarvestResult result;

static final String ARCSDE_LOG_MODULE_NAME = Geonet.HARVESTER + ".arcsde";
private static final String ARC_TO_ISO19115_TRANSFORMER = "ArcCatalog8_to_ISO19115.xsl";
Expand Down Expand Up @@ -117,31 +118,17 @@ protected String doAdd(Dbms dbms, Element node) throws BadInputEx, SQLException
// }
}

@Override
protected void doAddInfo(Element node) {
//--- if the harvesting is not started yet, we don't have any info

if (result == null)
return;

//--- ok, add proper info

Element info = node.getChild("info");
Element res = getResult();
info.addContent(res);
}

@Override
protected Element getResult() {
Element res = new Element("result");

if (result != null) {
add(res, "total", result.total);
add(res, "added", result.added);
add(res, "updated", result.updated);
add(res, "unchanged", result.unchanged);
add(res, "total", result.totalMetadata);
add(res, "added", result.addedMetadata);
add(res, "updated", result.updatedMetadata);
add(res, "unchanged", result.unchangedMetadata);
add(res, "unknownSchema", result.unknownSchema);
add(res, "removed", result.removed);
add(res, "removed", result.locallyRemoved);
add(res, "unretrievable", result.unretrievable);
add(res, "badFormat", result.badFormat);
add(res, "doesNotValidate",result.doesNotValidate);
Expand All @@ -161,7 +148,7 @@ protected void doHarvest(Logger l, ResourceManager rm) throws Exception {

private void align(List<String> metadataList, ResourceManager rm) throws Exception {
Log.info(ARCSDE_LOG_MODULE_NAME, "Start of alignment for : "+ params.name);
ArcSDEResult result = new ArcSDEResult();
HarvestResult result = new HarvestResult();
Dbms dbms = (Dbms) rm.open(Geonet.Res.MAIN_DB);
//----------------------------------------------------------------
//--- retrieve all local categories and groups
Expand All @@ -173,7 +160,7 @@ private void align(List<String> metadataList, ResourceManager rm) throws Excepti
//-----------------------------------------------------------------------
//--- insert/update metadata
for(String metadata : metadataList) {
result.total++;
result.totalMetadata++;
// create JDOM element from String-XML
Element metadataElement = Xml.loadString(metadata, false);
// transform ESRI output to ISO19115
Expand Down Expand Up @@ -210,12 +197,12 @@ private void align(List<String> metadataList, ResourceManager rm) throws Excepti
if (id == null) {
Log.info(ARCSDE_LOG_MODULE_NAME, "adding new metadata");
id = addMetadata(iso19139, uuid, dbms, schema, localGroups, localCateg);
result.added++;
result.addedMetadata++;
}
else {
Log.info(ARCSDE_LOG_MODULE_NAME, "updating existing metadata, id is: " + id);
updateMetadata(iso19139, id, dbms, localGroups, localCateg);
result.updated++;
result.updatedMetadata++;
}
idsForHarvestingResult.add(id);
}
Expand All @@ -230,7 +217,7 @@ private void align(List<String> metadataList, ResourceManager rm) throws Excepti
String ex$ = existingId.getChildText("id");
if(!idsForHarvestingResult.contains(ex$)) {
dataMan.deleteMetadataGroup(context, dbms, ex$);
result.removed++;
result.locallyRemoved++;
}
}
}
Expand Down Expand Up @@ -328,15 +315,4 @@ public String getType() {
return "arcsde";
}

/**
 * Counters for one ArcSDE harvesting run.
 * NOTE(review): superseded in this commit by the shared
 * org.fao.geonet.kernel.harvest.harvester.HarvestResult class; this nested
 * class appears to be dead code once all usages are migrated.
 */
static class ArcSDEResult {
public int total; // = records processed in total
public int added; // = records added locally
public int updated; // = records updated locally
public int unchanged; // = records left unchanged
public int removed; // = records removed locally
public int unknownSchema; // = records with unknown schema
public int unretrievable; // = records that could not be retrieved
public int badFormat; // = records rejected because of bad format
public int doesNotValidate; // = records failing validation
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -185,50 +185,6 @@ protected void storeNodeExtra(Dbms dbms, AbstractParams p, String path, String s
}*/
}

//---------------------------------------------------------------------------
//---
//--- AddInfo
//---
//---------------------------------------------------------------------------

/**
 * Adds the last harvest result to the "info" child of the given node.
 * Does nothing while no harvesting run has produced a result yet.
 *
 * @param node element whose "info" child receives the result element
 */
protected void doAddInfo(Element node) {
//--- if the harvesting is not started yet, we don't have any info
if (result == null) {
return;
}

//--- ok, add proper info: append the detailed counters built by getResult()
Element info = node.getChild("info");
Element res = getResult();
info.addContent(res);
}

//---------------------------------------------------------------------------
//---
//--- GetResult
//---
//---------------------------------------------------------------------------

/**
 * Builds a "result" element summarising the counters of the last harvest
 * run. The element is returned without children when no harvesting has
 * happened yet.
 *
 * @return a newly created "result" element, never null
 */
protected Element getResult() {
    Element summary = new Element("result");
    if (result == null) {
        //--- no run yet: nothing to summarise
        return summary;
    }
    //--- children are appended in this fixed order
    add(summary, "total", result.totalMetadata);
    add(summary, "added", result.addedMetadata);
    add(summary, "updated", result.updatedMetadata);
    add(summary, "unchanged", result.unchangedMetadata);
    add(summary, "duplicatedResource", result.duplicatedResource);
    add(summary, "unknownSchema", result.unknownSchema);
    add(summary, "removed", result.locallyRemoved);
    add(summary, "unretrievable", result.unretrievable);
    add(summary, "doesNotValidate", result.doesNotValidate);
    return summary;
}

//---------------------------------------------------------------------------
//---
//--- Harvest
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
import org.fao.geonet.kernel.harvest.BaseAligner;
import org.fao.geonet.kernel.harvest.harvester.CategoryMapper;
import org.fao.geonet.kernel.harvest.harvester.GroupMapper;
import org.fao.geonet.kernel.harvest.harvester.HarvestResult;
import org.fao.geonet.kernel.harvest.harvester.RecordInfo;
import org.fao.geonet.kernel.harvest.harvester.UUIDMapper;
import org.jdom.Element;
Expand All @@ -59,7 +60,7 @@ public Aligner(Logger log, ServiceContext sc, Dbms dbms, GeoPRESTParams params)

GeonetContext gc = (GeonetContext) context.getHandlerContext(Geonet.CONTEXT_NAME);
dataMan = gc.getDataManager();
result = new GeoPRESTResult();
result = new HarvestResult();

//--- setup REST operation rest/document?id={uuid}

Expand All @@ -73,7 +74,7 @@ public Aligner(Logger log, ServiceContext sc, Dbms dbms, GeoPRESTParams params)
//---
//--------------------------------------------------------------------------

public GeoPRESTResult align(Set<RecordInfo> records) throws Exception {
public HarvestResult align(Set<RecordInfo> records) throws Exception {
log.info("Start of alignment for : "+ params.name);

//-----------------------------------------------------------------------
Expand Down Expand Up @@ -318,7 +319,7 @@ private Element retrieveMetadata(String uuid)
private CategoryMapper localCateg;
private GroupMapper localGroups;
private UUIDMapper localUuids;
private GeoPRESTResult result;
private HarvestResult result;
}

//=============================================================================
Expand Down
Loading

0 comments on commit 2f81619

Please sign in to comment.