This repository has been archived by the owner on May 23, 2019. It is now read-only.

Commit

Update #87, correct bug with Origin var name and associated Gson; correct bug with metadata process when the base item is a file.
hdsdi3g committed Mar 12, 2015
1 parent 1f3e59c commit 348a0f4
Showing 9 changed files with 53 additions and 35 deletions.
2 changes: 1 addition & 1 deletion app/hd3gtv/mydmam/cli/CliModuleMetadata.java
@@ -106,7 +106,7 @@ public void execCliModule(ApplicationArgs args) throws Exception {
return;
}
MetadataIndexer metadataIndexer = new MetadataIndexer(true);
- metadataIndexer.process(root_indexing.storagename, root_indexing.currentpath, 0);
+ metadataIndexer.process(root_indexing, 0);
return;
} else if (args.getParamExist("-clean")) {
Log2.log.info("Start clean operations");
2 changes: 0 additions & 2 deletions app/hd3gtv/mydmam/manager/JobNG.java
@@ -102,8 +102,6 @@ static ColumnPrefixDistributedRowLock<String> prepareLock() {
final static int TTL_DONE = 3600 * 24;
final static int TTL_TROUBLES = 3600 * 24 * 3;

- // TODO
-
public enum JobStatus {
TOO_OLD, CANCELED, POSTPONED, WAITING, DONE, PROCESSING, STOPPED, ERROR, PREPARING, TOO_LONG_DURATION;
}
24 changes: 20 additions & 4 deletions app/hd3gtv/mydmam/metadata/MetadataIndexer.java
@@ -46,6 +46,7 @@ public class MetadataIndexer implements IndexingEvent {
private MetadataIndexingLimit limit_processing;

public MetadataIndexer(boolean force_refresh) throws Exception {
+ explorer = new Explorer();
this.force_refresh = force_refresh;
current_create_job_list = new ArrayList<FutureCreateJobs>();
}
@@ -57,11 +58,27 @@ public void setLimitProcessing(MetadataIndexingLimit limit_processing) {
/**
* @return new created jobs, never null
*/
- public List<JobNG> process(String storagename, String currentpath, long min_index_date) throws Exception {
+ public List<JobNG> process(SourcePathIndexerElement item, long min_index_date) throws Exception {
+ if (item == null) {
+ return new ArrayList<JobNG>(1);
+ }

stop_analysis = false;
es_bulk = Elasticsearch.prepareBulk();
- explorer = new Explorer();
- explorer.getAllSubElementsFromElementKey(Explorer.getElementKey(storagename, currentpath), min_index_date, this);

+ Log2Dump dump = new Log2Dump();
+ dump.add("item", item);
+ dump.addDate("min_index_date", min_index_date);
+ Log2.log.debug("Prepare", dump);

+ if (item.directory) {
+ explorer.getAllSubElementsFromElementKey(item.prepare_key(), min_index_date, this);
+ } else {
+ if (onFoundElement(item) == false) {
+ return new ArrayList<JobNG>(1);
+ }
+ }

es_bulk.terminateBulk();

if (current_create_job_list.isEmpty()) {
@@ -215,7 +232,6 @@ public boolean onFoundElement(SourcePathIndexerElement element) throws Exception
if (limit_processing != null) {
indexing.setLimit(limit_processing);
}
-
ContainerOperations.save(indexing.doIndexing(), false, es_bulk);
return true;
}
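
For context, a minimal caller sketch of the reworked entry point. The MyDMAM calls are the ones visible in the hunks above and below; the wrapper class, method name, and variable names are illustrative assumptions, not code from this commit:

import hd3gtv.mydmam.manager.JobNG;
import hd3gtv.mydmam.metadata.MetadataIndexer;
import hd3gtv.mydmam.pathindexing.Explorer;
import hd3gtv.mydmam.pathindexing.SourcePathIndexerElement;
import java.util.List;

// Illustrative helper, not part of this commit.
class IndexSingleElementSketch {
    static List<JobNG> index(String storagename, String currentpath) throws Exception {
        Explorer explorer = new Explorer();
        // Resolve the path-index element first; process() now takes the element itself.
        SourcePathIndexerElement item = explorer.getelementByIdkey(Explorer.getElementKey(storagename, currentpath));
        MetadataIndexer indexer = new MetadataIndexer(true);
        // A directory is crawled via Explorer, a single file goes straight to onFoundElement(),
        // and a null item yields an empty job list. 0 is the min_index_date used by the CLI module above.
        return indexer.process(item, 0);
    }
}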
4 changes: 2 additions & 2 deletions app/hd3gtv/mydmam/metadata/MetadataIndexingOperation.java
@@ -155,8 +155,8 @@ public Container doIndexing() throws Exception {
reference.parentpath = physical_source.getParentFile().getAbsolutePath();
}

- ContainerOrigin container_origin = ContainerOrigin.fromSource(reference, physical_source);
- Container container = new Container(container_origin.getUniqueElementKey(), container_origin);
+ ContainerOrigin origin = ContainerOrigin.fromSource(reference, physical_source);
+ Container container = new Container(origin.getUniqueElementKey(), origin);
EntrySummary entry_summary = new EntrySummary();
container.addEntry(entry_summary);

4 changes: 2 additions & 2 deletions app/hd3gtv/mydmam/metadata/WorkerIndexer.java
@@ -42,6 +42,7 @@ public class WorkerIndexer extends WorkerNG {
private volatile List<MetadataIndexer> analysis_indexers;

private HashMap<String, Long> lastindexeddatesforstoragenames;
+ private Explorer explorer = new Explorer();

public WorkerIndexer(AppManager manager) throws ClassNotFoundException {
if (isActivated() == false) {
@@ -95,8 +96,7 @@ protected void workerProcessJob(JobProgression progression, JobContext context)
} else {
lastindexeddatesforstoragenames.put(storagename, 0l);
}
-
- metadataIndexer.process(storagename, analyst_context.currentpath, min_index_date);
+ metadataIndexer.process(explorer.getelementByIdkey(Explorer.getElementKey(storagename, analyst_context.currentpath)), min_index_date);
analysis_indexers.remove(metadataIndexer);
}
}
16 changes: 8 additions & 8 deletions app/hd3gtv/mydmam/metadata/container/ContainerEntry.java
@@ -30,12 +30,12 @@ public abstract class ContainerEntry implements SelfSerializing, Log2Dumpable {
ContainerEntry() {
}

- private ContainerOrigin containerOrigin;
+ private ContainerOrigin origin;

transient Container container;

public final ContainerOrigin getOrigin() {
- return containerOrigin;
+ return origin;
}

public abstract String getES_Type();
@@ -44,15 +44,15 @@ public Log2Dump getLog2Dump() {
Log2Dump dump = new Log2Dump();
dump.add("type", this.getClass().getName());
dump.add("ES type", getES_Type());
dump.add("origin", containerOrigin);
dump.add("origin", origin);
return dump;
}

- public final void setOrigin(ContainerOrigin containerOrigin) throws NullPointerException {
- if (containerOrigin == null) {
+ public final void setOrigin(ContainerOrigin origin) throws NullPointerException {
+ if (origin == null) {
throw new NullPointerException("\"origin\" can't to be null");
}
- this.containerOrigin = containerOrigin;
+ this.origin = origin;
}

/**
@@ -69,14 +69,14 @@ public ContainerEntry deserialize(JsonObject source, Gson gson) {
JsonElement j_origin = source.get("origin");
source.remove("origin");
ContainerEntry item = internalDeserialize(source, gson);
- item.containerOrigin = gson.fromJson(j_origin, ContainerOrigin.class);
+ item.origin = gson.fromJson(j_origin, ContainerOrigin.class);
return item;
}

public JsonObject serialize(SelfSerializing _item, Gson gson) {
ContainerEntry item = (ContainerEntry) _item;
JsonObject jo = item.internalSerialize(item, gson);
jo.add("origin", gson.toJsonTree(item.containerOrigin));
jo.add("origin", gson.toJsonTree(item.origin));
return jo;
}

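
A likely motivation for renaming the containerOrigin fields and variables to origin is that Gson's default reflection-based mapping uses the Java field name as the JSON key, while the custom serialize/deserialize pair above explicitly reads and writes the key "origin"; renaming the field keeps the reflected name consistent with that key. A standalone illustration of that Gson behavior (not code from this repository):

import com.google.gson.Gson;

// Minimal demonstration of Gson's field-name-to-JSON-key mapping.
class OriginHolder {
    String origin = "storage:/path"; // serialized under the key "origin"
}

public class GsonFieldNameDemo {
    public static void main(String[] args) {
        // Prints: {"origin":"storage:/path"}
        System.out.println(new Gson().toJson(new OriginHolder()));
    }
}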
12 changes: 6 additions & 6 deletions app/hd3gtv/mydmam/metadata/container/ContainerOperations.java
@@ -498,14 +498,14 @@ private static class HitPurge implements ElastisearchCrawlerHit {
elementcount_by_storage = new HashMap<String, Long>();
}

- boolean containsStorageInBase(ContainerOrigin containerOrigin) {
- if (elementcount_by_storage.containsKey(containerOrigin.storage) == false) {
- elementcount_by_storage.put(containerOrigin.storage, explorer.countStorageContentElements(containerOrigin.storage));
- if (elementcount_by_storage.get(containerOrigin.storage) == 0) {
- Log2.log.info("Missing storage item in datatabase", new Log2Dump("storagename", containerOrigin.storage));
+ boolean containsStorageInBase(ContainerOrigin origin) {
+ if (elementcount_by_storage.containsKey(origin.storage) == false) {
+ elementcount_by_storage.put(origin.storage, explorer.countStorageContentElements(origin.storage));
+ if (elementcount_by_storage.get(origin.storage) == 0) {
+ Log2.log.info("Missing storage item in datatabase", new Log2Dump("storagename", origin.storage));
}
}
- return elementcount_by_storage.get(containerOrigin.storage) > 0;
+ return elementcount_by_storage.get(origin.storage) > 0;
}

public boolean onFoundHit(SearchHit hit) {
18 changes: 9 additions & 9 deletions app/hd3gtv/mydmam/metadata/container/ContainerOrigin.java
@@ -55,15 +55,15 @@ public String toString() {
}

public static ContainerOrigin fromSource(SourcePathIndexerElement element, File physical_source) {
- ContainerOrigin containerOrigin = new ContainerOrigin();
- containerOrigin.date = element.date;
- containerOrigin.key = element.prepare_key();
- containerOrigin.size = element.size;
- containerOrigin.storage = element.storagename;
- containerOrigin.currentpath = element.currentpath;
- containerOrigin.pathindex_element = element;
- containerOrigin.physical_source = physical_source;
- return containerOrigin;
+ ContainerOrigin origin = new ContainerOrigin();
+ origin.date = element.date;
+ origin.key = element.prepare_key();
+ origin.size = element.size;
+ origin.storage = element.storagename;
+ origin.currentpath = element.currentpath;
+ origin.pathindex_element = element;
+ origin.physical_source = physical_source;
+ return origin;
}

/**
@@ -23,6 +23,7 @@
import hd3gtv.mydmam.manager.JobNG;
import hd3gtv.mydmam.manager.JobProgression;
import hd3gtv.mydmam.metadata.MetadataIndexer;
+ import hd3gtv.mydmam.metadata.MetadataIndexingOperation.MetadataIndexingLimit;
import hd3gtv.mydmam.pathindexing.Explorer;
import hd3gtv.mydmam.pathindexing.SourcePathIndexerElement;
import hd3gtv.mydmam.useraction.UACapability;
@@ -99,6 +100,9 @@ public void process(JobProgression progression, UserProfile userprofile, UAConfi
if (conf.limit_to_recent != null) {
min_index_date = conf.limit_to_recent.toDate();
}
+ if (conf.limit_processing == null) {
+ conf.limit_processing = MetadataIndexingLimit.NOLIMITS;
+ }

ArrayList<JobNG> new_created_jobs = new ArrayList<JobNG>();

@@ -137,7 +141,7 @@ public void process(JobProgression progression, UserProfile userprofile, UAConfi

indexer = new MetadataIndexer(true);
indexer.setLimitProcessing(conf.limit_processing);
- new_created_jobs.addAll(indexer.process(item.storagename, item.currentpath, min_index_date));
+ new_created_jobs.addAll(indexer.process(item, min_index_date));
}

if (new_created_jobs.isEmpty() == false) {
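
Read together, the guard on limit_processing and the new process() signature used by this user-action job amount to the following sketch (a paraphrase of the hunks above, assuming conf, item, min_index_date and new_created_jobs are set up as shown there):

// Paraphrase of the user-action indexing path after this commit.
if (conf.limit_processing == null) {
    // Default added by this commit when the request does not set a limit.
    conf.limit_processing = MetadataIndexingLimit.NOLIMITS;
}
MetadataIndexer indexer = new MetadataIndexer(true);
indexer.setLimitProcessing(conf.limit_processing);
// The resolved SourcePathIndexerElement is handed over directly, so single files
// and directories share the same MetadataIndexer.process() entry point.
new_created_jobs.addAll(indexer.process(item, min_index_date));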
