Work towards improving mapper performance using batched requests - some regression though
Aklakan committed Jul 17, 2017
1 parent 06d7cf3 commit a4aabf0
Showing 10 changed files with 489 additions and 213 deletions.
@@ -8,16 +8,17 @@
import java.util.function.Function;
import java.util.stream.Collectors;

import javax.persistence.EntityManager;

import org.aksw.jena_sparql_api.beans.model.EntityModel;
import org.aksw.jena_sparql_api.beans.model.EntityOps;
import org.aksw.jena_sparql_api.beans.model.PropertyModel;
import org.aksw.jena_sparql_api.core.SparqlService;
import org.aksw.jena_sparql_api.mapper.annotation.DefaultIri;
import org.aksw.jena_sparql_api.mapper.annotation.Iri;
import org.aksw.jena_sparql_api.mapper.impl.engine.RdfMapperEngine;
import org.aksw.jena_sparql_api.mapper.impl.engine.RdfMapperEngineImpl;
import org.aksw.jena_sparql_api.mapper.impl.type.RdfTypeFactoryImpl;
import org.aksw.jena_sparql_api.mapper.impl.type.RdfTypeMap;
import org.aksw.jena_sparql_api.mapper.jpa.core.SparqlEntityManagerFactory;
import org.aksw.jena_sparql_api.mapper.model.RdfType;
import org.aksw.jena_sparql_api.mapper.util.BeanUtils;
import org.aksw.jena_sparql_api.sparql.ext.datatypes.RDFDatatypeDate;
@@ -35,52 +36,52 @@
public class SpringBatchMappings {


public static <I, O> Function<I, O> memoize(Function<I, O> fn) {
Map<I, O> cache = new HashMap<>();

Function<I, O> result = (i) -> cache.computeIfAbsent(i, fn);
return result;
}
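
// Hypothetical illustration, not part of this commit: the memoize helper above
// could, for example, cache TypeMapper datatype lookups so that repeated
// requests for the same Java class are served from the map instead of being
// recomputed. Method and variable names below are illustrative only.
public static RDFDatatype memoizedDatatypeLookupExample() {
Function<Class<?>, RDFDatatype> datatypeLookup =
memoize(clazz -> TypeMapper.getInstance().getTypeByClass(clazz));

datatypeLookup.apply(Long.class); // first call computes the value and caches it
return datatypeLookup.apply(Long.class); // second call is answered from the cache
}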


public static void test() {
// TypeDeciderImpl typeDecider = new TypeDeciderImpl();
//typeDecider.addMapping(
//typeDecider.exposeShape(rsb);
// ResourceShapeBuilder rsb = new ResourceShapeBuilder();
// typeDecider.exposeShape(rsb);
// ResourceShape rs = rsb.getResourceShape();
// ResourceShape.fetchData(qef, rs, NodeFactory.createURI("http://ex.org/11"));
//

ConversionServiceFactoryBean bean = new ConversionServiceFactoryBean();
bean.afterPropertiesSet();

ConversionService cs = bean.getObject();

// cs.convert(source, targetType);
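// Hypothetical example, not in the original code: with Spring's default
// converters registered by ConversionServiceFactoryBean, the commented-out
// convert call above could look like this (String to Long conversion).
Long converted = cs.convert("42", Long.class);
System.out.println("Converted: " + converted + " (" + converted.getClass() + ")");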

Long value = 1l;

TypeMapper tm = TypeMapper.getInstance();
RDFDatatype dt = tm.getTypeByClass(value.getClass());

//Object y = dt.cannonicalise(value);
//dt.getJavaClass()


String lex = dt.unparse(value);
Node node = NodeFactory.createLiteral(lex, dt);
Object o = dt.parse(lex);
System.out.println(o.getClass());

Object x = node.getLiteralValue();
System.out.println("Got value: " + x.getClass() + " " + node);

}


- public static void main(String[] args) {
+ public static void main(String[] args) throws Exception {
// ResourceShapeBuilder rsb = new ResourceShapeBuilder();
// ResourceShapeImpl rs = new ResourceShapeImpl();
//
Expand All @@ -90,13 +91,13 @@ public static void main(String[] args) {



EntityModel.createDefaultModel(Boolean.class, null);


ConversionServiceFactoryBean bean = new ConversionServiceFactoryBean();
bean.afterPropertiesSet();

ConversionService conversionService = bean.getObject();


// ExecutionContext ecx = new ExecutionContext();
@@ -235,7 +236,13 @@ public static void main(String[] args) {

SparqlService sparqlService = FluentSparqlService.forModel().create();

- RdfMapperEngine engine = new RdfMapperEngineImpl(sparqlService, typeFactory);
+ EntityManager em = SparqlEntityManagerFactory.create()
+ .setSparqlService(sparqlService)
+ .setTypeFactory(typeFactory)
+ .getObject();
+
+
+ // RdfMapperEngineBatched engine = new RdfMapperEngineImpl(sparqlService, typeFactory);
//engine.find(clazz, rootNode);
JobExecution entity = new JobExecution(11l);
ExecutionContext ec = new ExecutionContext();
@@ -245,24 +252,24 @@ public static void main(String[] args) {
entity.setExecutionContext(ec);


- engine.merge(entity);
+ em.merge(entity);
//engine.emitTriples(graph, entity);

Model model = sparqlService.getQueryExecutionFactory().createQueryExecution("CONSTRUCT WHERE { ?s ?p ?o }").execConstruct();
System.out.println("Graph:");
model.write(System.out, "TTL");

- JobExecution lr = engine.find(JobExecution.class, NodeFactory.createURI("http://ex.org/11"));
+ JobExecution lr = em.find(JobExecution.class, NodeFactory.createURI("http://ex.org/11"));
System.out.println("Lookup result: " + lr);

//lr.setVersion(111);
- engine.merge(lr);
+ em.merge(lr);


System.out.println("Graph:");
sparqlService.getQueryExecutionFactory().createQueryExecution("CONSTRUCT WHERE { ?s ?p ?o }").execConstruct().write(System.out, "TTL");

System.out.println("Lookup result: " + engine.find(JobExecution.class, NodeFactory.createURI("http://ex.org/11")));
System.out.println("Lookup result: " + em.find(JobExecution.class, NodeFactory.createURI("http://ex.org/11")));


//EntityManagerJena em = new EntityManagerJena(engine)
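
// Hypothetical sketch, not part of this commit: with a set-based engine such as
// the RdfMapperEngineBatched interface introduced further down in this diff,
// several root nodes could be resolved in a single request instead of one
// em.find call per node. 'batchedEngine' is illustrative and not defined here.
// Map<Node, JobExecution> batch = batchedEngine.find(JobExecution.class,
//         Arrays.asList(NodeFactory.createURI("http://ex.org/11"),
//                 NodeFactory.createURI("http://ex.org/12")));
// batch.forEach((node, job) -> System.out.println(node + " -> " + job));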
@@ -4,17 +4,7 @@

import org.aksw.jena_sparql_api.concepts.Concept;
import org.aksw.jena_sparql_api.concepts.ConceptUtils;
import org.aksw.jena_sparql_api.utils.Vars;

import com.google.common.base.Functions;
import org.apache.jena.graph.Node;
import org.apache.jena.sparql.expr.E_OneOf;
import org.apache.jena.sparql.expr.Expr;
import org.apache.jena.sparql.expr.ExprList;
import org.apache.jena.sparql.expr.ExprVar;
import org.apache.jena.sparql.expr.NodeValue;
import org.apache.jena.sparql.syntax.Element;
import org.apache.jena.sparql.syntax.ElementFilter;

public class LookupServiceListService<V>
implements LookupService<Node, V>
@@ -54,7 +54,7 @@ public class PathResolverImpl
* Similar to JPA's metamodel.
*
*/
- protected RdfMapperEngine mapperEngine;
+ protected RdfMapperEngineBatched mapperEngine;
//protected Function<String, PathResolver>

// The property name by this path resolver was reached - null for the root
@@ -72,7 +72,7 @@ public PathFragment getPathFragment() {
return pathFragment;
}

- public PathResolverImpl(PathFragment pathFragment, RdfMapperEngine mapperEngine, String reachingPropertyName, PathResolverImpl parent) {
+ public PathResolverImpl(PathFragment pathFragment, RdfMapperEngineBatched mapperEngine, String reachingPropertyName, PathResolverImpl parent) {
super();
this.pathFragment = pathFragment;
this.mapperEngine = mapperEngine;
@@ -95,7 +95,7 @@ public PathResolver resolve(RdfType rdfType, String propertyName) {
return result;
}

- public PathResolver resolve(RdfMapperEngine mapperEngine, PathFragment pathFragment, String propertyName) {
+ public PathResolver resolve(RdfMapperEngineBatched mapperEngine, PathFragment pathFragment, String propertyName) {
PathResolver result;

PathResolver tmp = pathFragment.getNextResolver();
@@ -1,6 +1,8 @@
package org.aksw.jena_sparql_api.mapper.impl.engine;

import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.aksw.jena_sparql_api.concepts.Concept;
import org.aksw.jena_sparql_api.core.SparqlService;
@@ -10,7 +12,12 @@
import org.apache.jena.graph.Node;
import org.apache.jena.sparql.core.Prologue;

- public interface RdfMapperEngine
+ /**
+ * TODO Remove all non-set based methods from the interface
+ * @author raven
+ *
+ */
+ public interface RdfMapperEngineBatched
{
SparqlService getSparqlService();

@@ -20,9 +27,13 @@ public interface RdfMapperEngine

TypeDecider getTypeDecider();

- <T> T find(Class<T> clazz, Node rootNode);
+ // <T> T find(Class<T> clazz, Node rootNode);

+ // <T> List<T> find(Class<T> clazz, List<Node> rootNodes);


<T> List<T> list(Class<T> clazz, Concept concept);
+ <T> Map<Node, T> find(Class<T> clazz, Collection<Node> rootNodes);


<T> T merge(T entity);
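
For orientation, here is a minimal sketch, not part of this commit, of how a set-based find like the one declared above could be answered with a single SPARQL request instead of one query per root node. The SparqlService accessors match those used elsewhere in this diff; the class name, the query shape, and the follow-up step of splitting the result per root node are assumptions rather than the committed implementation.

import java.util.Collection;
import java.util.stream.Collectors;

import org.aksw.jena_sparql_api.core.SparqlService;
import org.apache.jena.graph.Node;
import org.apache.jena.rdf.model.Model;

public class BatchedLookupSketch {

    // Fetch the triples of all requested root nodes with a single CONSTRUCT query.
    // A real implementation would still have to group the returned model per root
    // node and map each sub-graph to an entity of the requested class.
    public static Model fetchBatch(SparqlService sparqlService, Collection<Node> rootNodes) {
        String values = rootNodes.stream()
                .map(n -> "<" + n.getURI() + ">")
                .collect(Collectors.joining(" "));

        String queryStr = "CONSTRUCT { ?s ?p ?o } WHERE { VALUES ?s { " + values + " } ?s ?p ?o }";

        return sparqlService.getQueryExecutionFactory()
                .createQueryExecution(queryStr)
                .execConstruct();
    }
}

Batching this way trades many small queries for one larger round trip, which appears to be the performance angle the commit message refers to.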
