diff --git a/etc/i5.las2peer.connectors.webConnector.WebConnector.properties b/etc/i5.las2peer.connectors.webConnector.WebConnector.properties index 37eb8a4b..f5bc8498 100644 --- a/etc/i5.las2peer.connectors.webConnector.WebConnector.properties +++ b/etc/i5.las2peer.connectors.webConnector.WebConnector.properties @@ -9,4 +9,4 @@ crossOriginResourceMaxAge = 600 maxRequestBodySize = 300000000 enableCrossOriginResourceSharing = TRUE onlyLocalServices = TRUE -oidcProviders = https://auth.las2peer.org/o/oauth2,https://accounts.google.com +oidcProviders = https://auth.las2peer.org/auth/realms/main,https://accounts.google.com \ No newline at end of file diff --git a/gradle.properties b/gradle.properties index a78c7581..8ad59ad6 100644 --- a/gradle.properties +++ b/gradle.properties @@ -1,4 +1,4 @@ -core.version=1.2.2 +core.version=1.2.3 service.name=i5.las2peer.services.ocd service.class=ServiceClass service.version=1.0.0 diff --git a/rest_ocd_services/build.gradle b/rest_ocd_services/build.gradle index 4a6bff11..301a5eef 100644 --- a/rest_ocd_services/build.gradle +++ b/rest_ocd_services/build.gradle @@ -86,6 +86,7 @@ dependencies { //arangoDB Java driver implementation "com.arangodb:arangodb-java-driver:6.18.0" implementation "com.arangodb:jackson-dataformat-velocypack:3.0.1" + } configurations { diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/ServiceClass.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/ServiceClass.java index bf0362e7..9fe65296 100644 --- a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/ServiceClass.java +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/ServiceClass.java @@ -1,54 +1,12 @@ package i5.las2peer.services.ocd; -import java.io.*; -import java.net.HttpURLConnection; -import java.net.URLDecoder; -import java.time.LocalDate; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.LinkedList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.logging.Level; - -import javax.ws.rs.Consumes; -import javax.ws.rs.DELETE; -import javax.ws.rs.DefaultValue; -import javax.ws.rs.GET; -import javax.ws.rs.POST; -import javax.ws.rs.PUT; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; -import javax.ws.rs.core.Response; -import javax.ws.rs.core.Response.Status; - -import i5.las2peer.services.ocd.centrality.data.*; -import i5.las2peer.services.ocd.graphs.*; -import i5.las2peer.services.ocd.utils.*; -import i5.las2peer.services.ocd.utils.Error; -import org.apache.commons.lang3.NotImplementedException; -import org.apache.commons.math3.linear.RealMatrix; -import org.glassfish.jersey.media.multipart.FormDataParam; -import org.graphstream.algorithm.ConnectedComponents; -import org.json.simple.JSONObject; -import org.la4j.matrix.sparse.CCSMatrix; - import i5.las2peer.api.Context; import i5.las2peer.api.ManualDeployment; -import i5.las2peer.api.security.UserAgent; -import i5.las2peer.api.execution.ServiceInvocationException; //TODO: Check import i5.las2peer.api.logging.MonitoringEvent; -import i5.las2peer.logging.L2pLogger; +import i5.las2peer.api.security.UserAgent; import i5.las2peer.p2p.AgentNotRegisteredException; import i5.las2peer.restMapper.RESTService; import i5.las2peer.restMapper.annotations.ServicePath; -import i5.las2peer.execution.ExecutionContext; import 
i5.las2peer.services.ocd.adapters.centralityInput.CentralityInputFormat; import i5.las2peer.services.ocd.adapters.centralityOutput.CentralityOutputFormat; import i5.las2peer.services.ocd.adapters.coverInput.CoverInputFormat; @@ -56,61 +14,26 @@ import i5.las2peer.services.ocd.adapters.graphInput.GraphInputFormat; import i5.las2peer.services.ocd.adapters.graphOutput.GraphOutputFormat; import i5.las2peer.services.ocd.adapters.visualOutput.VisualOutputFormat; -import i5.las2peer.services.ocd.algorithms.ContentBasedWeightingAlgorithm; -import i5.las2peer.services.ocd.algorithms.OcdAlgorithm; -import i5.las2peer.services.ocd.algorithms.OcdMultiplexAlgorithm; -import i5.las2peer.services.ocd.algorithms.OcdAlgorithmFactory; -import i5.las2peer.services.ocd.algorithms.OcdMultiplexAlgorithmFactory; +import i5.las2peer.services.ocd.algorithms.*; import i5.las2peer.services.ocd.benchmarks.GroundTruthBenchmark; import i5.las2peer.services.ocd.benchmarks.OcdBenchmarkFactory; -import i5.las2peer.services.ocd.centrality.data.CentralityCreationLog; -import i5.las2peer.services.ocd.centrality.data.CentralityCreationType; -import i5.las2peer.services.ocd.centrality.data.CentralityMeasureType; -import i5.las2peer.services.ocd.centrality.data.CentralitySimulationType; -import i5.las2peer.services.ocd.centrality.data.CentralityMap; -import i5.las2peer.services.ocd.centrality.data.CentralityMapId; +import i5.las2peer.services.ocd.centrality.data.*; import i5.las2peer.services.ocd.centrality.evaluation.CorrelationCoefficient; import i5.las2peer.services.ocd.centrality.evaluation.StatisticalProcessor; -import i5.las2peer.services.ocd.centrality.utils.CentralityAlgorithm; -import i5.las2peer.services.ocd.centrality.utils.CentralityAlgorithmFactory; -import i5.las2peer.services.ocd.centrality.utils.CentralitySimulation; -import i5.las2peer.services.ocd.centrality.utils.CentralitySimulationFactory; -import i5.las2peer.services.ocd.centrality.utils.MatrixOperations; +import i5.las2peer.services.ocd.centrality.utils.*; import i5.las2peer.services.ocd.cooperation.data.mapping.MappingFactory; import i5.las2peer.services.ocd.cooperation.data.mapping.SimulationGroupSetMapping; import i5.las2peer.services.ocd.cooperation.data.mapping.SimulationSeriesSetMapping; -import i5.las2peer.services.ocd.cooperation.data.simulation.SimulationSeries; -import i5.las2peer.services.ocd.cooperation.data.simulation.SimulationSeriesGroup; -import i5.las2peer.services.ocd.cooperation.data.simulation.SimulationSeriesGroupMetaData; -import i5.las2peer.services.ocd.cooperation.data.simulation.SimulationSeriesMetaData; -import i5.las2peer.services.ocd.cooperation.data.simulation.SimulationSeriesParameters; +import i5.las2peer.services.ocd.cooperation.data.simulation.*; import i5.las2peer.services.ocd.cooperation.simulation.SimulationBuilder; import i5.las2peer.services.ocd.cooperation.simulation.dynamic.DynamicType; import i5.las2peer.services.ocd.cooperation.simulation.game.GameType; import i5.las2peer.services.ocd.cooperation.simulation.termination.ConditionType; -import i5.las2peer.services.ocd.graphs.Cover; -import i5.las2peer.services.ocd.graphs.CoverCreationLog; -import i5.las2peer.services.ocd.graphs.CoverCreationType; -import i5.las2peer.services.ocd.graphs.CoverId; -import i5.las2peer.services.ocd.graphs.CustomGraph; -import i5.las2peer.services.ocd.graphs.CustomGraphId; -import i5.las2peer.services.ocd.graphs.GraphCreationLog; -import i5.las2peer.services.ocd.graphs.GraphCreationType; -import 
i5.las2peer.services.ocd.graphs.GraphProcessor; -import i5.las2peer.services.ocd.graphs.GraphType; +import i5.las2peer.services.ocd.graphs.*; import i5.las2peer.services.ocd.graphs.properties.GraphProperty; -import i5.las2peer.services.ocd.metrics.ExecutionTime; -import i5.las2peer.services.ocd.metrics.KnowledgeDrivenMeasure; -import i5.las2peer.services.ocd.metrics.NewmanModularityCombined; -import i5.las2peer.services.ocd.metrics.OcdMetricFactory; -import i5.las2peer.services.ocd.metrics.OcdMetricLog; -import i5.las2peer.services.ocd.metrics.OcdMetricLogId; -import i5.las2peer.services.ocd.metrics.OcdMetricType; -import i5.las2peer.services.ocd.metrics.StatisticalMeasure; +import i5.las2peer.services.ocd.metrics.*; import i5.las2peer.services.ocd.utils.Error; -import i5.las2peer.services.ocd.utils.ExecutionStatus; -import i5.las2peer.services.ocd.utils.InvocationHandler; -import i5.las2peer.services.ocd.utils.ThreadHandler; +import i5.las2peer.services.ocd.utils.*; import i5.las2peer.services.ocd.viewer.LayoutHandler; import i5.las2peer.services.ocd.viewer.ViewerRequestHandler; import i5.las2peer.services.ocd.viewer.layouters.GraphLayoutType; @@ -118,14 +41,24 @@ import i5.las2peer.services.ocd.viewer.painters.CoverPainterFactory; import i5.las2peer.services.ocd.viewer.painters.CoverPaintingType; import i5.las2peer.services.ocd.viewer.utils.CentralityVisualizationType; -import io.swagger.annotations.Api; -import io.swagger.annotations.ApiOperation; -import io.swagger.annotations.ApiResponse; -import io.swagger.annotations.ApiResponses; -import io.swagger.annotations.Contact; -import io.swagger.annotations.Info; -import io.swagger.annotations.License; -import io.swagger.annotations.SwaggerDefinition; +import io.swagger.annotations.*; +import org.apache.commons.lang3.NotImplementedException; +import org.apache.commons.math3.linear.RealMatrix; +import org.glassfish.jersey.media.multipart.FormDataParam; +import org.graphstream.algorithm.ConnectedComponents; +import org.json.simple.JSONObject; +import org.la4j.matrix.sparse.CCSMatrix; + +import javax.ws.rs.*; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Response; +import javax.ws.rs.core.Response.Status; +import java.io.*; +import java.net.HttpURLConnection; +import java.net.URLDecoder; +import java.time.LocalDate; +import java.util.*; +import java.util.logging.Level; /** @@ -199,10 +132,10 @@ public ServiceClass() { */ private final static OcdAlgorithmFactory algorithmFactory = new OcdAlgorithmFactory(); - /** - * The factory used for creating algorithms. - */ - private final static OcdMultiplexAlgorithmFactory multiplexAlgorithmFactory = new OcdMultiplexAlgorithmFactory(); +// /** +// * The factory used for creating algorithms. +// */ +// private final static OcdMultiplexAlgorithmFactory multiplexAlgorithmFactory = new OcdMultiplexAlgorithmFactory(); /** * The factory used for creating centrality simulations. 
@@ -454,6 +387,7 @@ public Response createGraph(@DefaultValue("unnamed") @QueryParam("name") String graph.removeType(GraphType.DIRECTED); } try { + //store layers for (CustomGraph customGraph : graph.getCustomGraphs().values()) { customGraph.setUserName(username); customGraph.setCreationMethod(log); @@ -461,7 +395,18 @@ public Response createGraph(@DefaultValue("unnamed") @QueryParam("name") String database.storeGraph(customGraph); graph.addLayerKey(customGraph.getKey()); } + //store representive graph + CustomGraph representiveGraph = graph.getRepresentiveGraph(); + representiveGraph.setName(URLDecoder.decode(nameStr, "UTF-8")); + representiveGraph.setUserName(username); + representiveGraph.setCreationMethod(log); + representiveGraph.addType(GraphType.MULTIPLEX); + database.storeGraph(representiveGraph); + graph.setRepresentiveKey(representiveGraph.getKey()); + + //store multiplex graph database.storeGraph(graph); + generalLogger.getLogger().log(Level.INFO, "user " + username + ": import graph " + graph.getKey() + " in format " + graphInputFormatStr); } catch (Exception e) { return requestHandler.writeError(Error.INTERNAL, "Could not store graph"); @@ -844,6 +789,7 @@ public Response getLayersOfMultiplexGraph(@DefaultValue("0") @QueryParam("keyMul List queryResults; queryResults = database.getMultiplexGraphMetaDataOfLayersEfficiently(username, keyMultiplexStr); String responseStr = requestHandler.writeGraphMetasEfficiently(queryResults); + System.out.println(responseStr); return Response.ok(responseStr).build(); } catch (Exception e) { requestHandler.log(Level.SEVERE, "", e); @@ -1556,12 +1502,14 @@ public Response runABACUSAlgorithm(@FormDataParam("graphId") String graphIdStr, } Map<String, String> multiplexParameters; - OcdMultiplexAlgorithm multiplexAlgorithm; + OcdAlgorithm multiplexAlgorithm; + //OcdMultiplexAlgorithm multiplexAlgorithm; Map<String, String> parameters; OcdAlgorithm algorithm; try { multiplexParameters = requestHandler.parseParameters(multiplexContent); - multiplexAlgorithm = multiplexAlgorithmFactory.getInstance(multiplexAlgorithmType, new HashMap<String, String>(multiplexParameters)); + multiplexAlgorithm = algorithmFactory.getInstance(multiplexAlgorithmType, new HashMap<String, String>(multiplexParameters)); + //multiplexAlgorithm = multiplexAlgorithmFactory.getInstance(multiplexAlgorithmType, new HashMap<String, String>(multiplexParameters)); parameters = requestHandler.parseParameters(content); algorithm = algorithmFactory.getInstance(algorithmType, new HashMap<String, String>(parameters)); } catch (Exception e) { @@ -1621,9 +1569,11 @@ public Response runABACUSAlgorithm(@FormDataParam("graphId") String graphIdStr, generalLogger.getLogger().log(Level.INFO, "user " + username + ": run " + algorithm.getClass().getSimpleName() + " on graph " + customGraph.getKey() + ". 
Created cover " + cover.getKey()); } } - CustomGraph coverGraph = new CustomGraph(); - //set nodes of coverGraph to nodes of multiplexgraph - cover = new Cover(coverGraph, new CCSMatrix(graph.getNodeCount(), 0)); + /* + * Run multiplex algorithm + */ + CustomGraph representiveGraph = graph.getRepresentiveGraph(); + cover = new Cover(representiveGraph, new CCSMatrix(graph.getNodeCount(), 0)); log = new CoverCreationLog(multiplexAlgorithmType, multiplexParameters, multiplexAlgorithm.compatibleGraphTypes()); cover.setCreationMethod(log); cover.setName(URLDecoder.decode(nameStr, "UTF-8")); @@ -1631,7 +1581,8 @@ public Response runABACUSAlgorithm(@FormDataParam("graphId") String graphIdStr, /* * Registers and starts multiplex algorithm */ - threadHandler.runMultiplexAlgorithm(cover, multiplexAlgorithm, componentNodeCountFilter); + //threadHandler.runMultiplexAlgorithm(cover, multiplexAlgorithm, componentNodeCountFilter); + threadHandler.runAlgorithm(cover, multiplexAlgorithm, componentNodeCountFilter); generalLogger.getLogger().log(Level.INFO, "user " + username + ": run " + multiplexAlgorithm.getClass().getSimpleName() + " on graph " + graph.getKey() + ". Created cover " + cover.getKey()); } @@ -3265,8 +3216,8 @@ public Response getMultiplexAlgorithmDefaultParams(@PathParam("CoverCreationType return requestHandler.writeError(Error.PARAMETER_INVALID, "Specified cover creation type is not instantiatable: " + creationType.name()); } else { - OcdMultiplexAlgorithm defaultInstance = multiplexAlgorithmFactory.getInstance(creationType, - new HashMap<String, String>()); + //OcdMultiplexAlgorithm defaultInstance = multiplexAlgorithmFactory.getInstance(creationType, new HashMap<String, String>()); + OcdAlgorithm defaultInstance = algorithmFactory.getInstance(creationType, new HashMap<String, String>()); //generalLogger.getLogger().log(Level.INFO, "user " + username + ": get default parameters of " + coverCreationTypeStr ); return Response.ok(requestHandler.writeParameters(defaultInstance.getParameters())).build(); } @@ -3565,10 +3516,10 @@ public Response getMultiplexAlgorithmNames() { @GET @Path("testforbot") - @Produces(MediaType.TEXT_XML) + @Produces(MediaType.APPLICATION_JSON) @ApiResponses(value = { @ApiResponse(code = 200, message = "Success"), @ApiResponse(code = 401, message = "Unauthorized") }) - @ApiOperation(tags = {"names"}, value = "Algorithms information", notes = "Returns all algorithm type names.") + @ApiOperation(tags = {"testforbot"}, value = "A test method for the bot", notes = "Returns a success message.") public Response testforbot() { System.out.println("testforbot"); String jsonResponse = "{\"text\": \"test successful\", \"closeContext\": \"\"}"; diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexUnweightedEdgeListGraphInputAdapter.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexUnweightedEdgeListGraphInputAdapter.java index d344068f..14aa9061 100644 --- a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexUnweightedEdgeListGraphInputAdapter.java +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexUnweightedEdgeListGraphInputAdapter.java @@ -80,13 +80,31 @@ public MultiplexGraph readGraph() throws AdapterException { graph.setEdgeWeight(edge, 1); line = Adapters.readLine(reader); - totalNumberOfEdges++; totalNumberOfNodes.add(sourceNodeName); totalNumberOfNodes.add(targetNodeName); + totalNumberOfEdges++; } if(line.size() > 0) { throw new
AdapterException("Invalid input format"); } + //make sure all nodes appear on all layers/in all CustomGraphs + for (CustomGraph graph : multiplexGraph.getCustomGraphs().values()) { + Map<String, Node> reverseNodeNames = graphReverseNodeNames.get(graph.getName()); + for(String nodeName: totalNumberOfNodes) { + if (!reverseNodeNames.containsKey(nodeName)) { + Node node = graph.addNode(nodeName); + graph.setNodeName(node, nodeName); + } + } + } + //add representive graph + CustomGraph representiveGraph = new CustomGraph(); + for(String nodeName: totalNumberOfNodes) { + Node node = representiveGraph.addNode(nodeName); + representiveGraph.setNodeName(node, nodeName); + } + multiplexGraph.setRepresentiveGraph(representiveGraph); + multiplexGraph.setNodeCount(totalNumberOfNodes.size()); multiplexGraph.setEdgeCount(totalNumberOfEdges); return multiplexGraph; diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexWeightedEdgeListGraphInputAdapter.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexWeightedEdgeListGraphInputAdapter.java index 2a84d6b0..d35dd167 100644 --- a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexWeightedEdgeListGraphInputAdapter.java +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexWeightedEdgeListGraphInputAdapter.java @@ -58,6 +58,7 @@ public MultiplexGraph readGraph() throws AdapterException { graphReverseNodeNames.put(layerName, new HashMap<String, Node>()); } CustomGraph graph = multiplexGraph.getCustomGraphs().get(layerName); + graph.setName(layerName); Map<String, Node> reverseNodeNames = graphReverseNodeNames.get(layerName); //read edge @@ -94,6 +95,25 @@ public MultiplexGraph readGraph() throws AdapterException { if(line.size() > 0) { throw new AdapterException("Invalid input format"); } + //make sure all nodes appear on all layers/in all CustomGraphs + for (CustomGraph graph : multiplexGraph.getCustomGraphs().values()) { + String layerName = graph.getName(); + Map<String, Node> reverseNodeNames = graphReverseNodeNames.get(layerName); + for(String nodeName: totalNumberOfNodes) { + if (!reverseNodeNames.containsKey(nodeName)) { + Node node = graph.addNode(nodeName); + graph.setNodeName(node, nodeName); + } + } + } + //add representive graph + CustomGraph representiveGraph = new CustomGraph(); + for(String nodeName: totalNumberOfNodes) { + Node node = representiveGraph.addNode(nodeName); + representiveGraph.setNodeName(node, nodeName); + } + multiplexGraph.setRepresentiveGraph(representiveGraph); + multiplexGraph.setNodeCount(totalNumberOfNodes.size()); multiplexGraph.setEdgeCount(totalNumberOfEdges); return multiplexGraph; diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/algorithms/ABACUSAlgorithm.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/algorithms/ABACUSAlgorithm.java index edff551c..470b3d78 100644 --- a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/algorithms/ABACUSAlgorithm.java +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/algorithms/ABACUSAlgorithm.java @@ -1,20 +1,14 @@ package i5.las2peer.services.ocd.algorithms; import i5.las2peer.services.ocd.graphs.*; -import org.graphstream.graph.Edge; -import org.graphstream.graph.Node; -import org.graphstream.graph.implementations.MultiNode; +import i5.las2peer.services.ocd.spmf.AlgoAprioriClose; +import i5.las2peer.services.ocd.spmf.Itemset; +import i5.las2peer.services.ocd.spmf.Itemsets; +import i5.las2peer.services.ocd.utils.Database;
import org.la4j.matrix.Matrix; import org.la4j.matrix.dense.Basic2DMatrix; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Set; -//import ca.pfv.spmf.algorithms.frequentpatterns.apriori_close.AlgoAprioriClose; +import java.util.*; /** @@ -22,7 +16,12 @@ * https://doi.org/10.1109/ACCESS.2018.2879648 * Handles undirected and unweighted graphs. */ -public class ABACUSAlgorithm implements OcdMultiplexAlgorithm { +public class ABACUSAlgorithm implements OcdAlgorithm { + + /** + * The entity handler used for accessing stored entities. + */ + private static Database database; /** * The threshold value used as input for the frequent closed item set mining algorithm @@ -40,14 +39,43 @@ public class ABACUSAlgorithm implements OcdMultiplexAlgorithm { * their default values. */ public ABACUSAlgorithm() { + database = new Database(false); } @Override - public Cover detectOverlappingCommunities(MultiplexGraph graph) throws InterruptedException { //run FCIM on results - Matrix membershipMatrix = new Basic2DMatrix(0,0); - CustomGraph coverGraph = new CustomGraph(); - return new Cover(coverGraph, membershipMatrix); + public Cover detectOverlappingCommunities(CustomGraph representiveGraph) throws InterruptedException { + Map<Integer, List<Integer>> transactions = new HashMap<>(); + + for (Cover cover : database.getLayerCovers(representiveGraph.getKey())) { + for (Community community : cover.getCommunities()) { + String communityKey = community.getKey(); + List<Integer> memberIndices = community.getMemberIndices(); + for(Integer memberIndex : memberIndices) { + if (!transactions.containsKey(memberIndex)){ + transactions.put(memberIndex, new ArrayList<Integer>()); + } + transactions.get(memberIndex).add(Integer.valueOf(communityKey)); + } + } + } + //the mining step assumes the items of each transaction to be sorted in ascending order + for (List<Integer> transaction : transactions.values()) { + Collections.sort(transaction); + } + + AlgoAprioriClose algo = new AlgoAprioriClose(); + Itemsets itemsets = algo.runAlgorithm(this.threshold, transactions); + //each frequent closed item set of community keys becomes one community of the resulting cover + int communityCount = 0; + for (List<Itemset> level : itemsets.getLevels()) { + communityCount += level.size(); + } + Matrix membershipMatrix = new Basic2DMatrix(representiveGraph.getNodeCount(), communityCount); + int communityIndex = 0; + for (List<Itemset> level : itemsets.getLevels()) { + for (Itemset itemset : level) { + //a node belongs to the community if its transaction contains all items of the item set + for (Map.Entry<Integer, List<Integer>> entry : transactions.entrySet()) { + boolean supportsItemset = true; + for (int item : itemset.getItems()) { + if (!entry.getValue().contains(item)) { + supportsItemset = false; + break; + } + } + if (supportsItemset) { + membershipMatrix.set(entry.getKey(), communityIndex, 1); + } + } + communityIndex++; + } + } + return new Cover(representiveGraph, membershipMatrix); } @Override diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/algorithms/OcdAlgorithmExecutor.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/algorithms/OcdAlgorithmExecutor.java index b01db0d0..05f6e64c 100644 --- a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/algorithms/OcdAlgorithmExecutor.java +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/algorithms/OcdAlgorithmExecutor.java @@ -1,25 +1,19 @@ package i5.las2peer.services.ocd.algorithms; import i5.las2peer.services.ocd.algorithms.utils.OcdAlgorithmException; -import i5.las2peer.services.ocd.graphs.Cover; -import i5.las2peer.services.ocd.graphs.CoverCreationLog; -import i5.las2peer.services.ocd.graphs.CoverCreationType; -import i5.las2peer.services.ocd.graphs.CustomGraph; -import i5.las2peer.services.ocd.graphs.GraphProcessor; +import i5.las2peer.services.ocd.graphs.*; import i5.las2peer.services.ocd.metrics.ExecutionTime; import i5.las2peer.services.ocd.metrics.OcdMetricException; import 
i5.las2peer.services.ocd.utils.ExecutionStatus; import i5.las2peer.services.ocd.utils.Pair; +import org.graphstream.graph.Node; +import org.la4j.matrix.Matrix; +import org.la4j.matrix.sparse.CCSMatrix; import java.util.ArrayList; import java.util.List; import java.util.Map; -import org.la4j.matrix.Matrix; -import org.la4j.matrix.sparse.CCSMatrix; - -import org.graphstream.graph.Node; - /** * Manages the execution of an OcdAlgorithm. * @author Sebastian @@ -66,6 +60,45 @@ public Cover execute(CustomGraph graph, OcdAlgorithm algorithm, int componentNod return cover; } } + +// /** +// * Calculates a cover by executing an ocd algorithm on a graph. +// * The algorithm is run on each weakly connected component separately. +// * Small components are automatically considered to be one community. +// * @param graph The graph. +// * @param algorithm The algorithm. +// * @param componentNodeCountFilter Weakly connected components of a size +// * lower than the filter will automatically be considered a single community. +// * @return A cover of the graph calculated by the algorithm. +// * @throws OcdAlgorithmException In case of an algorithm failure. +// * @throws InterruptedException In case of an algorithm interrupt. +// * @throws OcdMetricException if the metric execution failed +// */ +// public Cover executeMultiplex(MultiplexGraph graph, OcdMultiplexAlgorithm algorithm, int componentNodeCountFilter) throws OcdAlgorithmException, InterruptedException, OcdMetricException { +// MultiplexGraph graphCopy = new MultiplexGraph(graph); +// GraphProcessor processor = new GraphProcessor(); +// processor.makeCompatible(graphCopy, algorithm.compatibleGraphTypes()); +// if(algorithm.getAlgorithmType().toString().equalsIgnoreCase(CoverCreationType.SIGNED_PROBABILISTIC_MIXTURE_ALGORITHM.toString()) || algorithm.getAlgorithmType().toString().equalsIgnoreCase(CoverCreationType.WORD_CLUSTERING_REF_ALGORITHM.toString()) || algorithm.getAlgorithmType().toString().equalsIgnoreCase(CoverCreationType.COST_FUNC_OPT_CLUSTERING_ALGORITHM.toString()) || algorithm.getAlgorithmType().toString().equalsIgnoreCase(CoverCreationType.LOCAL_SPECTRAL_CLUSTERING_ALGORITHM.toString()) || algorithm.getAlgorithmType().toString().equalsIgnoreCase(CoverCreationType.LOUVAIN_ALGORITHM.toString())){ +// ExecutionTime executionTime = new ExecutionTime(); +// Cover cover = algorithm.detectOverlappingCommunities(graph); +// cover.setCreationMethod(new CoverCreationLog(algorithm.getAlgorithmType(), algorithm.getParameters(), algorithm.compatibleGraphTypes())); +// cover.getCreationMethod().setStatus(ExecutionStatus.COMPLETED); +// executionTime.setCoverExecutionTime(cover); +// return cover; +// }else{ +// List<Pair<CustomGraph, Map<Node, Node>>> components; +// List<Pair<Cover, Map<Node, Node>>> componentCovers; +// components = processor.divideIntoConnectedComponents(graphCopy); +// ExecutionTime executionTime = new ExecutionTime(); +// componentCovers = calculateComponentCovers(components, algorithm, componentNodeCountFilter, executionTime); +// Cover coverCopy = processor.mergeComponentCovers(graphCopy, componentCovers); +// Cover cover = new Cover(graph, coverCopy.getMemberships()); +// cover.setCreationMethod(coverCopy.getCreationMethod()); +// cover.getCreationMethod().setStatus(ExecutionStatus.COMPLETED); +// executionTime.setCoverExecutionTime(cover); +// return cover; +// } +// } /** * Calculates the cover of each connected component.
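
Usage sketch (illustrative only, not part of the patch): the mining step that the reworked ABACUSAlgorithm delegates to can be exercised in isolation. The example below assumes only the spmf classes introduced by this diff (AlgoAprioriClose, Itemsets, Itemset); the transaction contents are invented sample data.

import i5.las2peer.services.ocd.spmf.AlgoAprioriClose;
import i5.las2peer.services.ocd.spmf.Itemset;
import i5.las2peer.services.ocd.spmf.Itemsets;
import java.util.*;

public class AbacusMiningSketch {
    public static void main(String[] args) {
        // node index -> keys of the layer communities containing that node,
        // sorted ascending as the support counting of AlgoAprioriClose expects
        Map<Integer, List<Integer>> transactions = new HashMap<>();
        transactions.put(0, Arrays.asList(1, 2)); // node 0 lies in communities 1 and 2
        transactions.put(1, Arrays.asList(1, 2)); // node 1 likewise
        transactions.put(2, Arrays.asList(2));    // node 2 lies only in community 2
        // mine closed item sets supported by at least 50% of the transactions;
        // here this yields {2} (support 3) and {1 2} (support 2)
        Itemsets itemsets = new AlgoAprioriClose().runAlgorithm(0.5, transactions);
        for (List<Itemset> level : itemsets.getLevels()) {
            for (Itemset itemset : level) {
                System.out.println(itemset + "#SUP: " + itemset.getAbsoluteSupport());
            }
        }
    }
}

Each transaction is the set of layer-community keys of one node, so every frequent closed itemset that the algorithm reports corresponds to one community of the multiplex cover.
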
diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/graphs/MultiplexGraph.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/graphs/MultiplexGraph.java index 8c980005..dbc42dbe 100644 --- a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/graphs/MultiplexGraph.java +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/graphs/MultiplexGraph.java @@ -1,11 +1,11 @@ package i5.las2peer.services.ocd.graphs; import com.arangodb.ArangoCollection; -import com.arangodb.ArangoCursor; import com.arangodb.ArangoDatabase; import com.arangodb.entity.BaseDocument; -import com.arangodb.entity.BaseEdgeDocument; -import com.arangodb.model.*; +import com.arangodb.model.DocumentCreateOptions; +import com.arangodb.model.DocumentReadOptions; +import com.arangodb.model.DocumentUpdateOptions; import com.fasterxml.jackson.databind.ObjectMapper; import java.util.*; @@ -26,6 +26,7 @@ public class MultiplexGraph { public static final String edgeCountColumnName = "EDGE_COUNT"; public static final String layerCountColumnName = "LAYER_COUNT"; public static final String layerKeysColumnName = "LAYER_KEYS"; + public static final String representiveGraphKeyColumnName = "REPRESENTIVE_KEY"; public static final String creationMethodKeyColumnName = "CREATION_METHOD_KEY"; public static final String typesColumnName = "TYPES"; public static final String collectionName = "multiplexgraph"; @@ -73,6 +74,16 @@ public class MultiplexGraph { */ private List<String> layerKeys = new ArrayList<String>(); + /** + * The key of the representive CustomGraph of the MultiplexGraph. + */ + private String representiveKey; + + /** + * The representive CustomGraph of the MultiplexGraph. + */ + private CustomGraph representiveGraph; + /** * The graph's types. */ @@ -210,6 +221,47 @@ public GraphCreationLog getCreationMethod() { */ public void addLayerKey(String layerKey){this.layerKeys.add(layerKey);} + /** + * Adds a layer to the multiplex graph + * @param layerName The name of the layer + * @param customGraph The CustomGraph representing the layer + */ + public void addLayer(String layerName, CustomGraph customGraph) { + this.mapCustomGraphs.put(layerName, customGraph); + layerCount++; + } + + public Map<String, CustomGraph> getCustomGraphs() { + return mapCustomGraphs; + } + + protected CustomGraph getCustomGraph(CustomGraph customGraph) { + return mapCustomGraphs.get(customGraph.getId()); + } + + /** + * Getter for the key of the representive CustomGraph + * @return The key of the representive CustomGraph + */ + public String getRepresentiveKey(){return this.representiveKey;} + + /** + * Getter for the representive CustomGraph + * @return The representive CustomGraph + */ + public CustomGraph getRepresentiveGraph(){return this.representiveGraph;} + + /** + * Setter for the key of the representive CustomGraph + * @param key The key of the representive CustomGraph + */ + public void setRepresentiveKey(String key){this.representiveKey = key;} + + /** + * Setter for the representive CustomGraph + * @param graph The representive CustomGraph + */ + public void setRepresentiveGraph(CustomGraph graph){this.representiveGraph = graph;} ////////// Graph Types ////////// /** @@ -269,22 +321,6 @@ public boolean isWeighted() { return isOfType(GraphType.WEIGHTED); } - public void addLayer(String layerName, CustomGraph customGraph) { - this.mapCustomGraphs.put(layerName, customGraph); - layerCount++; - } - - public Map<String, CustomGraph> getCustomGraphs() { - return mapCustomGraphs; - } - - - //public void setCustomGraphs(Map<String, CustomGraph> customGraphs) { - // this.mapCustomGraphs = customGraphs; 
//} - protected CustomGraph getCustomGraph(CustomGraph customGraph) { - return mapCustomGraphs.get(customGraph.getId()); - } public static MultiplexGraph load(String key, ArangoDatabase db, String transId) { MultiplexGraph graph = null; @@ -314,6 +350,7 @@ public static MultiplexGraph load(String key, ArangoDatabase db, String transId) graph.creationMethod = GraphCreationLog.load(creationMethodKey, db, readOpt); Object objLayerKeys = bd.getAttribute(layerKeysColumnName); graph.layerKeys = om.convertValue(objLayerKeys, List.class); + graph.representiveKey = bd.getAttribute(representiveGraphKeyColumnName).toString(); } else { System.out.println("Empty Graph document"); @@ -334,6 +371,7 @@ public void persist(ArangoDatabase db, String transId) throws InterruptedExcepti bd.addAttribute(edgeCountColumnName, this.graphEdgeCount); bd.addAttribute(layerCountColumnName, this.layerCount); bd.addAttribute(layerKeysColumnName, this.layerKeys); + bd.addAttribute(representiveGraphKeyColumnName, this.representiveKey); this.creationMethod.persist(db, createOptions); bd.addAttribute(creationMethodKeyColumnName, this.creationMethod.getKey()); collection.insertDocument(bd, createOptions); diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/AbstractItemset.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/AbstractItemset.java new file mode 100644 index 00000000..01ce05c2 --- /dev/null +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/AbstractItemset.java @@ -0,0 +1,90 @@ +package i5.las2peer.services.ocd.spmf; + +/* This file is copyright (c) 2008-2012 Philippe Fournier-Viger +* +* This file is part of the SPMF DATA MINING SOFTWARE +* (http://www.philippe-fournier-viger.com/spmf). +* +* SPMF is free software: you can redistribute it and/or modify it under the +* terms of the GNU General Public License as published by the Free Software +* Foundation, either version 3 of the License, or (at your option) any later +* version. +* SPMF is distributed in the hope that it will be useful, but WITHOUT ANY +* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +* A PARTICULAR PURPOSE. See the GNU General Public License for more details. +* You should have received a copy of the GNU General Public License along with +* SPMF. If not, see <http://www.gnu.org/licenses/>. +*/ + +import java.text.DecimalFormat; + +/** + * This is an abstract class for an itemset (a set of items). + * + * @see AbstractOrderedItemset + * @author Philippe Fournier-Viger + */ +public abstract class AbstractItemset { + + public AbstractItemset() { + super(); + } + + /** + * Get the size of this itemset + * @return the size of this itemset + */ + public abstract int size(); + + /** + * Get this itemset as a string + * @return a string representation of this itemset + */ + public abstract String toString(); + + + /** + * print this itemset to System.out.
+ */ + public void print() { + System.out.print(toString()); + } + + + /** + * Get the support of this itemset + * @return the support of this itemset + */ + public abstract int getAbsoluteSupport(); + + /** + * Get the relative support of this itemset (a percentage) as a double + * @param nbObject the number of transactions in the database where this itemset was found + * @return the relative support of the itemset as a double + */ + public abstract double getRelativeSupport(int nbObject); + + /** + * Get the relative support of this itemset as a string + * @param nbObject the number of transactions in the database where this itemset was found + * @return the relative support of the itemset as a string + */ + public String getRelativeSupportAsString(int nbObject) { + // get the relative support + double frequence = getRelativeSupport(nbObject); + // convert it to a string with at most five decimals + DecimalFormat format = new DecimalFormat(); + format.setMinimumFractionDigits(0); + format.setMaximumFractionDigits(5); + return format.format(frequence); + } + + + /** + * Check if this itemset contains a given item. + * @param item the item + * @return true if the item is contained in this itemset + */ + public abstract boolean contains(Integer item); + +} \ No newline at end of file diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/AbstractOrderedItemset.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/AbstractOrderedItemset.java new file mode 100644 index 00000000..61973ba3 --- /dev/null +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/AbstractOrderedItemset.java @@ -0,0 +1,256 @@ +package i5.las2peer.services.ocd.spmf; + +/* This file is copyright (c) 2008-2012 Philippe Fournier-Viger +* +* This file is part of the SPMF DATA MINING SOFTWARE +* (http://www.philippe-fournier-viger.com/spmf). +* +* SPMF is free software: you can redistribute it and/or modify it under the +* terms of the GNU General Public License as published by the Free Software +* Foundation, either version 3 of the License, or (at your option) any later +* version. +* SPMF is distributed in the hope that it will be useful, but WITHOUT ANY +* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +* A PARTICULAR PURPOSE. See the GNU General Public License for more details. +* You should have received a copy of the GNU General Public License along with +* SPMF. If not, see <http://www.gnu.org/licenses/>. +*/ + + +/** + * This is an abstract class indicating general methods + * that an ordered itemset should have, and is designed for ordered itemsets where items are sorted + * by lexical order and no item can appear twice. +* +* @see AbstractItemset + * @author Philippe Fournier-Viger + */ +public abstract class AbstractOrderedItemset extends AbstractItemset{ + + public AbstractOrderedItemset() { + super(); + } + + /** + * Get the support of this itemset + * @return the support of this itemset + */ + public abstract int getAbsoluteSupport(); + + /** + * Get the size of this itemset + * @return the size of this itemset + */ + public abstract int size(); + + /** + * Get the item at a given position of this itemset + * @param position the position of the item to be returned + * @return the item + */ + public abstract Integer get(int position); + + /** + * Get the last item. + * @return the last item.
+ */ + public Integer getLastItem() { + return get(size() - 1); + } + + /** + * Get this itemset as a string + * @return a string representation of this itemset + */ + public String toString(){ + if(size() == 0) { + return "EMPTYSET"; + } + // use a string buffer for more efficiency + StringBuilder r = new StringBuilder (); + // for each item, append it to the StringBuilder + for(int i=0; i< size(); i++){ + r.append(get(i)); + r.append(' '); + } + return r.toString(); // return the string + } + + + /** + * Get the relative support of this itemset (a percentage) as a double + * @param nbObject the number of transactions in the database where this itemset was found + * @return the relative support of the itemset as a double + */ + public double getRelativeSupport(int nbObject) { + // Divide the absolute support by the number of transactions to get the relative support + return ((double)getAbsoluteSupport()) / ((double) nbObject); + } + + + /** + * Check if this itemset contains a given item. + * @param item the item + * @return true if the item is contained in this itemset + */ + public boolean contains(Integer item) { + for (int i=0; i< size(); i++) { + if (get(i).equals(item)) { + return true; + } else if (get(i) > item) { + return false; + } + } + return false; + } + + /** + * This method checks if another itemset is contained in this one. + * The method assumes that items are lexically ordered in itemsets. + * + * @param itemset2 the other itemset + * @return true if it is contained + */ + public boolean containsAll(AbstractOrderedItemset itemset2){ + // first we check the size + if(size() < itemset2.size()){ + return false; + } + + // we will use this variable to remember where we are in this itemset + int i = 0; + + // for each item in itemset2, we will try to find it in this itemset + for(int j =0; j < itemset2.size(); j++){ + boolean found = false; // flag to remember if we have found the item at position j + + // we search in this itemset starting from the current position i + while(found == false && i< size()){ + // if we found the current item from itemset2, we stop searching + if(get(i).equals(itemset2.get(j))){ + found = true; + }// if the current item in this itemset is larger than + // the current item from itemset2, we return false + // because the itemsets are assumed to be lexically ordered. + else if(get(i) > itemset2.get(j)){ + return false; + } + + i++; // continue searching from position i++ + } + // if the item was not found in the previous loop, return false + if(!found){ + return false; + } + } + return true; // if all items were found, return true + } + + /** + * This method compares this itemset with another itemset to see if they are + * equal. The method assumes that the two itemsets are lexically ordered. + * @param itemset2 an itemset + * @return true or false + */ + public boolean isEqualTo(AbstractOrderedItemset itemset2) { + // If they don't contain the same number of items, we return false + if (this.size() != itemset2.size()) { + return false; + } + // We compare each item one by one from i to size - 1. + for (int i = 0; i < itemset2.size(); i++) { + // if different, return false + if (!itemset2.get(i).equals(this.get(i))) { + return false; + } + } + // All the items are the same, we return true.
+ return true; + } + + /** + * This method compares this itemset with another itemset to see if they are + * equal. The method assumes that the two itemsets are lexically ordered. + * @param itemset the other itemset, given as an array of items + * @return true or false + */ + public boolean isEqualTo(int[] itemset) { + // If they don't contain the same number of items, we return false + if (this.size() != itemset.length) { + return false; + } + // We compare each item one by one from i to size - 1. + for (int i = 0; i < itemset.length; i++) { + // if different, return false + if (itemset[i] != this.get(i)) { + return false; + } + } + // All the items are the same, we return true. + return true; + } + + /** + * This method checks if this itemset is the same as another itemset + * except for the last item. + * @param itemset2 the second itemset + * @return true if they are the same except for the last item + */ + public boolean allTheSameExceptLastItemV2(AbstractOrderedItemset itemset2) { + // if they don't contain the same number of item, return false + if (itemset2.size() != this.size()) { + return false; + } + // Otherwise, we have to compare item by item + for (int i = 0; i < this.size() - 1; i++) { + // if they are not the last items, they should be the same + // otherwise return false + if (!this.get(i).equals(itemset2.get(i))) { + return false; + } + } + // All items are the same. We return true. + return true; + } + + + /** + * Check if the items from this itemset are all the same as those of another itemset + * except the last item + * and that itemset2 is lexically smaller than this itemset. If all these conditions are satisfied, + * this method returns the last item of itemset2. Otherwise it returns null. + * @param itemset2 the other itemset + * @return the last item of itemset2, otherwise, null. + */ + public Integer allTheSameExceptLastItem(AbstractOrderedItemset itemset2) { + // if these itemsets do not have the same size, return null + if(itemset2.size() != this.size()){ + return null; + } + // We will compare all items one by one starting from position i =0 to size -1 + for(int i=0; i< this.size(); i++){ + // if this is the last position + if(i == this.size()-1){ + // We check if the item from this itemset is smaller (lexical order) + // and different from the one of itemset2. + // If not, return null. + if(this.get(i) >= itemset2.get(i)){ + return null; + } + } + // If this is not the last position, we check if items are the same + else if(!this.get(i).equals(itemset2.get(i))){ + // if not, return null + return null; + } + } + // otherwise, we return the last item of itemset2 + return itemset2.get(itemset2.size()-1); + } + + +} \ No newline at end of file diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/AlgoAprioriClose.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/AlgoAprioriClose.java new file mode 100644 index 00000000..2e86a1b8 --- /dev/null +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/AlgoAprioriClose.java @@ -0,0 +1,472 @@ +package i5.las2peer.services.ocd.spmf; +/* This file is copyright (c) 2008-2013 Philippe Fournier-Viger +* +* This file is part of the SPMF DATA MINING SOFTWARE +* (http://www.philippe-fournier-viger.com/spmf). +* +* SPMF is free software: you can redistribute it and/or modify it under the +* terms of the GNU General Public License as published by the Free Software +* Foundation, either version 3 of the License, or (at your option) any later +* version.
+* +* SPMF is distributed in the hope that it will be useful, but WITHOUT ANY +* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +* A PARTICULAR PURPOSE. See the GNU General Public License for more details. +* You should have received a copy of the GNU General Public License along with +* SPMF. If not, see <http://www.gnu.org/licenses/>. +*/ + +import java.util.*; +import java.util.Map.Entry; + +//import ca.pfv.spmf.algorithms.ArraysAlgos; +//import ca.pfv.spmf.patterns.itemset_array_integers_with_count.Itemset; +//import ca.pfv.spmf.patterns.itemset_array_integers_with_count.Itemsets; +//import ca.pfv.spmf.tools.MemoryLogger; + +/** + * This is an implementation of the AprioriClose (a.k.a Close) algorithm as described by: + *
<br/><br/>
+ * + * Pasquier, N., Bastide, Y., Taouil, R., Lakhal, L., (1999). Efficient Mining + * of Association Rules using Closed Itemset Lattices. Information Systems, + * Elsevier Science, 24(1), pages 25-46. + *
<br/><br/>
+ * + * The AprioriClose algorithm is an extension of the Apriori algorithm proposed in: + *
<br/><br/>
+ * + * Agrawal R, Srikant R. "Fast Algorithms for Mining Association Rules", VLDB. + * Sep 12-15 1994, Chile, 487-99. + *
<br/><br/>
+ * + * This implementation is an optimized version that keeps the result in memory + * and returns it to the caller + * from the runAlgorithm() method. + * + * @author Philippe Fournier-Viger + */ +public class AlgoAprioriClose { + + // the current level k in the breadth-first search + protected int k; + + // variables for statistics + protected int totalCandidateCount = 0; // number of candidates generated during last execution + protected long startTimestamp; // start time of last execution + protected long endTimestamp; // end time of last execution + private int itemsetCount; // itemsets found during last execution + private int databaseSize; + + // the minimum support set by the user + private int minsupRelative; + + // A memory representation of the database. + // Each position in the list represents a transaction + private List<int[]> database = null; + + // The patterns that are found + // (if the user wants to keep them into memory) + protected Itemsets patterns = null; + + + /** + * Default constructor + */ + public AlgoAprioriClose() { + + } + + /** + * Method to run the algorithm + * @param minsup a minimum support value as a percentage + * @param transactions a map where each key is a transaction id and each value is a list of item ids, sorted in ascending order + * @return the frequent closed itemsets found by the algorithm + */ + public Itemsets runAlgorithm(double minsup, Map<Integer, List<Integer>> transactions) { + patterns = new Itemsets("FREQUENT ITEMSETS"); + + // record the start time + startTimestamp = System.currentTimeMillis(); + + // set the number of itemsets found to zero + itemsetCount = 0; + // set the number of candidates found to zero + totalCandidateCount = 0; + // reset the utility for checking the memory usage + MemoryLogger.getInstance().reset(); + + // READ THE INPUT TRANSACTIONS + // variable to count the number of transactions + databaseSize = 0; + // Map to count the support of each item + // Key: item Value : support + Map<Integer, Integer> mapItemCount = new HashMap<Integer, Integer>(); // to count the support of each item + + database = new ArrayList<int[]>(); // the database in memory (initially empty) + + + for (Map.Entry<Integer, List<Integer>> entry : transactions.entrySet()) { + List<Integer> values = entry.getValue(); + + // create an array of int to store the items in this transaction + int transaction[] = new int[values.size()]; + for (int i = 0; i < values.size(); i++) { + int item = values.get(i); + transaction[i] = item; + // increase the support count + Integer count = mapItemCount.get(item); + if (count == null) { + mapItemCount.put(item, 1); + } else { + mapItemCount.put(item, ++count); + } + } + // add the transaction to the database + database.add(transaction); + // increase the number of transactions + databaseSize++; + } + + // convert the minimum support as a percentage to a + // relative minimum support as an integer + this.minsupRelative = (int) Math.ceil(minsup * databaseSize); + + // we start looking for itemsets of size 1 + k = 1; + + // We add all frequent items to the set of candidates of size 1 + List<Integer> frequent1 = new ArrayList<Integer>(); + for(Entry<Integer, Integer> entry : mapItemCount.entrySet()){ + if(entry.getValue() >= minsupRelative){ + frequent1.add(entry.getKey()); +// saveItemsetToFile(entry.getKey(), entry.getValue()); + } + } + + // We sort the list of candidates by lexical order + // (Apriori needs to use a total order, otherwise it does not work) + Collections.sort(frequent1, new Comparator<Integer>() { + public int compare(Integer o1, Integer o2) { + return o1 - o2; + } + }); + + // If no frequent item, the algorithm stops!
+ if(frequent1.size() == 0){ + return patterns; + } + + // add the frequent items of size 1 to the total number of candidates + totalCandidateCount += frequent1.size(); + + + // Now we will perform a loop to find all frequent itemsets of size > 1 + // starting from size k = 2. + // The loop will stop when no candidates can be generated. + + // This will store frequent itemsets from level K. + List<Itemset> level = null; + // This will store itemsets from the level K-1 for K>2 + List<Itemset> previousLevel = null; + + k = 2; + do{ + // we check the memory usage + MemoryLogger.getInstance().checkMemory(); + + // Generate candidates of size K + List<Itemset> candidatesK; + + // if we are at level k=2, we use an optimization to generate candidates + if(k == 2){ + candidatesK = generateCandidate2(frequent1); + }else{ + // otherwise we use the regular way to generate candidates + candidatesK = generateCandidateSizeK(level); + } + + // we add the number of candidates generated to the total + totalCandidateCount += candidatesK.size(); + + // We scan the database one time to calculate the support + // of each candidate and keep those with higher support. + // For each transaction: + for(int[] transaction: database){ + // for each candidate: + loopCand: for(Itemset candidate : candidatesK){ + // a variable that will be used to check if + // all items of candidate are in this transaction + int pos = 0; + // for each item in this transaction + for(int item: transaction){ + // if the item corresponds to the current item of candidate + if(item == candidate.itemset[pos]){ + // we will try to find the next item of candidate next + pos++; + // if we found all items of candidate in this transaction + if(pos == candidate.itemset.length){ + // we increase the support of this candidate + candidate.support++; + continue loopCand; + } + // Because of lexical order, we don't need to + // continue scanning the transaction if the current item + // is larger than the one that we search in candidate. + }else if(item > candidate.itemset[pos]){ + continue loopCand; + } + + } + } + } + + // save the current level + previousLevel = level; + // We build the level k+1 with all the candidates that have + // a support higher than the minsup threshold. + level = new ArrayList<Itemset>(); + for (Itemset candidate : candidatesK) { + // if the support is > minsup + if (candidate.getAbsoluteSupport() >= minsupRelative) { + // add the candidate + level.add(candidate); + } + } + + + // check if itemsets of the previous level are closed; if yes, save them. + if(previousLevel == null){ // if k =2 + // we use this method optimized for itemsets of size 1 + checkIfItemsetsK_1AreClosed(frequent1, level, mapItemCount); + }else{ // if k > 2 + // we use the general method + checkIfItemsetsK_1AreClosed(previousLevel, level); + } + + + // we will generate larger itemsets next. + k++; + }while(level.isEmpty() == false); + + // record end time + endTimestamp = System.currentTimeMillis(); + // check the memory usage + MemoryLogger.getInstance().checkMemory(); + + return patterns; + } + + /** + * Checks if all the itemsets of size K-1 are closed by comparing + * them with itemsets of size K.
+ * @param levelKm1 itemsets of size k-1 + * @param levelK itemsets of size k + */ + private void checkIfItemsetsK_1AreClosed(Collection<Itemset> levelKm1, + List<Itemset> levelK) { + // for each itemset of size k-1 + for (Itemset itemset : levelKm1) { + boolean isClosed = true; + // for each itemset of level K, if it has the same + // support and contains the current itemset from k-1 + // then this latter itemset is not closed. + for (Itemset itemsetK : levelK) { + if (itemsetK.getAbsoluteSupport() == itemset + .getAbsoluteSupport() && itemsetK.containsAll(itemset)) { + isClosed = false; + break; + } + } + // if the current itemset of size k-1 is closed, then + // we save it. + if (isClosed) { + saveItemset(itemset); + } + } + } + + /** + * Checks if all the itemsets of size K-1 are closed by comparing + * them with itemsets of size K. + * @param levelKm1 itemsets of size 1 + * @param levelK itemsets of size 2 + * @param mapItemCount the map of the support of each item of size 1 + */ + private void checkIfItemsetsK_1AreClosed(List<Integer> levelKm1, + List<Itemset> levelK, Map<Integer, Integer> mapItemCount) { + // for each itemset of size k-1 + for (Integer itemset : levelKm1) { + boolean isClosed = true; + int support = mapItemCount.get(itemset); + // for each itemset of level K, if it has the same + // support and contains the current itemset from k-1 + // then this latter itemset is not closed. + for (Itemset itemsetK : levelK) { + if (itemsetK.getAbsoluteSupport() == support && itemsetK.contains(itemset)) { + isClosed = false; + break; + } + } + // if the current itemset of size k-1 is closed, then + // we save it. + if (isClosed) { + saveItemset(itemset, support); + } + } + } + + + /** + * Return the number of transactions in the last database read by the algorithm. + * @return the number of transactions + */ + public int getDatabaseSize() { + return databaseSize; + } + + /** + * This method generates candidate itemsets of size 2 based on + * itemsets of size 1. + * @param frequent1 the list of frequent itemsets of size 1. + * @return a List of Itemset that are the candidates of size 2. + */ + private List<Itemset> generateCandidate2(List<Integer> frequent1) { + List<Itemset> candidates = new ArrayList<Itemset>(); + + // For each itemset I1 and I2 of level k-1 + for (int i = 0; i < frequent1.size(); i++) { + Integer item1 = frequent1.get(i); + for (int j = i + 1; j < frequent1.size(); j++) { + Integer item2 = frequent1.get(j); + + // Create a new candidate by combining itemset1 and itemset2 + candidates.add(new Itemset(new int []{item1, item2})); + } + } + return candidates; + } + + /** + * Method to generate itemsets of size k from frequent itemsets of size K-1. + * @param levelK_1 frequent itemsets of size k-1 + * @return itemsets of size k + */ + protected List<Itemset> generateCandidateSizeK(List<Itemset> levelK_1) { + // create a variable to store candidates + List<Itemset> candidates = new ArrayList<Itemset>(); + + // For each itemset I1 and I2 of level k-1 + loop1: for (int i = 0; i < levelK_1.size(); i++) { + int[] itemset1 = levelK_1.get(i).itemset; + loop2: for (int j = i + 1; j < levelK_1.size(); j++) { + int[] itemset2 = levelK_1.get(j).itemset; + + // we compare items of itemset1 and itemset2.
+ // If they have all the same k-1 items and the last item of + // itemset1 is smaller than + // the last item of itemset2, we will combine them to generate a + // candidate + for (int k = 0; k < itemset1.length; k++) { + // if they are the last items + if (k == itemset1.length - 1) { + // the one from itemset1 should be smaller (lexical + // order) + // and different from the one of itemset2 + if (itemset1[k] >= itemset2[k]) { + continue loop1; + } + } + // if they are not the last items, and + else if (itemset1[k] < itemset2[k]) { + continue loop2; // we continue searching + } else if (itemset1[k] > itemset2[k]) { + continue loop1; // we stop searching: because of lexical + // order + } + } + + // Create a new candidate by combining itemset1 and itemset2 + int newItemset[] = new int[itemset1.length+1]; + System.arraycopy(itemset1, 0, newItemset, 0, itemset1.length); + newItemset[itemset1.length] = itemset2[itemset2.length -1]; + + // The candidate is tested to see if its subsets of size k-1 are + // included in + // level k-1 (they are frequent). + if (allSubsetsOfSizeK_1AreFrequent(newItemset, levelK_1)) { + candidates.add(new Itemset(newItemset)); + } + } + } + return candidates; // return the set of candidates + } + + /** + * Method to check if all the subsets of size k-1 of a candidate of size k are frequent + * @param candidate a candidate itemset of size k + * @param levelK_1 the frequent itemsets of size k-1 + * @return true if all the subsets are frequent + */ + protected boolean allSubsetsOfSizeK_1AreFrequent(int[] candidate, List<Itemset> levelK_1) { + // generate all subsets by removing each item from the candidate, one by one + for(int posRemoved=0; posRemoved< candidate.length; posRemoved++){ + + // perform a binary search to check if the subset appears in level k-1. + int first = 0; + int last = levelK_1.size() - 1; + + // variable to remember if we found the subset + boolean found = false; + // the binary search + while( first <= last ) + { + int middle = ( first + last ) >>> 1; // divide by 2 + + int comparison = ArraysAlgos.sameAs(levelK_1.get(middle).getItems(), candidate, posRemoved); + if(comparison < 0 ){ + first = middle + 1; // the itemset compared is smaller than the subset according to the lexical order + } + else if(comparison > 0 ){ + last = middle - 1; // the itemset compared is larger than the subset according to the lexical order + } + else{ + found = true; // we have found it so we stop + break; + } + } + + if(found == false){ // if we did not find it, that means that candidate is not a frequent itemset because + // at least one of its subsets does not appear in level k-1. + return false; + } + } + return true; + } + + + void saveItemset(Itemset itemset) { + itemsetCount++; + patterns.addItemset(itemset, itemset.size()); + } + + void saveItemset(Integer item, Integer support){ + itemsetCount++; + Itemset itemset = new Itemset(item); + itemset.setAbsoluteSupport(support); + patterns.addItemset(itemset, 1); + } + + /** + * Print statistics about the algorithm execution to System.out.
+
+	/**
+	 * Print statistics about the algorithm execution to System.out.
+	 */
+	public void printStats() {
+		System.out.println("============= APRIORI - STATS =============");
+		System.out.println(" Candidates count : " + totalCandidateCount);
+		System.out.println(" The algorithm stopped at size " + (k - 1)
+				+ ", because there is no candidate");
+		System.out.println(" Frequent closed itemsets count : " + itemsetCount);
+		System.out.println(" Maximum memory usage : " + MemoryLogger.getInstance().getMaxMemory() + " mb");
+		System.out.println(" Total time ~ " + (endTimestamp - startTimestamp) + " ms");
+		System.out.println("===================================================");
+	}
+}
diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/ArraysAlgos.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/ArraysAlgos.java
new file mode 100644
index 00000000..8bbde0d0
--- /dev/null
+++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/ArraysAlgos.java
@@ -0,0 +1,520 @@
+package i5.las2peer.services.ocd.spmf;
+
+import java.util.Arrays;
+import java.util.Comparator;
+/* This file is copyright (c) 2008-2012 Philippe Fournier-Viger
+*
+* This file is part of the SPMF DATA MINING SOFTWARE
+* (http://www.philippe-fournier-viger.com/spmf).
+*
+* SPMF is free software: you can redistribute it and/or modify it under the
+* terms of the GNU General Public License as published by the Free Software
+* Foundation, either version 3 of the License, or (at your option) any later
+* version.
+*
+* SPMF is distributed in the hope that it will be useful, but WITHOUT ANY
+* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+* A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License along with
+* SPMF. If not, see <http://www.gnu.org/licenses/>.
+*/
+import java.util.List;
+
+/**
+ * This class provides a set of basic methods that can be used with itemsets
+ * represented as arrays of integers.
+ * All the methods are static so that they can be used from any class.
+ * @author Philippe Fournier-Viger
+ *
+ */
+public class ArraysAlgos {
+
+	/**
+	 * Make a copy of an itemset but exclude a given item
+	 * @param itemset the itemset
+	 * @param itemToRemove the given item
+	 * @return the copy
+	 */
+	public static int[] cloneItemSetMinusOneItem(int[] itemset, Integer itemToRemove) {
+		// create the new itemset
+		int[] newItemset = new int[itemset.length - 1];
+		int i = 0;
+		// for each item in this itemset
+		for (int j = 0; j < itemset.length; j++) {
+			// copy the item except if it is the item that should be excluded
+			if (itemset[j] != itemToRemove) {
+				newItemset[i++] = itemset[j];
+			}
+		}
+		return newItemset; // return the copy
+	}
+
+	/**
+	 * Make a copy of an itemset but exclude a set of items
+	 * @param itemset the itemset
+	 * @param itemsetToNotKeep the set of items to be excluded
+	 * @return the copy
+	 */
+	public static int[] cloneItemSetMinusAnItemset(int[] itemset, int[] itemsetToNotKeep) {
+		// create a new itemset
+		int[] newItemset = new int[itemset.length - itemsetToNotKeep.length];
+		int i = 0;
+		// for each item of this itemset
+		for (int j = 0; j < itemset.length; j++) {
+			// copy the item unless it is one of those that should be excluded
+			if (Arrays.binarySearch(itemsetToNotKeep, itemset[j]) < 0) {
+				newItemset[i++] = itemset[j];
+			}
+		}
+		return newItemset; // return the copy
+	}
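The two clone helpers above can be exercised as follows (editorial sketch, not part of the diff; assumes the ArraysAlgos class from this file is on the classpath):

import i5.las2peer.services.ocd.spmf.ArraysAlgos;
import java.util.Arrays;

public class CloneSketch {
    public static void main(String[] args) {
        int[] itemset = {1, 3, 5, 7};
        // exclude the single item 5 -> [1, 3, 7]
        System.out.println(Arrays.toString(ArraysAlgos.cloneItemSetMinusOneItem(itemset, 5)));
        // exclude the sorted sub-itemset {3, 7} -> [1, 5]
        System.out.println(Arrays.toString(ArraysAlgos.cloneItemSetMinusAnItemset(itemset, new int[]{3, 7})));
    }
}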
+
+	/**
+	 * This method checks if two itemsets are the same except for the last item.
+	 * It assumes that both itemsets have the same length and are sorted.
+	 * @param itemset1 the first itemset
+	 * @param itemset2 the second itemset
+	 * @return true if they are the same except for the last item
+	 */
+	public static boolean allTheSameExceptLastItem(int[] itemset1, int[] itemset2) {
+		// compare the itemsets item by item
+		for (int i = 0; i < itemset1.length - 1; i++) {
+			// all items except the last one have to be the same,
+			// otherwise return false
+			if (itemset1[i] != itemset2[i]) {
+				return false;
+			}
+		}
+		// All items are the same. We return true.
+		return true;
+	}
+
+	/**
+	 * Method to concatenate two arrays in a new array
+	 * @param prefix the first array
+	 * @param suffix the second array
+	 * @return the resulting array
+	 */
+	public static int[] concatenate(int[] prefix, int[] suffix) {
+		int[] concatenation = new int[prefix.length + suffix.length];
+		System.arraycopy(prefix, 0, concatenation, 0, prefix.length);
+		System.arraycopy(suffix, 0, concatenation, prefix.length, suffix.length);
+		return concatenation;
+	}
+
+	/**
+	 * This method performs the intersection of two sorted arrays of integers and returns a new sorted array.
+	 * @param array1 the first array
+	 * @param array2 the second array
+	 * @return the resulting sorted array
+	 */
+	public static int[] intersectTwoSortedArrays(int[] array1, int[] array2) {
+		// create a new array having the smallest size between the two arrays
+		final int newArraySize = (array1.length < array2.length) ? array1.length : array2.length;
+		int[] newArray = new int[newArraySize];
+
+		int pos1 = 0;
+		int pos2 = 0;
+		int posNewArray = 0;
+		while (pos1 < array1.length && pos2 < array2.length) {
+			if (array1[pos1] < array2[pos2]) {
+				pos1++;
+			} else if (array2[pos2] < array1[pos1]) {
+				pos2++;
+			} else { // if they are the same
+				newArray[posNewArray] = array1[pos1];
+				posNewArray++;
+				pos1++;
+				pos2++;
+			}
+		}
+		// return the subrange of the new array that is filled.
+		return Arrays.copyOfRange(newArray, 0, posNewArray);
+	}
+
+	/**
+	 * Check if an itemset contains another itemset.
+	 * It assumes that itemsets are sorted according to the lexical order.
+	 * @param itemset1 the first itemset
+	 * @param itemset2 the second itemset
+	 * @return true if the first itemset contains the second itemset
+	 */
+	public static boolean containsOrEquals(Integer[] itemset1, Integer[] itemset2) {
+		// for each item in the second itemset
+		loop1: for (int i = 0; i < itemset2.length; i++) {
+			// for each item in the first itemset
+			for (int j = 0; j < itemset1.length; j++) {
+				// if the current item in itemset1 is equal to the one in itemset2,
+				// search for the next one in itemset2
+				if (itemset1[j].intValue() == itemset2[i].intValue()) {
+					continue loop1;
+					// if the current item in itemset1 is larger
+					// than the current item in itemset2, then
+					// stop because of the lexical order.
+				} else if (itemset1[j].intValue() > itemset2[i].intValue()) {
+					return false;
+				}
+			}
+			// means that an item was not found
+			return false;
+		}
+		// if all items were found, return true.
+		return true;
+	}
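A short usage sketch of the two-pointer intersection above (editorial, not part of the diff):

import i5.las2peer.services.ocd.spmf.ArraysAlgos;
import java.util.Arrays;

public class IntersectSketch {
    public static void main(String[] args) {
        // both inputs must be sorted; each step advances the pointer of the
        // smaller current element, so the intersection runs in linear time
        int[] a = {1, 2, 4, 6, 9};
        int[] b = {2, 3, 6, 9, 11};
        System.out.println(Arrays.toString(ArraysAlgos.intersectTwoSortedArrays(a, b))); // [2, 6, 9]
    }
}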
+
+	/**
+	 * Check if an itemset contains another itemset. It assumes that itemsets are
+	 * sorted according to the lexical order.
+	 *
+	 * @param itemset1 the first itemset
+	 * @param itemset2 the second itemset
+	 * @return true if the first itemset contains the second itemset
+	 */
+	public static boolean containsOrEquals(Short[] itemset1, Short[] itemset2) {
+		// for each item in the second itemset
+		loop1: for (int i = 0; i < itemset2.length; i++) {
+			// for each item in the first itemset
+			for (int j = 0; j < itemset1.length; j++) {
+				// if the current item in itemset1 is equal to the one in itemset2,
+				// search for the next one in itemset2
+				if (itemset1[j].shortValue() == itemset2[i].shortValue()) {
+					continue loop1;
+					// if the current item in itemset1 is larger
+					// than the current item in itemset2, then
+					// stop because of the lexical order.
+				} else if (itemset1[j].shortValue() > itemset2[i].shortValue()) {
+					return false;
+				}
+			}
+			// means that an item was not found
+			return false;
+		}
+		// if all items were found, return true.
+		return true;
+	}
+
+	/**
+	 * Check if the first itemset contains the second one
+	 *
+	 * @param itemset1 an itemset
+	 * @param itemset2 another itemset
+	 * @return true if itemset2 is a subset of itemset1, false otherwise
+	 */
+	public static boolean containsOrEquals(List<Short> itemset1, List<Short> itemset2) {
+		// for each item in the second itemset
+		loop1: for (int i = 0; i < itemset2.size(); i++) {
+			short val2 = itemset2.get(i);
+			// for each item in the first itemset
+			for (int j = 0; j < itemset1.size(); j++) {
+				short val1 = itemset1.get(j);
+				// if the current item in itemset1 is equal to the one in itemset2,
+				// search for the next one in itemset2
+				if (val1 == val2) {
+					continue loop1;
+					// if the current item in itemset1 is larger
+					// than the current item in itemset2, then
+					// stop because of the lexical order.
+				} else if (val1 > val2) {
+					return false;
+				}
+			}
+			// means that an item was not found
+			return false;
+		}
+		// if all items were found, return true.
+		return true;
+	}
+
+	/**
+	 * This method checks if an item "item" is in the itemset "itemset".
+	 * It assumes that items in the itemset are sorted in lexical order and
+	 * that the largest item in the itemset is known.
+	 * @param itemset an itemset
+	 * @param item the item
+	 * @param maxItemInArray the largest item in the itemset
+	 * @return true if the item appears in the itemset
+	 */
+	public static boolean containsLEX(Integer[] itemset, Integer item, int maxItemInArray) {
+		// if the item is larger than the largest item
+		// in the itemset, return false
+		if (item > maxItemInArray) {
+			return false;
+		}
+		// Otherwise, for each item in the itemset
+		for (Integer itemI : itemset) {
+			// check if the current item is equal to the one that is searched
+			if (itemI.equals(item)) {
+				// if yes return true
+				return true;
+			}
+			// if the current item is larger than the searched item,
+			// the method returns false because of the lexical order in the itemset.
+			else if (itemI > item) {
+				return false;
+			}
+		}
+		// if the searched item was not found, return false.
+		return false;
+	}
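How the sort order lets the membership tests above fail fast (editorial sketch, not part of the diff):

import i5.las2peer.services.ocd.spmf.ArraysAlgos;

public class ContainsSketch {
    public static void main(String[] args) {
        Integer[] itemset = {1, 2, 4, 7};
        // true: both 2 and 7 occur in the itemset
        System.out.println(ArraysAlgos.containsOrEquals(itemset, new Integer[]{2, 7}));
        // false: once 4 > 3 is seen, 3 can no longer occur further right
        System.out.println(ArraysAlgos.containsOrEquals(itemset, new Integer[]{2, 3}));
        // false without scanning: 9 exceeds the known maximum item 7
        System.out.println(ArraysAlgos.containsLEX(itemset, 9, 7));
    }
}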
+
+	/**
+	 * Method to compare two sorted lists of integers and see if they are the same,
+	 * while ignoring an item at a given position in the second list.
+	 * This method is used by some Apriori algorithms.
+	 * @param itemset1 the first itemset
+	 * @param itemsets2 the second itemset
+	 * @param posRemoved the position of an item that should be ignored in "itemsets2" to perform the comparison.
+	 * @return 0 if they are the same, 1 if itemset1 is larger according to the lexical order,
+	 *         -1 if smaller.
+	 */
+	public static int sameAs(int[] itemset1, int[] itemsets2, int posRemoved) {
+		// a variable to know which item from itemsets2 we are currently searching
+		int j = 0;
+		// loop over the items of itemset1
+		for (int i = 0; i < itemset1.length; i++) {
+			// if j is the position that should be ignored, skip it
+			if (j == posRemoved) {
+				j++;
+			}
+			// if the items match, compare the next pair of items
+			if (itemset1[i] == itemsets2[j]) {
+				j++;
+			} else if (itemset1[i] > itemsets2[j]) {
+				// the current item of itemset1 is larger, so
+				// itemset1 is larger according to the lexical order
+				return 1;
+			} else {
+				// otherwise itemset1 is smaller, so we return -1.
+				return -1;
+			}
+		}
+		return 0;
+	}
+
+	/**
+	 * Check if a sorted itemset is contained in another
+	 * @param itemset1 the first itemset
+	 * @param itemset2 the second itemset
+	 * @return true if yes, otherwise false
+	 */
+	public static boolean includedIn(int[] itemset1, int[] itemset2) {
+		int count = 0; // the current position of itemset1 that we want to find in itemset2
+
+		// for each item in itemset2
+		for (int i = 0; i < itemset2.length; i++) {
+			// if we found the item
+			if (itemset2[i] == itemset1[count]) {
+				// we will look for the next item of itemset1
+				count++;
+				// if we have found all items already, return true
+				if (count == itemset1.length) {
+					return true;
+				}
+			}
+		}
+		// it is not included, so return false!
+		return false;
+	}
+
+	/**
+	 * Check if a sorted itemset is contained in another
+	 * @param itemset1 the first itemset
+	 * @param itemset1Length the length of the first itemset
+	 * @param itemset2 the second itemset
+	 * @return true if yes, otherwise false
+	 */
+	public static boolean includedIn(int[] itemset1, int itemset1Length, int[] itemset2) {
+		int count = 0; // the current position of itemset1 that we want to find in itemset2
+
+		// for each item in itemset2
+		for (int i = 0; i < itemset2.length; i++) {
+			// if we found the item
+			if (itemset2[i] == itemset1[count]) {
+				// we will look for the next item of itemset1
+				count++;
+				// if we have found all items already, return true
+				if (count == itemset1Length) {
+					return true;
+				}
+			}
+		}
+		// it is not included, so return false!
+		return false;
+	}
+
+	/**
+	 * This method checks if the item "item" is in the itemset.
+	 * It assumes that items in the itemset are sorted in lexical order.
+	 * This version also returns true if adding "item" to the itemset would not
+	 * make it the largest item according to the lexical order.
+	 * @param itemset an itemset
+	 * @param item the item
+	 * @return true if the above conditions are met, otherwise false
+	 */
+	public static boolean containsLEXPlus(int[] itemset, int item) {
+		// for each item in itemset
+		for (int i = 0; i < itemset.length; i++) {
+			// check if the current item is equal to the one that is searched
+			if (itemset[i] == item) {
+				// if yes return true
+				return true;
+				// if the current item is larger than the item that is searched,
+				// then return true because if the item "item" was added, it would
+				// not be the largest one according to the lexical order.
+			} else if (itemset[i] > item) {
+				return true;
+			}
+		}
+		// if the searched item was not found, return false.
+		return false;
+	}
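The subset and order tests above in action (editorial sketch, not part of the diff):

import i5.las2peer.services.ocd.spmf.ArraysAlgos;

public class SubsetSketch {
    public static void main(String[] args) {
        // {2, 5} occurs, in order, within {1, 2, 3, 5} -> true
        System.out.println(ArraysAlgos.includedIn(new int[]{2, 5}, new int[]{1, 2, 3, 5}));
        // 4 is not in {1, 3, 5}, but since 5 > 4 it would not be the largest
        // item if added, so containsLEXPlus still returns true
        System.out.println(ArraysAlgos.containsLEXPlus(new int[]{1, 3, 5}, 4));
    }
}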
+
+	/**
+	 * This method checks if the item "item" is in the itemset.
+	 * It assumes that items in the itemset are sorted in lexical order.
+	 * @param itemset an itemset
+	 * @param item the item
+	 * @return true if the item appears in the itemset
+	 */
+	public static boolean containsLEX(int[] itemset, int item) {
+		// for each item in itemset
+		for (int i = 0; i < itemset.length; i++) {
+			// check if the current item is equal to the one that is searched
+			if (itemset[i] == item) {
+				// if yes return true
+				return true;
+				// if the current item is larger than the item that is searched,
+				// then return false because of the lexical order.
+			} else if (itemset[i] > item) {
+				return false;
+			}
+		}
+		// if the searched item was not found, return false.
+		return false;
+	}
+
+	/**
+	 * Check if a sorted array of integers contains an integer.
+	 * @param itemset the sorted array of integers
+	 * @param item the integer
+	 * @return true if the item appears in the array, false otherwise
+	 */
+	public static boolean contains(int[] itemset, int item) {
+		// for each item in the itemset
+		for (int i = 0; i < itemset.length; i++) {
+			// if the item is found, return true
+			if (itemset[i] == item) {
+				return true;
+			// if the current item is larger, the searched item cannot
+			// appear later because the array is sorted
+			} else if (itemset[i] > item) {
+				return false;
+			}
+		}
+		// not found, return false
+		return false;
+	}
+
+	/** A Comparator for comparing two itemsets having the same size using the lexical order. */
+	public static Comparator<int[]> comparatorItemsetSameSize = new Comparator<int[]>() {
+		@Override
+		/**
+		 * Compare two itemsets and return -1, 0 or 1 if the first itemset
+		 * is smaller, equal or larger than the second itemset according to the lexical order.
+		 */
+		public int compare(int[] itemset1, int[] itemset2) {
+			// for each item in the first itemset
+			for (int i = 0; i < itemset1.length; i++) {
+				// if the current item is smaller in the first itemset
+				if (itemset1[i] < itemset2[i]) {
+					return -1; // then the first itemset is smaller
+					// if the current item is larger in the first itemset
+				} else if (itemset2[i] < itemset1[i]) {
+					return 1; // then the first itemset is larger
+				}
+				// otherwise they are equal, so the next item in both itemsets is compared.
+			}
+			return 0; // both itemsets are equal
+		}
+	};
+
+	/**
+	 * Append an integer at the end of an array of integers.
+	 * @param array the array
+	 * @param integer the integer
+	 * @return a new array
+	 */
+	public static int[] appendIntegerToArray(int[] array, int integer) {
+		int[] newgen = new int[array.length + 1];
+		System.arraycopy(array, 0, newgen, 0, array.length);
+		newgen[array.length] = integer;
+		return newgen;
+	}
+
+	/**
+	 * Convert a string array to a double array
+	 * @param tokens a string array
+	 * @return a double array
+	 */
+	public static double[] convertStringArrayToDoubleArray(String[] tokens) {
+		double[] numbers = new double[tokens.length];
+
+		// parse each token as a double
+		for (int i = 0; i < tokens.length; i++) {
+			numbers[i] = Double.parseDouble(tokens[i]);
+		}
+		return numbers;
+	}
+
+	/**
+	 * Check if a sorted list of items is a subset of a sorted array of items.
+	 * @param itemset1 the list of items
+	 * @param itemset2 the array of items
+	 * @return true if itemset1 is a subset of itemset2
+	 */
+	public static boolean isSubsetOf(List<Short> itemset1, Short[] itemset2) {
+		if (itemset1 == null || itemset1.size() == 0) {
+			return true;
+		}
+		for (short val : itemset1) {
+			boolean found = false;
+			for (short value : itemset2) {
+				// Makes use of the lexicographic order to be faster
+				if (value > val) {
+					return false;
+				} else if (val == value) {
+					found = true;
+					break;
+				}
+			}
+			if (!found)
+				return false;
+		}
+		// itemset1 is a subset of itemset2
+		return true;
+	}
+
+	/**
+	 * Concatenates two sets of items
+	 *
+	 * @param itemset1 first set to join
+	 * @param itemset2 second set to join
+	 * @return the concatenation of both sets of items
+	 */
+	public static Short[] concatenate(Short[] itemset1, Short[] itemset2) {
+		Short[] concatenation = new Short[itemset1.length + itemset2.length];
+		System.arraycopy(itemset1, 0, concatenation, 0, itemset1.length);
+		System.arraycopy(itemset2, 0, concatenation, itemset1.length, itemset2.length);
+		return concatenation;
+	}
+
+}
diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/Itemset.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/Itemset.java
new file mode 100644
index 00000000..888eb896
--- /dev/null
+++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/Itemset.java
@@ -0,0 +1,175 @@
+package i5.las2peer.services.ocd.spmf;
+
+import java.util.Arrays;
+import java.util.List;
+
+/* This file is copyright (c) 2008-2012 Philippe Fournier-Viger
+*
+* This file is part of the SPMF DATA MINING SOFTWARE
+* (http://www.philippe-fournier-viger.com/spmf).
+*
+* SPMF is free software: you can redistribute it and/or modify it under the
+* terms of the GNU General Public License as published by the Free Software
+* Foundation, either version 3 of the License, or (at your option) any later
+* version.
+* SPMF is distributed in the hope that it will be useful, but WITHOUT ANY
+* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+* A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License along with
+* SPMF. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+/**
+ * This class represents an itemset (a set of items) implemented as an array of integers with
+ * a variable to store the support count of the itemset.
+ *
+ * @author Philippe Fournier-Viger
+ */
+public class Itemset extends AbstractOrderedItemset {
+	/** the array of items **/
+	public int[] itemset;
+
+	/** the support of this itemset */
+	public int support = 0;
+
+	/**
+	 * Get the items as an array
+	 * @return the items
+	 */
+	public int[] getItems() {
+		return itemset;
+	}
+
+	/**
+	 * Constructor
+	 */
+	public Itemset() {
+		itemset = new int[]{};
+	}
+
+	/**
+	 * Constructor
+	 * @param item an item that should be added to the new itemset
+	 */
+	public Itemset(int item) {
+		itemset = new int[]{item};
+	}
+
+	/**
+	 * Constructor
+	 * @param items an array of items that should be added to the new itemset
+	 */
+	public Itemset(int[] items) {
+		this.itemset = items;
+	}
+
+	/**
+	 * Constructor
+	 * @param itemset a list of Integer representing items in the itemset
+	 * @param support the support of the itemset
+	 */
+	public Itemset(List<Integer> itemset, int support) {
+		this.itemset = new int[itemset.size()];
+		int i = 0;
+		for (Integer item : itemset) {
+			this.itemset[i++] = item.intValue();
+		}
+		this.support = support;
+	}
+
+	/**
+	 * Get the support of this itemset
+	 */
+	public int getAbsoluteSupport() {
+		return support;
+	}
+
+	/**
+	 * Get the size of this itemset
+	 */
+	public int size() {
+		return itemset.length;
+	}
+
+	/**
+	 * Get the item at a given position in this itemset
+	 */
+	public Integer get(int position) {
+		return itemset[position];
+	}
+
+	/**
+	 * Set the support of this itemset
+	 * @param support the support
+	 */
+	public void setAbsoluteSupport(Integer support) {
+		this.support = support;
+	}
+
+	/**
+	 * Increase the support of this itemset by 1
+	 */
+	public void increaseTransactionCount() {
+		this.support++;
+	}
+
+	/**
+	 * Make a copy of this itemset but exclude a given item
+	 * @param itemToRemove the given item
+	 * @return the copy
+	 */
+	public Itemset cloneItemSetMinusOneItem(Integer itemToRemove) {
+		// create the new itemset
+		int[] newItemset = new int[itemset.length - 1];
+		int i = 0;
+		// for each item in this itemset
+		for (int j = 0; j < itemset.length; j++) {
+			// copy the item except if it is the item that should be excluded
+			if (itemset[j] != itemToRemove) {
+				newItemset[i++] = itemset[j];
+			}
+		}
+		return new Itemset(newItemset); // return the copy
+	}
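A brief usage sketch of the Itemset accessors defined so far (editorial, not part of the diff):

import i5.las2peer.services.ocd.spmf.Itemset;

public class ItemsetSketch {
    public static void main(String[] args) {
        Itemset itemset = new Itemset(new int[]{1, 2});
        itemset.setAbsoluteSupport(3);        // observed in 3 transactions
        itemset.increaseTransactionCount();   // one more occurrence
        System.out.println(itemset.size());                // 2
        System.out.println(itemset.getAbsoluteSupport());  // 4
        System.out.println(itemset.cloneItemSetMinusOneItem(2).size()); // 1
    }
}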
+
+	/**
+	 * Make a copy of this itemset but exclude a set of items
+	 * @param itemsetToNotKeep the set of items to be excluded
+	 * @return the copy
+	 */
+	public Itemset cloneItemSetMinusAnItemset(Itemset itemsetToNotKeep) {
+		// create a new itemset
+		int[] newItemset = new int[itemset.length - itemsetToNotKeep.size()];
+		int i = 0;
+		// for each item of this itemset
+		for (int j = 0; j < itemset.length; j++) {
+			// copy the item unless it is one of those that should be excluded
+			if (itemsetToNotKeep.contains(itemset[j]) == false) {
+				newItemset[i++] = itemset[j];
+			}
+		}
+		return new Itemset(newItemset); // return the copy
+	}
+
+	/**
+	 * This method returns an itemset containing the items that are included
+	 * both in this itemset and in a given itemset
+	 * @param itemset2 the given itemset
+	 * @return the new itemset
+	 */
+	public Itemset intersection(Itemset itemset2) {
+		int[] intersection = ArraysAlgos.intersectTwoSortedArrays(this.getItems(), itemset2.getItems());
+		return new Itemset(intersection);
+	}
+
+	@Override
+	public int hashCode() {
+		return Arrays.hashCode(itemset);
+	}
+}
diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/Itemsets.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/Itemsets.java
new file mode 100644
index 00000000..729d9ad4
--- /dev/null
+++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/Itemsets.java
@@ -0,0 +1,115 @@
+package i5.las2peer.services.ocd.spmf;
+
+/* This file is copyright (c) 2008-2012 Philippe Fournier-Viger
+*
+* This file is part of the SPMF DATA MINING SOFTWARE
+* (http://www.philippe-fournier-viger.com/spmf).
+*
+* SPMF is free software: you can redistribute it and/or modify it under the
+* terms of the GNU General Public License as published by the Free Software
+* Foundation, either version 3 of the License, or (at your option) any later
+* version.
+* SPMF is distributed in the hope that it will be useful, but WITHOUT ANY
+* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+* A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+* You should have received a copy of the GNU General Public License along with
+* SPMF. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * This class represents a set of itemsets, where an itemset is an array of integers
+ * with an associated support count. Itemsets are ordered by size. For
+ * example, level 1 means itemsets of size 1 (that contain 1 item).
+ *
+ * @author Philippe Fournier-Viger
+ */
+public class Itemsets {
+	/** We store the itemsets in a list named "levels".
+	 Position i in "levels" contains the list of itemsets of size i */
+	private final List<List<Itemset>> levels = new ArrayList<List<Itemset>>();
+	/** the total number of itemsets **/
+	private int itemsetsCount = 0;
+	/** a name that we give to these itemsets (e.g. "frequent itemsets") */
+	private String name;
+
+	/**
+	 * Constructor
+	 * @param name the name of these itemsets
+	 */
+	public Itemsets(String name) {
+		this.name = name;
+		levels.add(new ArrayList<Itemset>()); // We create an empty level 0 by default.
+	}
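How the level structure is meant to be used, together with the accessors defined just below (editorial sketch, not part of the diff):

import i5.las2peer.services.ocd.spmf.Itemset;
import i5.las2peer.services.ocd.spmf.Itemsets;

public class LevelsSketch {
    public static void main(String[] args) {
        Itemsets patterns = new Itemsets("frequent closed itemsets");
        patterns.addItemset(new Itemset(new int[]{4}), 1);    // stored in level 1
        patterns.addItemset(new Itemset(new int[]{4, 6}), 2); // stored in level 2
        System.out.println(patterns.getItemsetsCount());         // 2
        System.out.println(patterns.getLevels().get(2).size());  // 1
    }
}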
+
+	/* (non-Javadoc)
+	 * @see ca.pfv.spmf.patterns.itemset_array_integers_with_count.AbstractItemsets#printItemsets(int)
+	 */
+	public void printItemsets(int nbObject) {
+		System.out.println(" ------- " + name + " -------");
+		int patternCount = 0;
+		int levelCount = 0;
+		// for each level (a level is a set of itemsets having the same number of items)
+		for (List<Itemset> level : levels) {
+			// print how many items are contained in this level
+			System.out.println("  L" + levelCount + " ");
+			// for each itemset
+			for (Itemset itemset : level) {
+//				Arrays.sort(itemset.getItems());
+				// print the itemset
+				System.out.print("  pattern " + patternCount + ":  ");
+				itemset.print();
+				// print the support of this itemset
+				System.out.print("support :  " + itemset.getAbsoluteSupport());
+//						+ itemset.getRelativeSupportAsString(nbObject));
+				patternCount++;
+				System.out.println("");
+			}
+			levelCount++;
+		}
+		System.out.println(" --------------------------------");
+	}
+
+	/* (non-Javadoc)
+	 * @see ca.pfv.spmf.patterns.itemset_array_integers_with_count.AbstractItemsets#addItemset(ca.pfv.spmf.patterns.itemset_array_integers_with_count.Itemset, int)
+	 */
+	public void addItemset(Itemset itemset, int k) {
+		while (levels.size() <= k) {
+			levels.add(new ArrayList<Itemset>());
+		}
+		levels.get(k).add(itemset);
+		itemsetsCount++;
+	}
+
+	/* (non-Javadoc)
+	 * @see ca.pfv.spmf.patterns.itemset_array_integers_with_count.AbstractItemsets#getLevels()
+	 */
+	public List<List<Itemset>> getLevels() {
+		return levels;
+	}
+
+	/* (non-Javadoc)
+	 * @see ca.pfv.spmf.patterns.itemset_array_integers_with_count.AbstractItemsets#getItemsetsCount()
+	 */
+	public int getItemsetsCount() {
+		return itemsetsCount;
+	}
+
+	/* (non-Javadoc)
+	 * @see ca.pfv.spmf.patterns.itemset_array_integers_with_count.AbstractItemsets#setName(java.lang.String)
+	 */
+	public void setName(String newName) {
+		name = newName;
+	}
+
+	/* (non-Javadoc)
+	 * @see ca.pfv.spmf.patterns.itemset_array_integers_with_count.AbstractItemsets#decreaseItemsetCount()
+	 */
+	public void decreaseItemsetCount() {
+		itemsetsCount--;
+	}
+}
diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/MemoryLogger.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/MemoryLogger.java
new file mode 100644
index 00000000..7398cef0
--- /dev/null
+++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/spmf/MemoryLogger.java
@@ -0,0 +1,72 @@
+package i5.las2peer.services.ocd.spmf;
+/*
+ * Copyright (c) 2008-2012 Philippe Fournier-Viger
+ *
+ * This file is part of the SPMF DATA MINING SOFTWARE
+ * (http://www.philippe-fournier-viger.com/spmf).
+ *
+ * SPMF is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * SPMF is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with SPMF. If not, see <http://www.gnu.org/licenses/>.
+ */
+
+/**
+ * This class is used to record the maximum memory usage of an algorithm during
+ * a given execution.
+ * It is implemented by using the "singleton" design pattern.
+ * + */ +public class MemoryLogger { + + // the only instance of this class (this is the "singleton" design pattern) + private static MemoryLogger instance = new MemoryLogger(); + + // variable to store the maximum memory usage + private double maxMemory = 0; + + /** + * Method to obtain the only instance of this class + * @return instance of MemoryLogger + */ + public static MemoryLogger getInstance(){ + return instance; + } + + /** + * To get the maximum amount of memory used until now + * @return a double value indicating memory as megabytes + */ + public double getMaxMemory() { + return maxMemory; + } + + /** + * Reset the maximum amount of memory recorded. + */ + public void reset(){ + maxMemory = 0; + } + + /** + * Check the current memory usage and record it if it is higher + * than the amount of memory previously recorded. + * @return the memory usage in megabytes + */ + public double checkMemory() { + double currentMemory = (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) + / 1024d / 1024d; + if (currentMemory > maxMemory) { + maxMemory = currentMemory; + } + return currentMemory; + } +} diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/Database.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/Database.java index 94a2f0fd..c6ea98f5 100644 --- a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/Database.java +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/Database.java @@ -1,53 +1,28 @@ package i5.las2peer.services.ocd.utils; -import java.util.List; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Properties; -import java.util.logging.Level; - -import java.util.Map; -import java.util.HashMap; -import java.util.Set; -import java.util.HashSet; -import java.util.Collections; - -import i5.las2peer.services.ocd.centrality.data.CentralityMeta; -import i5.las2peer.services.ocd.cooperation.data.simulation.*; -import i5.las2peer.services.ocd.metrics.OcdMetricLog; -import i5.las2peer.services.ocd.metrics.OcdMetricLogId; -import i5.las2peer.logging.L2pLogger; -import i5.las2peer.services.ocd.centrality.data.CentralityCreationLog; -import i5.las2peer.services.ocd.centrality.data.CentralityMap; -import i5.las2peer.services.ocd.centrality.data.CentralityMapId; -import i5.las2peer.services.ocd.graphs.*; - - -import com.arangodb.ArangoDB; -import com.arangodb.ArangoDatabase; -import com.arangodb.DbName; -import com.arangodb.ArangoCollection; -import com.arangodb.mapping.ArangoJack; +import com.arangodb.*; import com.arangodb.entity.BaseDocument; import com.arangodb.entity.CollectionType; import com.arangodb.entity.StreamTransactionEntity; -import com.arangodb.model.AqlQueryOptions; -import com.arangodb.model.CollectionCreateOptions; -import com.arangodb.model.StreamTransactionOptions; -import com.arangodb.model.DocumentCreateOptions; -import com.arangodb.model.DocumentReadOptions; -import com.arangodb.model.DocumentDeleteOptions; -import com.arangodb.ArangoCursor; - - +import com.arangodb.mapping.ArangoJack; +import com.arangodb.model.*; import com.fasterxml.jackson.databind.ObjectMapper; - +import i5.las2peer.logging.L2pLogger; +import i5.las2peer.services.ocd.centrality.data.CentralityCreationLog; +import i5.las2peer.services.ocd.centrality.data.CentralityMap; +import i5.las2peer.services.ocd.centrality.data.CentralityMeta; +import i5.las2peer.services.ocd.cooperation.data.simulation.SimulationDataset; +import 
i5.las2peer.services.ocd.cooperation.data.simulation.SimulationSeries;
+import i5.las2peer.services.ocd.cooperation.data.simulation.SimulationSeriesGroup;
+import i5.las2peer.services.ocd.graphs.*;
+import i5.las2peer.services.ocd.metrics.OcdMetricLog;
+import i5.las2peer.services.ocd.metrics.OcdMetricLogId;
+import org.apache.commons.io.FileUtils;
 
 import java.io.File;
 import java.io.IOException;
-
-
-import org.apache.commons.io.FileUtils;
+import java.util.*;
+import java.util.logging.Level;
 
 public class Database {
@@ -396,7 +371,7 @@ public ArrayList getGraphMetaDataEfficiently(String username, i
 		String queryStr = "FOR g IN " + CustomGraph.collectionName + " FOR gcl IN " + GraphCreationLog.collectionName +
 				" FILTER g." + CustomGraph.userColumnName + " == @username AND gcl._key == g." + CustomGraph.creationMethodKeyColumnName +
 				" AND gcl." + GraphCreationLog.statusIdColumnName +" IN " + executionStatusIds +
-				" AND 8 NOT IN g." + CustomGraph.typesColumnName +
+				" AND 7 NOT IN g." + CustomGraph.typesColumnName +
 				" LIMIT " + firstIndex + "," + length + " RETURN "+
 				"{\"key\" : g._key," +
 				"\"userName\" : g." + CustomGraph.userColumnName + "," +
@@ -535,7 +510,7 @@ public List getGraphs(String username, int firstIndex, int length,
 		AqlQueryOptions queryOpt = new AqlQueryOptions().streamTransactionId(transId);
 		String queryStr = "FOR g IN " + CustomGraph.collectionName + " FOR gcl IN " + GraphCreationLog.collectionName +
 				" FILTER g." + CustomGraph.userColumnName + " == @username AND gcl._key == g." + CustomGraph.creationMethodKeyColumnName +
-				" AND gcl." + GraphCreationLog.statusIdColumnName +" IN " +
+				" AND gcl." + GraphCreationLog.statusIdColumnName +" IN " + executionStatusIds +
 				" LIMIT " + firstIndex + "," + length + " RETURN g._key";
 		Map<String, Object> bindVars = Collections.singletonMap("username",username);
 		ArangoCursor<String> graphKeys = db.query(queryStr, bindVars, queryOpt, String.class);
@@ -720,12 +695,18 @@ public void deleteMultiplexGraph(String username, String graphKey, ThreadHandler
 			BaseDocument bd = graphCollection.getDocument(graphKey, BaseDocument.class, readOpt);
 			String gclKey = bd.getAttribute(MultiplexGraph.creationMethodKeyColumnName).toString();
 
+			//delete the GraphCreationLog
 			ArangoCollection gclCollection = db.collection(GraphCreationLog.collectionName);
-			gclCollection.deleteDocument(gclKey, null, deleteOpt);	//delete the GraphCreationLog
+			gclCollection.deleteDocument(gclKey, null, deleteOpt);
 
+			//delete all layers
 			for(String layerKey : (List<String>)bd.getAttribute(MultiplexGraph.layerKeysColumnName)) {
 				deleteGraph(username, layerKey, threadHandler);
-			}	//delete all layers
+			}
+
+			//delete the representative custom graph
+			String representiveKey = bd.getAttribute(MultiplexGraph.representiveGraphKeyColumnName).toString();
+			deleteGraph(username, representiveKey, threadHandler);
 
 			String query = "FOR c IN " + Cover.collectionName + " FILTER c."
				+ Cover.graphKeyColumnName + " == \"" + graphKey +"\" RETURN c._key";
@@ -734,7 +715,8 @@
 			deleteCover(coverKey, transId);
 		}
 
-		graphCollection.deleteDocument(graphKey, null, deleteOpt);	//delete the graph
+		//delete the multiplex graph
+		graphCollection.deleteDocument(graphKey, null, deleteOpt);
 		db.commitStreamTransaction(transId);
 	}catch(Exception e) {
 		db.abortStreamTransaction(transId);
@@ -1039,7 +1021,62 @@ public List<String> getAllCoverKeys() {
 		}
 		return covers;
 	}
-
+
+
+	/**
+	 * @param graphKey
+	 *            the key of the graph
+	 * @return the list of covers associated with the graph's layers
+	 */
+	public List<Cover> getLayerCovers(String graphKey) {
+		String transId = getTransactionId(null, false);
+		AqlQueryOptions queryOpt = new AqlQueryOptions().streamTransactionId(transId);
+		DocumentReadOptions readOpt = new DocumentReadOptions().streamTransactionId(transId);
+
+		List<Cover> covers = new ArrayList<Cover>();
+		Map<String, CustomGraph> graphMap = new HashMap<String, CustomGraph>();
+		Set<String> graphKeySet = new HashSet<String>();
+		try {
+			ArangoCollection coverColl = db.collection(Cover.collectionName);
+			Map<String, Object> bindVars;
+			String queryStr = " FOR c IN " + Cover.collectionName + " FOR a IN " + CoverCreationLog.collectionName + " FILTER c." + Cover.graphKeyColumnName + " == @gKey RETURN DISTINCT c._key";
+			bindVars = Collections.singletonMap("gKey", graphKey);
+			ArangoCursor<String> coverKeys = db.query(queryStr, bindVars, queryOpt, String.class);
+			List<String> keyList = coverKeys.asListRemaining();
+
+			//insert the graph keys into a set to ensure that no graph appears more than once
+			for (String cKey : keyList) {
+				BaseDocument bd = coverColl.getDocument(cKey, BaseDocument.class, readOpt);
+				String gKey = bd.getAttribute(Cover.graphKeyColumnName).toString();
+				graphKeySet.add(gKey);
+			}
+			if(graphKeySet.size()==1) {
+				CustomGraph g = CustomGraph.load(graphKeySet.iterator().next(), db, transId);
+				//if(username.equals(g.getUserName())) {
+				//	for(String cKey : keyList) {
+				//		covers.add(Cover.load(cKey, g, db, transId));
+				//	}
+			}else {	//load each cover with its associated graph
+				for(String gk : graphKeySet) {
+					graphMap.put(gk, CustomGraph.load(gk, db, transId));
+				}
+				//for(String cKey : keyList) {
+				//	BaseDocument bd = coverColl.getDocument(cKey, BaseDocument.class, readOpt);
+				//	String gKey = bd.getAttribute(Cover.graphKeyColumnName).toString();
+				//	CustomGraph g = graphMap.get(gKey);
+				//	covers.add(Cover.load(cKey, g, db, transId));
+				//}
+			}
+			db.commitStreamTransaction(transId);
+		}catch(Exception e) {
+			db.abortStreamTransaction(transId);
+			System.out.println("transaction abort");
+			throw e;
+		}
+		return covers;
+	}
+
 	/**
 	 * Updates a persisted cover by updating its attributes, creation and metric logs,
 	 * and by deleting and restoring the communities
diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/MultiplexAlgorithmRunnable.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/MultiplexAlgorithmRunnable.java
new file mode 100644
index 00000000..dc2c1584
--- /dev/null
+++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/MultiplexAlgorithmRunnable.java
@@ -0,0 +1,104 @@
+package i5.las2peer.services.ocd.utils;
+
+import i5.las2peer.services.ocd.algorithms.OcdMultiplexAlgorithm;
+import i5.las2peer.services.ocd.algorithms.OcdAlgorithmExecutor;
+import i5.las2peer.services.ocd.graphs.Cover;
+import i5.las2peer.services.ocd.graphs.CoverId;
+import i5.las2peer.services.ocd.graphs.CustomGraphId;
+
+import java.util.logging.Level;
+
+/**
+ * Runnable for the execution of multiplex OCD algorithms.
+ * @author Sebastian + * + */ +public class MultiplexAlgorithmRunnable implements Runnable { + + /** + * The persisted cover reserved for the algorithm result. + */ + private Cover cover; + /** + * The algorithm to execute. + */ + private OcdMultiplexAlgorithm algorithm; + /** + * The component node count filter used by the OcdAlgorithmExecutor. + */ + private int componentNodeCountFilter; + /** + * The thread handler in charge of the runnable execution. + */ + private ThreadHandler threadHandler; + + /** + * Creates a new instance. + * @param cover Sets the cover. + * @param algorithm Sets the algorithm. + * @param componentNodeCountFilter Sets the component node count filter. + * @param threadHandler Sets the thread handler. + */ + public MultiplexAlgorithmRunnable(Cover cover, OcdMultiplexAlgorithm algorithm, int componentNodeCountFilter, ThreadHandler threadHandler) { + this.algorithm = algorithm; + this.cover = cover; + this.componentNodeCountFilter = componentNodeCountFilter; + this.threadHandler = threadHandler; + } + + @Override + public void run() { + boolean error = false; + /* + * Set algorithm state to running. + */ + String cKey = cover.getKey(); + String gKey = cover.getGraph().getKey(); + String user = cover.getGraph().getUserName(); + CustomGraphId graphId = new CustomGraphId(gKey, user); + CoverId coverId = new CoverId(cKey, graphId); + + RequestHandler requestHandler = new RequestHandler(); + + Database database = new Database(false); + try { + Cover c = database.getCover(user, gKey, cKey); + if(c == null) { + //System.out.println("Cover in AR run was null " + user + gKey + cKey); + /* + * Should not happen. + */ + requestHandler.log(Level.SEVERE, "Cover deleted while algorithm running."); + throw new IllegalStateException(); + } + c.getCreationMethod().setStatus(ExecutionStatus.RUNNING); + database.updateCoverCreationLog(c); + } catch( Exception e ) { + error = true; + } + + /* + * Run algorithm. + */ + Cover resultCover = null; + if(!error) { + OcdAlgorithmExecutor executor = new OcdAlgorithmExecutor(); + try { + //resultCover = executor.executeMultiplex(cover.getGraph(), algorithm, componentNodeCountFilter); + if(Thread.interrupted()) { + throw new InterruptedException(); + } + } + catch (InterruptedException e) { + return; + } + catch (Exception e) { + requestHandler.log(Level.SEVERE, "Algorithm Failure.", e); + error = true; + } + } + threadHandler.createCover(resultCover, coverId, error); + + } + +} diff --git a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/ThreadHandler.java b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/ThreadHandler.java index b11af64e..516e6eb3 100644 --- a/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/ThreadHandler.java +++ b/rest_ocd_services/src/main/java/i5/las2peer/services/ocd/utils/ThreadHandler.java @@ -101,14 +101,14 @@ public void runAlgorithm(Cover cover, OcdAlgorithm algorithm, int componentNodeC * @param componentNodeCountFilter The node count filter used by the OcdAlgorithmExecutor. 
*/ public void runMultiplexAlgorithm(Cover cover, OcdMultiplexAlgorithm algorithm, int componentNodeCountFilter) { - //CustomGraphId gId = new CustomGraphId(cover.getGraph().getKey(), cover.getGraph().getUserName()); - //CoverId coverId = new CoverId(cover.getKey(), gId); - //AlgorithmRunnable runnable = new AlgorithmRunnable(cover, algorithm, componentNodeCountFilter, this); - //CoverCreationLog log = cover.getCreationMethod(); - //synchronized (algorithms) { - // Future future = executor.submit(runnable, log); - // algorithms.put(coverId, future); - //} + CustomGraphId gId = new CustomGraphId(cover.getGraph().getKey(), cover.getGraph().getUserName()); + CoverId coverId = new CoverId(cover.getKey(), gId); + MultiplexAlgorithmRunnable runnable = new MultiplexAlgorithmRunnable(cover, algorithm, componentNodeCountFilter, this); + CoverCreationLog log = cover.getCreationMethod(); + synchronized (algorithms) { + Future future = executor.submit(runnable, log); + algorithms.put(coverId, future); + } } /** diff --git a/rest_ocd_services/src/test/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexUnweightedEdgeListGraphInputAdapterTest.java b/rest_ocd_services/src/test/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexUnweightedEdgeListGraphInputAdapterTest.java index 19ebf3a9..1a328d70 100644 --- a/rest_ocd_services/src/test/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexUnweightedEdgeListGraphInputAdapterTest.java +++ b/rest_ocd_services/src/test/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexUnweightedEdgeListGraphInputAdapterTest.java @@ -27,12 +27,18 @@ public void testWithStringReader2Layers() throws AdapterException { MultiplexGraph multiplexGraph = inputAdapter.readGraph(); CustomGraph customGraph1 = multiplexGraph.getCustomGraphs().get("1"); CustomGraph customGraph2 = multiplexGraph.getCustomGraphs().get("2"); + CustomGraph representiveGraph = multiplexGraph.getRepresentiveGraph(); assertEquals(2, multiplexGraph.getLayerCount()); + assertEquals(4, multiplexGraph.getNodeCount()); + assertEquals(4, customGraph1.getNodeCount()); assertEquals(3, customGraph1.getEdgeCount()); + assertEquals(4, customGraph2.getNodeCount()); assertEquals(4, customGraph2.getEdgeCount()); + + assertEquals(4, representiveGraph.getNodeCount()); } @Test @@ -47,10 +53,15 @@ public void testWithStringReader1Layer() throws AdapterException { inputAdapter.setReader(reader); MultiplexGraph multiplexGraph = inputAdapter.readGraph(); CustomGraph customGraph1 = multiplexGraph.getCustomGraphs().get("1"); + CustomGraph representiveGraph = multiplexGraph.getRepresentiveGraph(); assertEquals(1, multiplexGraph.getLayerCount()); + assertEquals(4, multiplexGraph.getNodeCount()); + assertEquals(4, customGraph1.getNodeCount()); assertEquals(3, customGraph1.getEdgeCount()); + + assertEquals(4, representiveGraph.getNodeCount()); } } diff --git a/rest_ocd_services/src/test/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexWeightedEdgeListGraphInputAdapterTest.java b/rest_ocd_services/src/test/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexWeightedEdgeListGraphInputAdapterTest.java index 8ce2530a..6d1d5433 100644 --- a/rest_ocd_services/src/test/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexWeightedEdgeListGraphInputAdapterTest.java +++ b/rest_ocd_services/src/test/java/i5/las2peer/services/ocd/adapters/graphInput/MultiplexWeightedEdgeListGraphInputAdapterTest.java @@ -29,12 +29,18 @@ public void testWithStringReader2Layers() throws AdapterException { 
MultiplexGraph multiplexGraph = inputAdapter.readGraph(); CustomGraph customGraph1 = multiplexGraph.getCustomGraphs().get("1"); CustomGraph customGraph2 = multiplexGraph.getCustomGraphs().get("2"); + CustomGraph representiveGraph = multiplexGraph.getRepresentiveGraph(); assertEquals(2, multiplexGraph.getLayerCount()); + assertEquals(4, multiplexGraph.getNodeCount()); + assertEquals(4, customGraph1.getNodeCount()); assertEquals(3, customGraph1.getEdgeCount()); + assertEquals(4, customGraph2.getNodeCount()); assertEquals(4, customGraph2.getEdgeCount()); + + assertEquals(4, representiveGraph.getNodeCount()); } @Test @@ -49,10 +55,15 @@ public void testWithStringReader1Layer() throws AdapterException { inputAdapter.setReader(reader); MultiplexGraph multiplexGraph = inputAdapter.readGraph(); CustomGraph customGraph1 = multiplexGraph.getCustomGraphs().get("1"); + CustomGraph representiveGraph = multiplexGraph.getRepresentiveGraph(); assertEquals(1, multiplexGraph.getLayerCount()); + assertEquals(4, multiplexGraph.getNodeCount()); + assertEquals(4, customGraph1.getNodeCount()); assertEquals(3, customGraph1.getEdgeCount()); + + assertEquals(4, representiveGraph.getNodeCount()); } } diff --git a/time_seed.dat b/time_seed.dat index 053dae55..d05a270a 100644 --- a/time_seed.dat +++ b/time_seed.dat @@ -1 +1 @@ -21113554 \ No newline at end of file +21113789 \ No newline at end of file
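As a closing illustration of the ported MemoryLogger singleton that printStats reads from (editorial sketch, not part of the diff):

import i5.las2peer.services.ocd.spmf.MemoryLogger;

public class MemorySketch {
    public static void main(String[] args) {
        MemoryLogger.getInstance().reset();
        // an algorithm calls checkMemory() periodically inside its main loop;
        // the singleton keeps the maximum value observed
        MemoryLogger.getInstance().checkMemory();
        System.out.println(MemoryLogger.getInstance().getMaxMemory() + " mb");
    }
}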