@@ -1,21 +1,27 @@
package de.lmu.ifi.dbs.knowing.core.model;

import org.eclipse.sapphire.java.JavaType;
import org.eclipse.sapphire.java.JavaTypeConstraint;
import org.eclipse.sapphire.java.JavaTypeKind;
import org.eclipse.sapphire.java.JavaTypeName;
import org.eclipse.sapphire.modeling.IModelElement;
import org.eclipse.sapphire.modeling.ListProperty;
import org.eclipse.sapphire.modeling.ModelElementList;
import org.eclipse.sapphire.modeling.ModelElementType;
import org.eclipse.sapphire.modeling.ReferenceValue;
import org.eclipse.sapphire.modeling.Value;
import org.eclipse.sapphire.modeling.ValueProperty;
import org.eclipse.sapphire.modeling.annotations.GenerateImpl;
import org.eclipse.sapphire.modeling.annotations.Label;
import org.eclipse.sapphire.modeling.annotations.Reference;
import org.eclipse.sapphire.modeling.annotations.Required;
import org.eclipse.sapphire.modeling.annotations.Type;
import org.eclipse.sapphire.modeling.xml.annotations.XmlBinding;
import org.eclipse.sapphire.modeling.xml.annotations.XmlListBinding;

@GenerateImpl
public interface INode extends IModelElement {

ModelElementType TYPE = new ModelElementType(INode.class);

/* === Node ID === */
@@ -28,10 +34,10 @@ public interface INode extends IModelElement {
Value<String> getId();

void setId(String value);

/* === Type === */

@Type( base = NodeType.class )
@Type(base = NodeType.class)
@XmlBinding(path = "@type")
@Label(standard = "type")
@Required
@@ -40,29 +46,33 @@ public interface INode extends IModelElement {
Value<NodeType> getType();

void setType(String value);

void setType(NodeType value);

/* === Factory ID === */

@Type(base = JavaTypeName.class)
@Reference(target = JavaType.class)
@JavaTypeConstraint(kind = { JavaTypeKind.CLASS, JavaTypeKind.ABSTRACT_CLASS, JavaTypeKind.INTERFACE },
type = {"de.lmu.ifi.dbs.knowing.core.processing.TProcessor", "de.lmu.ifi.dbs.knowing.core.japi.IProcessor"})
@XmlBinding(path = "@factoryId")
@Label(standard = "factoryId")
@Required
ValueProperty PROP_FACTORY_ID = new ValueProperty(TYPE, "factoryId");

Value<String> getFactoryId();
ReferenceValue<JavaTypeName, JavaType> getFactoryId();

void setFactoryId(String value);



void setFactoryId(JavaTypeName value);

/* === Properties === */

@Type( base = IProperty.class )
@XmlListBinding(path = "properties", mappings = { @XmlListBinding.Mapping( element = "property", type = IProperty.class ) } )
@Label( standard = "Properties" )
@Type(base = IProperty.class)
@XmlListBinding(path = "properties", mappings = { @XmlListBinding.Mapping(element = "property", type = IProperty.class) })
@Label(standard = "Properties")
ListProperty PROP_PROPERTIES = new ListProperty(TYPE, "properties");

ListProperty PROP_PROPERTIES = new ListProperty( TYPE, "properties" );
ModelElementList<IProperty> getProperties();

ModelElementList<IProperty> getProperties();


}
@@ -157,7 +157,7 @@ trait TProcessor extends Actor with TSender with TConfigurable {
* <p>Just puts a warning on the console and prints out the message</p>
*
*/
def messageException(message: Any) = warning(this, "Unkown Message " + message)
def messageException(message: Any) = {} //warning(this, "Unkown Message " + message)

/**
* <p>Checks the dataset for class attribute in this order
@@ -8,6 +8,7 @@ import scala.collection.mutable.Set
import de.lmu.ifi.dbs.knowing.core.events._
import TSender._
import weka.core.Instances
import de.lmu.ifi.dbs.knowing.core.model.IEdge

/**
* <p>This actor is able to send Events to registered actors</p>
@@ -137,5 +138,5 @@ trait TSender { this: Actor =>
}

object TSender {
val DEFAULT_PORT = "default"
val DEFAULT_PORT = IEdge.DEFAULT_PORT
}
@@ -1,44 +1,46 @@
package de.lmu.ifi.dbs.knowing.core.service
import java.net.URL

import org.osgi.framework.Bundle

import de.lmu.ifi.dbs.knowing.core.graph.xml.DataProcessingUnit
import javax.xml.bind.annotation.XmlAccessorType
import javax.xml.bind.annotation.XmlRootElement
import javax.xml.bind.JAXBContext
import javax.xml.bind.JAXBException
import de.lmu.ifi.dbs.knowing.core.model.IDataProcessingUnit
import org.eclipse.sapphire.modeling.xml.XmlResourceStore
import org.eclipse.sapphire.modeling.UrlResourceStore
import org.eclipse.sapphire.modeling.ResourceStoreException
import org.eclipse.sapphire.modeling.xml.RootXmlResource

trait IDPUProvider {

def getDataProcessingUnits: Array[DataProcessingUnit]
def getDataProcessingUnits: Array[IDataProcessingUnit]

def getDataProcessingUnit(name: String): IDataProcessingUnit

def getDataProcessingUnit(name: String): DataProcessingUnit

def getURL(name: String): URL

}

class BundleDPUProvider(bundle: Bundle, dir: String = "/KNOWING-INF") extends IDPUProvider {

private var dpuMap: Map[String, (DataProcessingUnit, URL)] = Map()
private var dpuMap: Map[String, (IDataProcessingUnit, URL)] = Map()
init

/**
*
*
*/
def getDataProcessingUnits: Array[DataProcessingUnit] = dpuMap map { case (_, (dpu, _)) => dpu } toArray
def getDataProcessingUnits: Array[IDataProcessingUnit] = dpuMap map { case (_, (dpu, _)) => dpu } toArray

/**
* Doesn't handle non existing DPUs yet!
*/
def getDataProcessingUnit(name: String): DataProcessingUnit = {
def getDataProcessingUnit(name: String): IDataProcessingUnit = {
dpuMap.get(name) match {
case None => null
case Some(e) => e._1
}
}

/**
* Doesn't handle non existing DPUs yet!
*/
@@ -49,25 +51,27 @@ class BundleDPUProvider(bundle: Bundle, dir: String = "/KNOWING-INF") extends ID
if (entries == null)
return


var urls: List[URL] = Nil
while (entries.hasMoreElements)
urls = entries.nextElement.asInstanceOf[URL] :: urls

try {
val context = JAXBContext.newInstance(classOf[DataProcessingUnit])
val um = context.createUnmarshaller
val dpus = urls map (url => (um.unmarshal(url).asInstanceOf[DataProcessingUnit], url))
val dpus = urls map { url =>
val store = new XmlResourceStore(new UrlResourceStore(url));
val resource = new RootXmlResource(store)
val dpu: IDataProcessingUnit = IDataProcessingUnit.TYPE.instantiate(resource)
(dpu, url)
}
//TODO BundleDPUProvider => handle dpu's with identical name
dpuMap = dpus map {case (dpu, url) => (dpu.name, (dpu, url))} toMap
dpuMap = dpus map { case (dpu, url) => (dpu.getName.getContent, (dpu, url)) } toMap
} catch {
case e: JAXBException => e.printStackTrace
case e: ResourceStoreException => e.printStackTrace
case e: Exception => e.printStackTrace
}
}
}

object BundleDPUProvider {

def newInstance(bundle: Bundle): BundleDPUProvider = new BundleDPUProvider(bundle)
}
@@ -1,12 +1,11 @@
package de.lmu.ifi.dbs.knowing.core.service

import java.net.URI

import akka.actor.ActorRef
import de.lmu.ifi.dbs.knowing.core.factory.UIFactory
import de.lmu.ifi.dbs.knowing.core.graph.xml.DataProcessingUnit
import de.lmu.ifi.dbs.knowing.core.model.IDataProcessingUnit

trait IEvaluateService {

def evaluate(dpu: DataProcessingUnit, ui: UIFactory, execPath: URI): ActorRef
def evaluate(dpu: IDataProcessingUnit, ui: UIFactory, execPath: URI): ActorRef
}
@@ -1,21 +1,20 @@
package de.lmu.ifi.dbs.knowing.core.service.impl

import java.net.URI

import akka.actor.Actor.actorOf
import akka.actor.ActorRef
import de.lmu.ifi.dbs.knowing.core.events.Start
import de.lmu.ifi.dbs.knowing.core.factory.UIFactory
import de.lmu.ifi.dbs.knowing.core.graph.xml.DataProcessingUnit
import de.lmu.ifi.dbs.knowing.core.graph.GraphSupervisor
import de.lmu.ifi.dbs.knowing.core.service.IEvaluateService
import de.lmu.ifi.dbs.knowing.core.service.IFactoryDirectory
import de.lmu.ifi.dbs.knowing.core.model.IDataProcessingUnit

class EvaluateService extends IEvaluateService {

private var factoryDirectory: IFactoryDirectory = _

def evaluate(dpu: DataProcessingUnit, ui: UIFactory, execPath: URI): ActorRef = {
def evaluate(dpu: IDataProcessingUnit, ui: UIFactory, execPath: URI): ActorRef = {
val supervisor = actorOf(new GraphSupervisor(dpu,ui, execPath, factoryDirectory)).start
supervisor ! Start
supervisor
@@ -2,6 +2,7 @@ package de.lmu.ifi.dbs.knowing.core.util

import de.lmu.ifi.dbs.knowing.core.model._
import scala.collection.JavaConversions._
import java.util.Properties

object DPUUtil {

@@ -44,9 +45,16 @@ object DPUUtil {
dpu.getNodes.toList filter (node => node.getType.equals(typ) && node.getFactoryId.equals(factory)) toArray
}

/**
 * Collects the key/value pairs attached to the given node into a
 * java.util.Properties instance.
 *
 * @param node the node whose IProperty children are read
 * @return a Properties object with one entry per node property
 */
def nodeProperties(node: INode): Properties = {
  val result = new Properties
  for (property <- node.getProperties)
    result.setProperty(property.getKey.getContent, property.getValue.getContent)
  result
}

/* =========================== */
/* ==== Edge util methods ==== */
/* =========================== */


}
@@ -1,15 +1,15 @@
package de.lmu.ifi.dbs.knowing.core.util

import org.osgi.framework.BundleContext
import org.osgi.framework.ServiceRegistration
import org.osgi.framework.ServiceRegistration
import org.osgi.framework.InvalidSyntaxException
import de.lmu.ifi.dbs.knowing.core.factory.TFactory
import de.lmu.ifi.dbs.knowing.core.processing._
import de.lmu.ifi.dbs.knowing.core.graph.xml.DataProcessingUnit
import OSGIUtil._
import de.lmu.ifi.dbs.knowing.core.internal.Activator
import de.lmu.ifi.dbs.knowing.core.service.IDPUProvider
import java.net.URL
import de.lmu.ifi.dbs.knowing.core.model.IDataProcessingUnit

/**
* <p>Util for (de)register DataMining-Factory-Services</p>
@@ -78,19 +78,19 @@ object OSGIUtil {
val PROCESSOR_CLASS = classOf[TProcessor].getName
val PRESENTER_CLASS = classOf[TPresenter[_]].getName

def registeredDPUs: Array[DataProcessingUnit] = {
def registeredDPUs: Array[IDataProcessingUnit] = {
val services = Activator.tracker.getServices
if(services == null)
return Array()
val provider = services map (_.asInstanceOf[IDPUProvider])
//FoldLeft function
val f = (p1: List[DataProcessingUnit], p2: IDPUProvider) => p1 ::: p2.getDataProcessingUnits.toList
val f = (p1: List[IDataProcessingUnit], p2: IDPUProvider) => p1 ::: p2.getDataProcessingUnits.toList
//Actual foldLeft
val dpus = (List[DataProcessingUnit]() /: provider)(f)
val dpus = (List[IDataProcessingUnit]() /: provider)(f)
dpus toArray
}

def registeredDPU(name: String): DataProcessingUnit = {
def registeredDPU(name: String): IDataProcessingUnit = {
val provider = Activator.tracker.getServices map (_.asInstanceOf[IDPUProvider])
val dpus = for (p <- provider if (p.getDataProcessingUnit(name) != null)) yield p.getDataProcessingUnit(name)
if (dpus.nonEmpty) dpus(0)
@@ -27,13 +27,16 @@ class CrossValidator extends TProcessor {
var fold = 0
var standalone = true

//TODO flags are not really perfect. Some have double meaning.

private var confusionMatrix: Instances = _
private var nonZeroValues: Array[Array[Int]] = Array()
private var classLabels: Array[String] = Array()
private var classifier: Option[ActorRef] = None
private var classifierTrained = false
private var filter: Option[ActorRef] = None
private var filterTrained = false
private var queriesFiltered = false

/** Instances to train classifier with filtered train-data */
private var numInstancesTrain = 0
@@ -42,6 +45,7 @@ class CrossValidator extends TProcessor {

private var numInstancesTest = 0
private var currentInstTest = 0
private var filteredTestData: Instances = _

private var first_run = true

@@ -54,7 +58,7 @@ class CrossValidator extends TProcessor {
}

def build(instances: Instances) {
// debug(this, "Build CrossValidator " + instances.relationName)
// debug(this, "Build CrossValidator " + instances.relationName)
val index = guessAndSetClassLabel(instances)
index match {
case -1 =>
@@ -91,6 +95,7 @@ class CrossValidator extends TProcessor {
//No filter, classifier gets trained directly
debug(this, "Build CrossValidator[unfiltered] with " + instances.relationName)
classifierTrained = true
queriesFiltered = true
startValidation(instances, classifier.get)
case false =>
debug(this, "Build CrossValidator[filtered] with" + instances.relationName)
@@ -100,60 +105,124 @@ class CrossValidator extends TProcessor {
}

/**
* <p> Process results. Merge with confusionMatrix </p>
* Standalone = true => Splits instances, train and test actor
* Standalone = false => Trains actor with instances
*/
def result(result: Instances, query: Instance) {
// If training data isn't completely filtered yet
if (numInstancesTrain != currentInstTrain && !classifierTrained) {
//Create if not existed
if (filteredTrainData == null) filteredTrainData = new Instances(result, numInstancesTrain)

val enum = result.enumerateInstances
while (enum.hasMoreElements) filteredTrainData.add(enum.nextElement.asInstanceOf[Instance])
currentInstTrain += 1
/**
 * Kicks off a validation run on the given processor.
 *
 * standalone == true  -> split instances into train/test folds for the current
 *                        fold, train the processor and immediately query it
 *                        with the test fold.
 * standalone == false -> only train the processor with all instances.
 *
 * @param instances data to validate on
 * @param processor actor (filter or classifier) that receives the events
 * @param filtered  when true the processor is a filter and is additionally
 *                  queried with the training data to produce filtered output
 */
private def startValidation(instances: Instances, processor: ActorRef, filtered: Boolean = false) {
  if (standalone) {
    val trainSet = instances.trainCV(folds, fold)
    // Train the processor (filter or classifier) first
    processor ! Results(trainSet)
    // A trained filter must also transform the training data
    if (filtered) processor ! Queries(trainSet)
    val testSet = instances.testCV(folds, fold)
    numInstancesTest = testSet.numInstances
    processor ! Queries(testSet)
  } else {
    processor ! Results(instances)
    numInstancesTrain = instances.numInstances
    // A trained filter must also transform the training data
    if (filtered) processor ! Queries(instances)
  }
}

var lastTrainResult = false
//Training data completely filtered, train classifier
if (numInstancesTrain == currentInstTrain && !classifierTrained) {
classifier.get ! Results(filteredTrainData)
filterTrained = true
classifierTrained = true
lastTrainResult = true
processStoredQueries
}
/**
* Process results. Merge with confusionMatrix.
*
* The results identified by the flags classifierTrained, queriesFiltered and
* the number of train and test instances have been received.
* If no filter is specified, filterTrained and queriesFiltered are automatically true.
*
* Data flow [unfiltered / no standalone]
* [1] classifier ! Results(train)
* [2] forward all queries directly to the classifier, without caching. Store numInstancesTest
* [3] Receive results, wait for the last instances to arrive, merge and send Results
*
* Data flow [filtered / no standalone]
* [1] filter ! Results(train) -> train filter
* [2] filter ! Queries(train) -> filter classifier train data. Store numInstancesTrain
* [3] Receive results, wait for last trained instances and then filter testInstances.
* [4] Receive results, wait for last trained instances and then query classifier.
* [5] Receive results, wait for last classified instance, merge and send Results
*
*
*/
def result(result: Instances, query: Instance) {

if (classifierTrained && !lastTrainResult) {
//Assume n*n matrix, |labels|==|instances|
if (result.size != classLabels.length)
warning(this, "ConfusionMatrix doesn't fit to result data")
val prob_attribute = result.attribute(ResultsUtil.ATTRIBUTE_PROBABILITY)

val classIndex = query.classIndex
val col = query.value(classIndex) toInt

for (row <- 0 until result.size) {
val entry = confusionMatrix.instance(row)
val new_value = result.instance(row).value(prob_attribute)
val old_value = entry.value(col)
val value = new_value match {
case 0 => old_value
case x =>
nonZeroValues(row)(col) = nonZeroValues(row)(col) + 1
x + old_value
}
entry.setValue(col, value)
val CurrentTrain = currentInstTrain + 1
val CurrentTest = currentInstTest + 1
(classifierTrained, queriesFiltered) match {

// Nothing trained or filtered yet
case (false, false) => numInstancesTrain match {
case CurrentTrain =>
debug(this, " [" + self.uuid + "] classifier train " + numInstancesTest)
classifier.get ! Results(filteredTrainData)
filterTrained = true
classifierTrained = true
numInstancesTrain = cacheSize
processStoredQueries
case _ =>
// If training data isn't completely filtered yet
// Create if not existed
if (filteredTrainData == null) filteredTrainData = new Instances(result, numInstancesTrain)

val enum = result.enumerateInstances
while (enum.hasMoreElements) filteredTrainData.add(enum.nextElement.asInstanceOf[Instance])
currentInstTrain += 1
// If training data isn't completely filtered yet
}
// debug(this, currentInst + "/" + numInstances + " of [" + fold + "/" + folds + "]")
currentInstTest += 1
//Send Results if currentInst processed is the total numInstances
if (currentInstTest == numInstancesTest) {
sendEvent(QueryResults(mergeResults, query))
// debug(this, "[" + fold + "/" + folds + "]" + confusionMatrix)
numInstancesTest = 0
currentInstTest = 0
statusChanged(Ready())

// Classifier was trained with filtered data, filter test data
case (true, false) => numInstancesTest match {
case CurrentTest =>
debug(this, " [" + self.uuid + "] classifier query " + numInstancesTrain)
queriesFiltered = true
currentInstTest = 0
numInstancesTest = filteredTestData.numInstances
classifier.get ! Queries(filteredTestData)
case _ =>
// If testing data isn't completely filtered yet
// Create if not existed
if (filteredTestData == null) filteredTestData = new Instances(result, numInstancesTest)

val enum = result.enumerateInstances
while (enum.hasMoreElements) filteredTestData.add(enum.nextElement.asInstanceOf[Instance])
currentInstTest += 1
}

// Classifier trained and test data filtered
case (true, true) =>
debug(this, "classifier result " + currentInstTest + " / " + numInstancesTest)
// Assume n*n matrix, |labels|==|instances|
if (result.size != classLabels.length)
warning(this, "ConfusionMatrix doesn't fit to result data")
val prob_attribute = result.attribute(ResultsUtil.ATTRIBUTE_PROBABILITY)

val classIndex = query.classIndex
val col = query.value(classIndex) toInt

for (row <- 0 until result.size) {
val entry = confusionMatrix.instance(row)
val new_value = result.instance(row).value(prob_attribute)
val old_value = entry.value(col)
val value = new_value match {
case 0 => old_value
case x =>
nonZeroValues(row)(col) = nonZeroValues(row)(col) + 1
x + old_value
}
entry.setValue(col, value)
}
currentInstTest += 1
// Send Results if currentInst processed is the total numInstances
if (currentInstTest == numInstancesTest) {
sendEvent(QueryResults(mergeResults, query))
numInstancesTest = 0
currentInstTest = 1
statusChanged(Ready())
}
}

}
@@ -179,14 +248,15 @@ class CrossValidator extends TProcessor {
* @returns Instances - ConfusionMatrix
*/
def query(query: Instance): Instances = {
(filter, filterTrained, classifier, classifierTrained) match {
case (_, _, None, _) => warning(this, "No classifier found")
(filter, filterTrained, classifier, classifierTrained, queriesFiltered) match {
case (_, _, None, _, _) => warning(this, "No classifier found")
//cache if filtered isn't trained yet
case (Some(f), false, Some(_), _) => cacheQuery(query)
case (Some(f), false, Some(_), _, _) => cacheQuery(query)
//forward to filter if exists
case (Some(f), true, Some(c), false) => f ! Query(query)
case (Some(f), true, Some(c), false, _) => f ! Query(query)
case (Some(f), true, Some(c), _, false) => f ! Query(query)
//forward directly to classifier
case (_, _, Some(c), true) => c ! Query(query)
case (_, _, Some(c), true, true) => c ! Query(query)
}
confusionMatrix
}
@@ -203,29 +273,6 @@ class CrossValidator extends TProcessor {
confusionMatrix
}

/**
* Standalone = true => Splits instances, train and test actor
* Standalone = false => Trains actor with instances
*/
private def startValidation(instances: Instances, processor: ActorRef, filtered: Boolean = false) {
standalone match {
case true =>
val train = instances.trainCV(folds, fold)
//Train filter
processor ! Results(train)
//Filter training data with trained filter
if (filtered) { processor ! Queries(train) }
val testSet = instances.testCV(folds, fold)
numInstancesTest = testSet.numInstances
processor ! Queries(testSet)
case false =>
processor ! Results(instances)
numInstancesTrain = instances.numInstances
//Filter training data with trained filter
if (filtered) { processor ! Queries(instances) }
}
}

/**
* Queries stored are forwarded to the specified classifier
*/
@@ -243,7 +290,7 @@ class CrossValidator extends TProcessor {
e._1 match {
case None => //nothing
//TODO Id must be send
case Some(_) => queries(e._2.queries)
case Some(_) => queries(e._2.queries)
}
}
}
@@ -293,6 +340,12 @@ class CrossValidator extends TProcessor {
index
}

/**
 * Number of query instances currently cached: every instance inside the
 * queued Queries events plus the single cached queries.
 */
private def cacheSize: Int =
  queryQueue.size + queriesQueue.map(_._2.queries.numInstances).sum

}

class CrossValidatorFactory extends TFactory {
@@ -28,7 +28,8 @@ Require-Bundle: org.eclipse.ui,
org.eclipse.help;bundle-version="3.5.0",
org.eclipse.sapphire.workspace;bundle-version="0.3.0",
org.eclipse.sapphire.workspace.ui;bundle-version="0.3.0",
org.eclipse.ui.views.properties.tabbed;bundle-version="3.5.100"
org.eclipse.ui.views.properties.tabbed;bundle-version="3.5.100",
org.eclipse.sapphire.java;bundle-version="0.3.0"
Bundle-RequiredExecutionEnvironment: JavaSE-1.6
Bundle-ActivationPolicy: lazy
Import-Package: scala;version="2.9.0.1",

This file was deleted.

@@ -132,7 +132,7 @@
</node>
<connection>
<id>node_node_connection</id>
<tool-palette-label>node connection</tool-palette-label>
<tool-palette-label>Edge</tool-palette-label>
<endpoint2>
<type>arrow</type>
</endpoint2>

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

This file was deleted.

File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -4,38 +4,38 @@
</description>
<tags>Cross, Validation, Cross Validation, ARFF, NaiveBayes</tags>
<nodes>
<node nodeType="processor"
<node type="processor"
factoryId="de.lmu.ifi.dbs.knowing.core.validation.XCrossValidator" id="validator">
<properties>
<item value="weka.classifiers.bayes.NaiveBayes" key="classifier" />
<item value="10" key="folds" />
<property key="classifier" value="weka.classifiers.bayes.NaiveBayes" />
<property key="folds" value="10" />

<item value="false" key="kernel-estimator" />
<item value="false" key="supervised-discretization" />
<property key="kernel-estimator" value="false" />
<property key="supervised-discretization" value="false" />
</properties>
</node>
<node nodeType="loader" factoryId="weka.core.converters.ArffLoader"
<node type="loader" factoryId="weka.core.converters.ArffLoader"
id="ARFF">
<properties>
<item value="iris.arff" key="file" />
<property value="iris.arff" key="file" />
</properties>
</node>
<node nodeType="presenter" factoryId="de.lmu.ifi.dbs.knowing.core.swt.TablePresenter"
<node type="presenter" factoryId="de.lmu.ifi.dbs.knowing.core.swt.TablePresenter"
id="TablePresenter">
<properties>
<item value="100" key="rows" />
<property value="100" key="rows" />
</properties>
</node>
<node nodeType="saver" factoryId="weka.core.converters.ArffSaver"
<node type="saver" factoryId="weka.core.converters.ArffSaver"
id="ArffOutput">
<properties>
<item value="output.saver.arff" key="file" />
<property value="output.saver.arff" key="file" />
</properties>
</node>
</nodes>
<edges>
<edge weight="1" targetId="validator" sourceId="ARFF" id="arff_validator" />
<edge weight="1" targetId="TablePresenter" sourceId="validator" id="validator_presenter" />
<edge weight="1" targetId="ArffOutput" sourceId="validator" id="validator_arffSaver" />
<edge weight="1" target="validator" source="ARFF" id="arff_validator" />
<edge weight="1" target="TablePresenter" source="validator" id="validator_presenter" />
<edge weight="1" target="ArffOutput" source="validator" id="validator_arffSaver" />
</edges>
</DataProcessingUnit>
@@ -3,15 +3,17 @@ package knowing.test
import de.lmu.ifi.dbs.knowing.core.service._
import de.lmu.ifi.dbs.knowing.core.factory.TFactory
import de.lmu.ifi.dbs.knowing.core.util.OSGIUtil
import org.osgi.framework.BundleContext
import org.osgi.framework.BundleActivator

import knowing.test.loader._
import knowing.test.processor._
import knowing.test.filter._
import org.osgi.util.tracker.ServiceTrackerCustomizer

import org.osgi.framework.ServiceReference
import org.osgi.util.tracker.ServiceTracker
import org.osgi.framework.ServiceRegistration
import org.osgi.framework.BundleContext
import org.osgi.framework.BundleActivator
import org.osgi.util.tracker.ServiceTrackerCustomizer
import org.osgi.util.tracker.ServiceTracker

class Activator extends BundleActivator {

@@ -28,7 +30,7 @@ class Activator extends BundleActivator {
util.registerProcessor(new SerializableProcessorFactory)
dpuService = context.registerService(classOf[IDPUProvider].getName, BundleDPUProvider.newInstance(context.getBundle), null)
val dpus = OSGIUtil.registeredDPUs
dpus foreach (dpu => println(dpu.name))
dpus foreach (dpu => println(dpu.getName.getContent))
}

def stop(context: BundleContext) = {