+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+/**
+ *
+ * Title: PelletCmdException
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletCmdException extends RuntimeException {
+
+    // RuntimeException is Serializable; declare an explicit version id.
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * Create an exception with the given error message.
+     *
+     * @param msg the error message shown to the command line user
+     */
+    public PelletCmdException(String msg) {
+        super( msg );
+    }
+
+    /**
+     * Create an exception wrapping the given cause.
+     *
+     * @param cause the underlying cause of this exception
+     */
+    public PelletCmdException(Throwable cause) {
+        super( cause );
+    }
+
+    /**
+     * Create an exception with the given error message and underlying cause.
+     *
+     * @param msg the error message shown to the command line user
+     * @param cause the underlying cause of this exception
+     */
+    public PelletCmdException(String msg, Throwable cause) {
+        super( msg, cause );
+    }
+}
diff --git a/cli/src/pellet/PelletCmdOption.java b/cli/src/pellet/PelletCmdOption.java
new file mode 100644
index 000000000..7bf05925f
--- /dev/null
+++ b/cli/src/pellet/PelletCmdOption.java
@@ -0,0 +1,280 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+/**
+ *
+ * Title: PelletCmdOption
+ *
+ *
+ * Description: Represents a pellet command line option, i.e. the option name,
+ * the long option name and the option value given on command line
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletCmdOption {
+
+    private String longOption;    // canonical long name, stored without leading hyphens
+    private String shortOption;   // optional short alias, also stored without hyphens
+    private String type;          // human readable description of the expected argument
+    private String description;   // help text printed in the usage message
+    private boolean isMandatory;  // true if the option must be present on the command line
+    private Object value;         // value given on the command line, null if absent
+    private Object defaultValue;  // fallback value used when no explicit value was given
+    private boolean exists;       // true if the option appeared on the command line
+    private PelletCmdOptionArg arg = PelletCmdOptionArg.NONE;
+
+    /**
+     * Create an option with the given long name (leading hyphens are removed).
+     *
+     * @param longOption the long name of the option, must not be null
+     * @throws PelletCmdException if no long option is given
+     */
+    public PelletCmdOption(String longOption) {
+        if( longOption == null )
+            throw new PelletCmdException(
+                    "A long option must be defined for a command line option" );
+
+        this.longOption = removeHyphen( longOption );
+        this.defaultValue = null;
+    }
+
+    public String getShortOption() {
+        return shortOption;
+    }
+
+    public String getLongOption() {
+        return longOption;
+    }
+
+    public void setShortOption(String shortOption) {
+        this.shortOption = removeHyphen( shortOption );
+    }
+
+    public String getType() {
+        return type;
+    }
+
+    public void setType(String type) {
+        this.type = type;
+    }
+
+    public void setDescription(String description) {
+        this.description = description;
+    }
+
+    public String getDescription() {
+        return description;
+    }
+
+    public void setDefaultValue(Object defaultValue) {
+        this.defaultValue = defaultValue;
+    }
+
+    public Object getDefaultValue() {
+        return defaultValue;
+    }
+
+    public Object getValue() {
+        return value;
+    }
+
+    /**
+     * Returns the option value as a string, falling back to the default value
+     * if no explicit value was given.
+     *
+     * @return the value (or default value) as a string, or null if neither exists
+     */
+    public String getValueAsString() {
+        if( value != null )
+            return value.toString();
+
+        if( defaultValue != null )
+            return defaultValue.toString();
+
+        return null;
+    }
+
+    /**
+     * Returns the option value as an integer and verifies that the value is a
+     * positive integer (>= 1).
+     *
+     * @return an integer value
+     * @throws PelletCmdException
+     *             If the option value does not exist or is a not a valid
+     *             positive integer value (>= 1)
+     */
+    public int getValueAsPositiveInteger() throws PelletCmdException {
+        return getValueAsInteger( 1, Integer.MAX_VALUE );
+    }
+
+    /**
+     * Returns the option value as an integer and verifies that the value is a
+     * non-negative integer (>= 0).
+     *
+     * @return an integer value
+     * @throws PelletCmdException
+     *             If the option value does not exist or is a not a valid
+     *             non-negative integer value (>= 0)
+     */
+    public int getValueAsNonNegativeInteger() throws PelletCmdException {
+        return getValueAsInteger( 0, Integer.MAX_VALUE );
+    }
+
+    /**
+     * Returns the option value as an integer
+     *
+     * @return an integer value
+     * @throws PelletCmdException
+     *             If the option value does not exist or is a not a valid
+     *             integer value
+     */
+    public int getValueAsInteger() throws PelletCmdException {
+        return getValueAsInteger( Integer.MIN_VALUE, Integer.MAX_VALUE );
+    }
+
+    /**
+     * Returns the option value as an integer and verifies that it is in the
+     * given range.
+     *
+     * @param minAllowed
+     *            Minimum allowed value for the integer (inclusive)
+     * @param maxAllowed
+     *            Maximum allowed value for the integer (inclusive)
+     * @return an integer value in the specified range
+     * @throws PelletCmdException
+     *             If the option value does not exist, is a not a valid integer
+     *             value, or not in the specified range
+     */
+    public int getValueAsInteger(int minAllowed, int maxAllowed) throws PelletCmdException {
+        String value = getValueAsString();
+
+        if( value == null ) {
+            throw new PelletCmdException( String.format(
+                    "The value for option <%s> does not exist%n", longOption ) );
+        }
+
+        try {
+            int intValue = Integer.parseInt( value );
+            if( intValue < minAllowed )
+                throw new PelletCmdException(
+                        String.format(
+                                "The value for option <%s> should be greater than or equal to %d but was: %d%n",
+                                longOption, minAllowed, intValue ) );
+
+            if( intValue > maxAllowed )
+                throw new PelletCmdException(
+                        String.format(
+                                "The value for option <%s> should be less than or equal to %d but was: %d%n",
+                                longOption, maxAllowed, intValue ) );
+            return intValue;
+        } catch( NumberFormatException e ) {
+            // Only NumberFormatException is caught here; the range-check
+            // PelletCmdExceptions above propagate to the caller unchanged.
+            throw new PelletCmdException( String.format(
+                    "The value for option <%s> is not a valid integer: %s%n",
+                    longOption, value ) );
+        }
+    }
+
+    /**
+     * Returns the string value as a boolean. If no value exists returns false
+     * by default.
+     *
+     * @return returns the string value as a boolean
+     */
+    public boolean getValueAsBoolean() {
+        String value = getValueAsString();
+
+        // Boolean.parseBoolean(null) is false, so a missing value means false
+        return Boolean.parseBoolean( value );
+    }
+
+    public void setValue(String value) {
+        this.value = value;
+    }
+
+    public void setValue(Boolean value) {
+        this.value = value;
+    }
+
+    public void setIsMandatory(boolean isMandatory) {
+        this.isMandatory = isMandatory;
+    }
+
+    public boolean isMandatory() {
+        return isMandatory;
+    }
+
+    /** Null-safe equality test used by {@link #equals(Object)}. */
+    private static boolean equal(Object a, Object b) {
+        return ( a == null ) ? ( b == null ) : a.equals( b );
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if( !(o instanceof PelletCmdOption) )
+            return false;
+
+        PelletCmdOption other = (PelletCmdOption) o;
+
+        // longOption is never null (enforced by the constructor)
+        return equal( shortOption, other.getShortOption() )
+                && longOption.equals( other.getLongOption() )
+                && equal( type, other.getType() )
+                && equal( description, other.getDescription() )
+                && isMandatory == other.isMandatory()
+                && equal( value, other.getValue() )
+                && equal( defaultValue, other.getDefaultValue() );
+    }
+
+    @Override
+    public int hashCode() {
+        // Based only on the option names; consistent with equals because
+        // equal options necessarily have equal long and short names.
+        int code = 0;
+        if( shortOption != null )
+            code += shortOption.hashCode();
+        if( longOption != null )
+            code += longOption.hashCode();
+        return code;
+    }
+
+    @Override
+    public String toString() {
+        return "[ " + longOption + ", " + shortOption + ", " + type + ", " + description + ", "
+                + isMandatory + ", " + value + ", " + defaultValue + " ]";
+    }
+
+    /**
+     * Strip all leading '-' characters from an option name. Returns the empty
+     * string if the input consists solely of hyphens.
+     */
+    private String removeHyphen(String option) {
+        int start = 0;
+        // Bounds check prevents StringIndexOutOfBoundsException for "", "-" or "--"
+        while( start < option.length() && option.charAt( start ) == '-' )
+            start++;
+
+        return option.substring( start );
+    }
+
+    public void setArg(PelletCmdOptionArg arg) {
+        this.arg = arg;
+    }
+
+    public PelletCmdOptionArg getArg() {
+        return arg;
+    }
+
+    /**
+     * Returns if the option exists in the command-line arguments. If the argument for this option
+     * is mandatory then this implies {@link #getValue()} will return a non-null value. If the
+     * argument for this option is optional then {@link #getValue()} may still return null.
+     *
+     * @return if the option exists in the command-line argument
+     */
+    public boolean exists() {
+        return exists || value != null;
+    }
+
+    public void setExists(boolean exists) {
+        this.exists = exists;
+    }
+}
diff --git a/cli/src/pellet/PelletCmdOptionArg.java b/cli/src/pellet/PelletCmdOptionArg.java
new file mode 100644
index 000000000..9736199d4
--- /dev/null
+++ b/cli/src/pellet/PelletCmdOptionArg.java
@@ -0,0 +1,14 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+/**
+ * @author Evren Sirin
+ */
+public enum PelletCmdOptionArg {
+ // NONE: the option takes no argument (it is a plain flag)
+ // OPTIONAL: the option may be followed by an argument
+ // REQUIRED: the option must be followed by an argument
+ NONE, OPTIONAL, REQUIRED
+}
diff --git a/cli/src/pellet/PelletCmdOptions.java b/cli/src/pellet/PelletCmdOptions.java
new file mode 100644
index 000000000..e5142aed0
--- /dev/null
+++ b/cli/src/pellet/PelletCmdOptions.java
@@ -0,0 +1,80 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ *
+ * Title: PelletCmdOptions
+ *
+ *
+ * Description: Essentially a set of PelletCmdOption
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletCmdOptions {
+
+    // Long option name -> option; LinkedHashMap preserves registration order
+    // so help output lists options in the order they were added.
+    private Map<String, PelletCmdOption> options;
+    // Short option name -> option
+    private Map<String, PelletCmdOption> shortOptions;
+    // Options registered with isMandatory() == true
+    private Set<PelletCmdOption> mandatory;
+
+    public PelletCmdOptions() {
+        options = new LinkedHashMap<String, PelletCmdOption>();
+        shortOptions = new HashMap<String, PelletCmdOption>();
+        mandatory = new HashSet<PelletCmdOption>();
+    }
+
+    /**
+     * Register an option under its long name and, if present, its short name.
+     *
+     * @param option the option to register
+     * @throws PelletCmdException if an option with the same long or short name
+     *             was already registered
+     */
+    public void add(PelletCmdOption option) {
+        String shortOption = option.getShortOption();
+        String longOption = option.getLongOption();
+
+        if( options.containsKey( longOption ) )
+            throw new PelletCmdException( "Duplicate long option for command: " + longOption );
+        else if( shortOption != null && shortOptions.containsKey( shortOption ) )
+            throw new PelletCmdException( "Duplicate short option for command: " + shortOption );
+
+        // Only index by short name when one is defined; previously every
+        // option without a short name was stored under the null key.
+        if( shortOption != null )
+            shortOptions.put( shortOption, option );
+        options.put( longOption, option );
+
+        if( option.isMandatory() )
+            mandatory.add( option );
+    }
+
+    /**
+     * Look up an option by its short or long name.
+     *
+     * @param key short or long option name (without hyphens)
+     * @return the matching option, or null if none is registered
+     */
+    public PelletCmdOption getOption(String key) {
+        // If key is short option then this matches
+        PelletCmdOption option = shortOptions.get( key );
+
+        // Else, key is long option, retrieve its short option
+        if( option == null )
+            option = options.get( key );
+
+        return option;
+    }
+
+    public Set<PelletCmdOption> getMandatoryOptions() {
+        return mandatory;
+    }
+
+    public Collection<PelletCmdOption> getOptions() {
+        return options.values();
+    }
+}
diff --git a/cli/src/pellet/PelletConsistency.java b/cli/src/pellet/PelletConsistency.java
new file mode 100644
index 000000000..73d9f7add
--- /dev/null
+++ b/cli/src/pellet/PelletConsistency.java
@@ -0,0 +1,71 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import org.mindswap.pellet.KnowledgeBase;
+
+/**
+ *
+ * Title: PelletConsistency
+ *
+ *
+ * Description: Check the consistency of an ontology
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletConsistency extends PelletCmdApp {
+
+    public PelletConsistency() {
+    }
+
+    /** Usage string shown in the command line help. */
+    @Override
+    public String getAppCmd() {
+        return "pellet consistency " + getMandatoryOptions() + "[options] ...";
+    }
+
+    /** One-line identification of this sub-command. */
+    @Override
+    public String getAppId() {
+        return "PelletConsistency: Check the consistency of an ontology";
+    }
+
+    /** Global options plus loader, imports-handling and input-format options. */
+    @Override
+    public PelletCmdOptions getOptions() {
+        PelletCmdOptions options = getGlobalOptions();
+        options.add( getLoaderOption() );
+        options.add( getIgnoreImportsOption() );
+        options.add( getInputFormatOption() );
+        return options;
+    }
+
+    /** Load the knowledge base, run the consistency check and report the result. */
+    @Override
+    public void run() {
+        KnowledgeBase kb = getKB();
+
+        startTask( "consistency check" );
+        boolean consistent = kb.isConsistent();
+        finishTask( "consistency check" );
+
+        if( !consistent ) {
+            output( "Consistent: No" );
+            output( "Reason: " + kb.getExplanation() );
+        }
+        else {
+            output( "Consistent: Yes" );
+        }
+    }
+}
diff --git a/cli/src/pellet/PelletDIG.java b/cli/src/pellet/PelletDIG.java
new file mode 100644
index 000000000..506320bf9
--- /dev/null
+++ b/cli/src/pellet/PelletDIG.java
@@ -0,0 +1,85 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import static pellet.PelletCmdOptionArg.REQUIRED;
+
+import org.mindswap.pellet.dig.PelletDIGServer;
+
+/**
+ *
+ * Title: PelletDIG
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ * @deprecated DIG functionality is deprecated and will be remove in the next major release
+ */
+@Deprecated
+public class PelletDIG extends PelletCmdApp {
+
+    public PelletDIG() {
+    }
+
+    @Override
+    public String getAppId() {
+        return "PelletDIG: DIG Server that is backed by Pellet reasoner *DEPRECATED*";
+    }
+
+    @Override
+    public String getAppCmd() {
+        return "pellet DIG " + getMandatoryOptions() + "[options]";
+    }
+
+    @Override
+    public PelletCmdOptions getOptions() {
+        PelletCmdOptions options = getGlobalOptions();
+
+        PelletCmdOption option = new PelletCmdOption( "port" );
+        option.setShortOption( "p" );
+        option.setType( "positive integer" );
+        // Typo fix: "user by" -> "used by"
+        option.setDescription( "The port number used by the server" );
+        option.setIsMandatory( false );
+        option.setDefaultValue( 8081 );
+        option.setArg( REQUIRED );
+        options.add( option );
+
+        return options;
+    }
+
+    @Override
+    public boolean requiresInputFiles() {
+        // The DIG server does not read an input ontology on startup
+        return false;
+    }
+
+    @Override
+    public void run() {
+        // Typo fix in the banner: "WARNGING" -> "WARNING"
+        output("*****************************************************************************");
+        output("* DEPRECATION WARNING *");
+        output("* *");
+        output("* DIG command is deprecated and will be removed in the next major release *");
+        output("* *");
+        output("*****************************************************************************");
+        output("");
+
+        // Valid TCP port range; throws PelletCmdException on invalid values
+        int port = options.getOption( "port" ).getValueAsInteger( 1, 65535 );
+
+        PelletDIGServer server = new PelletDIGServer(port);
+        server.run();
+    }
+}
diff --git a/cli/src/pellet/PelletEntailment.java b/cli/src/pellet/PelletEntailment.java
new file mode 100644
index 000000000..663edfba3
--- /dev/null
+++ b/cli/src/pellet/PelletEntailment.java
@@ -0,0 +1,144 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import static pellet.PelletCmdOptionArg.NONE;
+import static pellet.PelletCmdOptionArg.REQUIRED;
+
+import java.io.PrintWriter;
+import java.util.Set;
+
+import org.mindswap.pellet.utils.FileUtils;
+import org.semanticweb.owlapi.model.IRI;
+import org.semanticweb.owlapi.model.OWLAxiom;
+import org.semanticweb.owlapi.model.OWLLogicalAxiom;
+import org.semanticweb.owlapi.model.OWLOntology;
+
+import com.clarkparsia.owlapi.explanation.io.manchester.ManchesterSyntaxObjectRenderer;
+import com.clarkparsia.owlapi.explanation.io.manchester.TextBlockWriter;
+import com.clarkparsia.pellet.owlapiv3.EntailmentChecker;
+import com.clarkparsia.pellet.owlapiv3.OWLAPILoader;
+import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
+
+/**
+ *
+ * Title: PelletEntailment
+ *
+ *
+ * Description: Given an input ontology check if the axioms in the output
+ * ontology are all entailed. If not, report either the first non-entailment or
+ * all non-entailments.
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletEntailment extends PelletCmdApp {
+
+ private String entailmentFile;
+ private boolean findAll;
+
+ public PelletEntailment() {
+ }
+
+ @Override
+ public String getAppId() {
+ return "PelletEntailment: Check if all axioms are entailed by the ontology";
+ }
+
+ @Override
+ public String getAppCmd() {
+ return "pellet entail " + getMandatoryOptions() + "[options] ...";
+ }
+
+ @Override
+ public PelletCmdOptions getOptions() {
+ PelletCmdOptions options = getGlobalOptions();
+
+ options.add( getIgnoreImportsOption() );
+
+ PelletCmdOption option = new PelletCmdOption( "entailment-file" );
+ option.setShortOption( "e" );
+ option.setType( "" );
+ option.setDescription( "Entailment ontology URI" );
+ option.setIsMandatory( true );
+ option.setArg( REQUIRED );
+ options.add( option );
+
+ option = new PelletCmdOption( "all" );
+ option.setShortOption( "a" );
+ option.setDefaultValue( false );
+ option.setDescription( "Show all non-entailments" );
+ option.setDefaultValue( findAll );
+ option.setIsMandatory( false );
+ option.setArg( NONE );
+ options.add( option );
+
+ return options;
+ }
+
+ @Override
+ public void run() {
+ entailmentFile = options.getOption( "entailment-file" ).getValueAsString();
+ findAll = options.getOption( "all" ).getValueAsBoolean();
+
+ OWLAPILoader loader = (OWLAPILoader) getLoader( "OWLAPIv3" );
+
+ getKB();
+
+ PelletReasoner reasoner = loader.getReasoner();
+
+ OWLOntology entailmentOntology = null;
+ try {
+ verbose( "Loading entailment file: " );
+ verbose( entailmentFile );
+ IRI entailmentFileURI = IRI.create( FileUtils.toURI( entailmentFile ) );
+ entailmentOntology = loader.getManager().loadOntology( entailmentFileURI );
+ } catch( Exception e ) {
+ throw new PelletCmdException( e );
+ }
+
+ EntailmentChecker checker = new EntailmentChecker(reasoner);
+ Set axioms = entailmentOntology.getLogicalAxioms();
+
+ verbose( "Check entailments for (" + axioms.size() + ") axioms" );
+ startTask( "Checking" );
+ Set nonEntailments = checker.findNonEntailments(axioms, findAll);
+ finishTask( "Checking" );
+
+ if( nonEntailments.isEmpty() ) {
+ output( "All axioms are entailed." );
+ }
+ else {
+ output( "Non-entailments (" + nonEntailments.size() + "): " );
+
+ int index = 1;
+ TextBlockWriter writer = new TextBlockWriter( new PrintWriter( System.out ) );
+ ManchesterSyntaxObjectRenderer renderer = new ManchesterSyntaxObjectRenderer( writer );
+ writer.println();
+ for( OWLAxiom axiom : nonEntailments ) {
+ writer.print(index++);
+ writer.print(")");
+ writer.printSpace();
+
+ writer.startBlock();
+ axiom.accept( renderer );
+ writer.endBlock();
+ writer.println();
+ }
+ writer.flush();
+ }
+ }
+
+}
diff --git a/cli/src/pellet/PelletExceptionFormatter.java b/cli/src/pellet/PelletExceptionFormatter.java
new file mode 100644
index 000000000..bc9d8e444
--- /dev/null
+++ b/cli/src/pellet/PelletExceptionFormatter.java
@@ -0,0 +1,66 @@
+package pellet;
+
+import java.io.FileNotFoundException;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+
+public class PelletExceptionFormatter {
+
+    private boolean verbose = false;
+
+    public PelletExceptionFormatter() {}
+
+    /**
+     * Format a user-friendly message for the root cause of the given
+     * exception. In verbose mode the full stack trace of the root cause is
+     * returned instead.
+     *
+     * @param e the thrown exception (possibly wrapped several levels deep)
+     * @return a printable error message
+     */
+    public String formatException(Throwable e) {
+        // Walk to the root cause; intermediate wrappers add no information
+        Throwable cause = e;
+        while( cause.getCause() != null ) {
+            cause = cause.getCause();
+        }
+
+        if( !verbose ) {
+            if( cause instanceof FileNotFoundException )
+                return format( (FileNotFoundException) cause );
+            if( cause instanceof PelletCmdException )
+                return format( (PelletCmdException) cause );
+            return formatGeneric( cause );
+        }
+
+        StringWriter writer = new StringWriter();
+        PrintWriter pw = new PrintWriter( writer );
+        cause.printStackTrace( pw );
+        pw.close();
+        return writer.toString();
+    }
+
+    private String format(FileNotFoundException e) {
+        // FileNotFoundException's message is the file name that failed to open
+        return "ERROR: Cannot open " + e.getMessage();
+    }
+
+    private String format(PelletCmdException e) {
+        return "ERROR: " + e.getMessage();
+    }
+
+    /**
+     * Return a generic one-line message for an unexpected exception.
+     *
+     * @param e the root cause to describe
+     * @return the first line of the exception message (or the exception class
+     *         name if there is no message), followed by a hint about -v
+     */
+    private String formatGeneric(Throwable e) {
+        String msg = e.getMessage();
+        if( msg == null ) {
+            // Fall back to the class name instead of printing the literal "null"
+            msg = e.getClass().getSimpleName();
+        }
+        else {
+            // Keep only the first line of a multi-line message
+            int index = msg.indexOf( '\n' );
+            if( index != -1 )
+                msg = msg.substring( 0, index );
+        }
+
+        return msg + "\nUse -v for detail.";
+    }
+
+    public void setVerbose( boolean verbose ) {
+        this.verbose = verbose;
+    }
+
+}
diff --git a/cli/src/pellet/PelletExplain.java b/cli/src/pellet/PelletExplain.java
new file mode 100644
index 000000000..9dc0c3629
--- /dev/null
+++ b/cli/src/pellet/PelletExplain.java
@@ -0,0 +1,697 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import static pellet.PelletCmdOptionArg.NONE;
+import static pellet.PelletCmdOptionArg.REQUIRED;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.coode.owlapi.manchesterowlsyntax.ManchesterOWLSyntaxEditorParser;
+import org.mindswap.pellet.utils.Timer;
+import org.mindswap.pellet.utils.progress.ConsoleProgressMonitor;
+import org.mindswap.pellet.utils.progress.ProgressMonitor;
+import org.semanticweb.owlapi.expression.ParserException;
+import org.semanticweb.owlapi.model.OWLAxiom;
+import org.semanticweb.owlapi.model.OWLClass;
+import org.semanticweb.owlapi.model.OWLClassExpression;
+import org.semanticweb.owlapi.model.OWLDataProperty;
+import org.semanticweb.owlapi.model.OWLEntity;
+import org.semanticweb.owlapi.model.OWLException;
+import org.semanticweb.owlapi.model.OWLIndividual;
+import org.semanticweb.owlapi.model.OWLLiteral;
+import org.semanticweb.owlapi.model.OWLNamedIndividual;
+import org.semanticweb.owlapi.model.OWLObject;
+import org.semanticweb.owlapi.model.OWLObjectProperty;
+import org.semanticweb.owlapi.model.OWLProperty;
+import org.semanticweb.owlapi.model.OWLSubClassOfAxiom;
+import org.semanticweb.owlapi.reasoner.Node;
+import org.semanticweb.owlapi.reasoner.NodeSet;
+
+import com.clarkparsia.owlapi.explanation.BlackBoxExplanation;
+import com.clarkparsia.owlapi.explanation.GlassBoxExplanation;
+import com.clarkparsia.owlapi.explanation.HSTExplanationGenerator;
+import com.clarkparsia.owlapi.explanation.MultipleExplanationGenerator;
+import com.clarkparsia.owlapi.explanation.SatisfiabilityConverter;
+import com.clarkparsia.owlapi.explanation.TransactionAwareSingleExpGen;
+import com.clarkparsia.owlapi.explanation.io.ExplanationRenderer;
+import com.clarkparsia.owlapi.explanation.io.manchester.ManchesterSyntaxExplanationRenderer;
+import com.clarkparsia.owlapi.explanation.util.ExplanationProgressMonitor;
+import com.clarkparsia.owlapiv3.OWL;
+import com.clarkparsia.owlapiv3.OntologyUtils;
+import com.clarkparsia.pellet.owlapiv3.OWLAPILoader;
+import com.clarkparsia.pellet.owlapiv3.PelletReasoner;
+import com.clarkparsia.pellet.owlapiv3.PelletReasonerFactory;
+
+/**
+ *
+ * Title:
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Evren Sirin
+ * @author Markus Stocker
+ */
+public class PelletExplain extends PelletCmdApp {
+ // Converts an axiom into a class expression whose unsatisfiability explains it
+ private SatisfiabilityConverter converter;
+ /**
+ * inferences for which there was an error while generating the explanation
+ */
+ private int errorExpCount = 0;
+ private OWLAPILoader loader;
+ // Maximum number of explanations generated per inference (set in parseArgs)
+ private int maxExplanations = 1;
+ // Selects the explanation method; presumably toggled by loadMethod() — not visible here
+ private boolean useBlackBox = false;
+ private ProgressMonitor monitor;
+ /**
+ * inferences whose explanation contains more than one axiom
+ */
+ private int multiAxiomExpCount = 0;
+ /**
+ * inferences with multiple explanations
+ */
+ private int multipleExpCount = 0;
+
+ private PelletReasoner reasoner;
+ // Entities parsed from the command line options (see run() for how
+ // null/non-null combinations select the explanation mode)
+ private OWLEntity name1;
+ private OWLEntity name2;
+ private OWLObject name3;
+
+ public PelletExplain() {
+ // Registers the glass-box explanation generator with the OWL API
+ GlassBoxExplanation.setup();
+ }
+
+ @Override
+ public String getAppId() {
+ return "PelletExplain: Explains one or more inferences in a given ontology including ontology inconsistency";
+ }
+
+ @Override
+ public String getAppCmd() {
+ // Usage text for the command line help; the listed options are mutually
+ // exclusive and are resolved in parseArgs()/loadNames().
+ return "pellet explain " + getMandatoryOptions() + "[options] ...\n\n"
+ + "The options --unsat, --all-unsat, --inconsistent, --subclass, \n"
+ + "--hierarchy, and --instance are mutually exclusive. By default \n "
+ + "--inconsistent option is assumed. In the following descriptions \n"
+ + "C, D, and i can be URIs or local names.";
+ }
+
+    /**
+     * Build the command line options for the explain sub-command. The
+     * repeated option-construction boilerplate is factored into the private
+     * {@link #option} helper; registration order is preserved so the help
+     * output is unchanged.
+     */
+    @Override
+    public PelletCmdOptions getOptions() {
+        PelletCmdOptions options = getGlobalOptions();
+
+        options.add( getIgnoreImportsOption() );
+
+        options.add( option( "unsat", null, "C",
+                "Explain why the given class is unsatisfiable", null, REQUIRED ) );
+        options.add( option( "all-unsat", null, null,
+                "Explain all unsatisfiable classes", false, NONE ) );
+        options.add( option( "inconsistent", null, null,
+                "Explain why the ontology is inconsistent", false, NONE ) );
+        options.add( option( "hierarchy", null, null,
+                "Print all explanations for the class hierarchy", false, NONE ) );
+        options.add( option( "subclass", null, "C,D",
+                "Explain why C is a subclass of D", null, REQUIRED ) );
+        options.add( option( "instance", null, "i,C",
+                "Explain why i is an instance of C", null, REQUIRED ) );
+        options.add( option( "property-value", null, "s,p,o",
+                "Explain why s has value o for property p", null, REQUIRED ) );
+        options.add( option( "method", "m", "glass | black",
+                "Method that will be used to generate explanations", "glass", REQUIRED ) );
+        options.add( option( "max", "x", "positive integer",
+                "Maximum number of generated explanations for each inference", 1, REQUIRED ) );
+
+        // Override the description of the inherited global "verbose" option
+        PelletCmdOption option = options.getOption( "verbose" );
+        option.setDescription( "Print detailed exceptions and messages about the progress" );
+
+        return options;
+    }
+
+    /**
+     * Build a non-mandatory command line option with the given attributes.
+     * Null shortOption, type or defaultValue arguments are simply not set,
+     * matching the original per-option construction.
+     */
+    private static PelletCmdOption option(String longOption, String shortOption, String type,
+            String description, Object defaultValue, PelletCmdOptionArg arg) {
+        PelletCmdOption option = new PelletCmdOption( longOption );
+        if( shortOption != null )
+            option.setShortOption( shortOption );
+        if( type != null )
+            option.setType( type );
+        option.setDescription( description );
+        if( defaultValue != null )
+            option.setDefaultValue( defaultValue );
+        option.setIsMandatory( false );
+        option.setArg( arg );
+        return option;
+    }
+
+ @Override
+ public void parseArgs(String[] args) {
+ super.parseArgs( args );
+
+ // NOTE(review): despite the option being documented as "positive integer",
+ // the value is only validated as non-negative here — confirm intent.
+ maxExplanations = options.getOption( "max" ).getValueAsNonNegativeInteger();
+
+ loader = (OWLAPILoader) getLoader( "OWLAPIv3" );
+
+ // Triggers loading of the input ontology into the reasoner
+ getKB();
+
+ converter = new SatisfiabilityConverter( loader.getManager().getOWLDataFactory() );
+
+ reasoner = loader.getReasoner();
+
+ // Resolve the explanation method and the named entities from the options;
+ // these helpers are defined elsewhere in this class (not visible here).
+ loadMethod();
+
+ loadNames();
+ }
+
+    /**
+     * Dispatch to the explanation mode selected on the command line. The
+     * null/non-null combinations of name1/name2/name3 (set by loadNames())
+     * encode which option was given.
+     */
+    @Override
+    public void run() {
+        try {
+            if( name1 == null ) {
+                // Option --hierarchy
+                verbose( "Explain all the subclass relations in the ontology" );
+                explainClassHierarchy();
+            }
+            else if( name2 == null ) {
+                if( ((OWLClassExpression) name1).isOWLNothing() ) {
+                    // Option --all-unsat
+                    verbose( "Explain all the unsatisfiable classes" );
+                    explainUnsatisfiableClasses();
+                }
+                else {
+                    // Option --inconsistent && --unsat C
+                    verbose( "Explain unsatisfiability of " + name1 );
+                    explainUnsatisfiableClass( (OWLClass) name1 );
+                }
+            }
+            else if( name3 != null ) {
+                // Option --property-value s,p,o
+                verbose( "Explain property assertion " + name1 + " and " + name2 + " and " + name3 );
+
+                // Restored wildcard cast (the generic arguments were lost in an earlier edit)
+                explainPropertyValue( (OWLIndividual) name1, (OWLProperty<?, ?>) name2, name3 );
+            }
+            else if( name1.isOWLClass() && name2.isOWLClass() ) {
+                // Option --subclass C,D
+                verbose( "Explain subclass relation between " + name1 + " and " + name2 );
+
+                explainSubClass( (OWLClass) name1, (OWLClass) name2 );
+            }
+            else if( name1.isOWLNamedIndividual() && name2.isOWLClass() ) {
+                // Option --instance i,C
+                verbose( "Explain instance relation between " + name1 + " and " + name2 );
+
+                explainInstance( (OWLIndividual) name1, (OWLClass) name2 );
+            }
+
+            printStatistics();
+        } catch( OWLException e ) {
+            throw new RuntimeException( e );
+        }
+    }
+
+    /**
+     * Generate and render up to maxExplanations explanations for the given
+     * axiom, updating the statistics counters.
+     *
+     * @param axiom the inferred axiom to explain
+     * @throws OWLException if explanation generation or rendering fails
+     */
+    private void explainAxiom(OWLAxiom axiom) throws OWLException {
+
+        MultipleExplanationGenerator expGen = new HSTExplanationGenerator(getSingleExplanationGenerator());
+        RendererExplanationProgressMonitor rendererMonitor = new RendererExplanationProgressMonitor(axiom);
+        expGen.setProgressMonitor(rendererMonitor);
+
+        // The axiom is explained by showing why its negation is unsatisfiable
+        OWLClassExpression unsatClass = converter.convert( axiom );
+        Timer timer = timers.startTimer("explain");
+        // Restored generic type (lost in an earlier edit): each explanation is a set of axioms
+        Set<Set<OWLAxiom>> explanations = expGen.getExplanations( unsatClass, maxExplanations );
+        timer.stop();
+
+        if (explanations.isEmpty()) {
+            rendererMonitor.foundNoExplanations();
+        }
+
+        // A periodic printStatistics() call used to live here but was disabled:
+        // if( timer.getCount() % 10 == 0 )
+        //     printStatistics();
+
+        int expSize = explanations.size();
+        if( expSize == 0 ) {
+            // No explanation found usually indicates a generation error
+            errorExpCount++;
+        }
+        else if( expSize == 1 ) {
+            if( explanations.iterator().next().size() > 1 ) {
+                multiAxiomExpCount++;
+            }
+        }
+        else {
+            multipleExpCount++;
+        }
+    }
+
+    /**
+     * Classify and realize the ontology, then explain every equivalence,
+     * subclass and instance relation in the inferred class hierarchy.
+     *
+     * @throws OWLException if an explanation cannot be generated or rendered
+     */
+    public void explainClassHierarchy() throws OWLException {
+        // Restored generic types (lost in an earlier edit)
+        Set<OWLClass> visited = new HashSet<OWLClass>();
+
+        reasoner.flush();
+
+        startTask( "Classification" );
+        reasoner.getKB().classify();
+        finishTask( "Classification" );
+
+        startTask( "Realization" );
+        reasoner.getKB().realize();
+        finishTask( "Realization" );
+
+        monitor = new ConsoleProgressMonitor();
+        monitor.setProgressTitle( "Explaining" );
+        monitor.setProgressLength( reasoner.getRootOntology().getClassesInSignature().size() );
+        monitor.taskStarted();
+
+        // Traverse from both ends: unsatisfiable classes first, then from owl:Thing down
+        Node<OWLClass> bottoms = reasoner.getEquivalentClasses( OWL.Nothing );
+        explainClassHierarchy( OWL.Nothing, bottoms, visited );
+
+        Node<OWLClass> tops = reasoner.getEquivalentClasses( OWL.Thing );
+        explainClassHierarchy( OWL.Thing, tops, visited );
+
+        monitor.taskFinished();
+    }
+
+    /**
+     * Explain why the two given classes are equivalent. Identical classes
+     * need no explanation and are skipped.
+     */
+    public void explainEquivalentClass(OWLClass c1, OWLClass c2) throws OWLException {
+        if( !c1.equals( c2 ) ) {
+            explainAxiom( OWL.equivalentClasses( c1, c2 ) );
+        }
+    }
+
+    /**
+     * Explain why the given individual is an instance of the given class.
+     * Membership in owl:Thing is trivial and is skipped.
+     */
+    public void explainInstance(OWLIndividual ind, OWLClass c) throws OWLException {
+        if( !c.isOWLThing() ) {
+            explainAxiom( OWL.classAssertion( ind, c ) );
+        }
+    }
+
+ // In the following method(s) we intentionally do not use OWLPropertyExpression,?>
+ // because of a bug in some Sun's implementation of javac
+ // http://bugs.sun.com/view_bug.do?bug_id=6548436
+ // Since lack of generic type generates a warning, we suppress it
+ @SuppressWarnings("unchecked")
+ public void explainPropertyValue(OWLIndividual s, OWLProperty p, OWLObject o) throws OWLException {
+ if( p.isOWLObjectProperty() ) {
+ explainAxiom( OWL.propertyAssertion( s, (OWLObjectProperty) p, (OWLIndividual) o ) );
+ }
+ else {
+ explainAxiom( OWL.propertyAssertion( s, (OWLDataProperty) p, (OWLLiteral) o ) );
+ }
+ }
+
+ public void explainSubClass(OWLClass sub, OWLClass sup) throws OWLException {
+ if( sub.equals( sup ) ) {
+ return;
+ }
+
+ if( sub.isOWLNothing() ) {
+ return;
+ }
+
+ if( sup.isOWLThing() ) {
+ return;
+ }
+
+ OWLSubClassOfAxiom axiom = OWL.subClassOf( sub, sup );
+ explainAxiom( axiom );
+ }
+
+ public void explainUnsatisfiableClasses() throws OWLException {
+ for( OWLClass cls : reasoner.getEquivalentClasses( OWL.Nothing ) ) {
+ if( cls.isOWLNothing() ) {
+ continue;
+ }
+
+ explainUnsatisfiableClass( cls );
+ }
+ }
+
+ public void explainUnsatisfiableClass(OWLClass cls) throws OWLException {
+ explainSubClass( cls, OWL.Nothing );
+ }
+
+ private void explainClassHierarchy(OWLClass cls, Node eqClasses, Set visited)
+ throws OWLException {
+ if( visited.contains( cls ) ) {
+ return;
+ }
+
+ visited.add( cls );
+ visited.addAll( eqClasses.getEntities() );
+
+ for( OWLClass eqClass : eqClasses ) {
+ monitor.incrementProgress();
+
+ explainEquivalentClass( cls, eqClass );
+ }
+
+ for( OWLNamedIndividual ind : reasoner.getInstances( cls, true ).getFlattened() ) {
+ explainInstance( ind, cls );
+ }
+
+ NodeSet subClasses = reasoner.getSubClasses( cls, true );
+ Map> subClassEqs = new HashMap>();
+ for( Node equivalenceSet : subClasses ) {
+ if( equivalenceSet.isBottomNode() ) {
+ continue;
+ }
+
+ OWLClass subClass = equivalenceSet.getRepresentativeElement();
+ subClassEqs.put( subClass, equivalenceSet );
+ explainSubClass( subClass, cls );
+ }
+
+ for( Map.Entry> entry : subClassEqs.entrySet() ) {
+ explainClassHierarchy( entry.getKey(), entry.getValue(), visited );
+ }
+ }
+
+ private TransactionAwareSingleExpGen getSingleExplanationGenerator() {
+ if( useBlackBox ) {
+ if ( options.getOption( "inconsistent" ) != null ) {
+ if( !options.getOption( "inconsistent" ).getValueAsBoolean() ) {
+ return new BlackBoxExplanation( reasoner.getRootOntology(), PelletReasonerFactory.getInstance(), reasoner );
+ } else {
+ output( "WARNING: black method cannot be used to explain inconsistency. Switching to glass." );
+ return new GlassBoxExplanation( reasoner );
+ }
+ } else {
+ return new BlackBoxExplanation( reasoner.getRootOntology(), PelletReasonerFactory.getInstance(), reasoner );
+ }
+ } else {
+ return new GlassBoxExplanation( reasoner );
+ }
+ }
+
+ private void loadMethod() {
+ String method = options.getOption( "method" ).getValueAsString();
+
+ if( method.equalsIgnoreCase( "black" ) ) {
+ useBlackBox = true;
+ } else if( method.equalsIgnoreCase( "glass" ) ) {
+ useBlackBox = false;
+ } else {
+ throw new PelletCmdException( "Unrecognized method: " + method );
+ }
+ }
+
+ private void loadNames() {
+ PelletCmdOption option;
+
+ name1 = name2 = null;
+ name3 = null;
+
+ if( (option = options.getOption( "hierarchy" )) != null ) {
+ if( option.getValueAsBoolean() ) {
+ return;
+ }
+ }
+
+ if( (option = options.getOption( "all-unsat" )) != null ) {
+ if( option.getValueAsBoolean() ) {
+ name1 = OWL.Nothing;
+ return;
+ }
+ }
+
+ if( (option = options.getOption( "inconsistent" )) != null ) {
+ if( option.getValueAsBoolean() ) {
+ if( useBlackBox ) {
+ throw new PelletCmdException("Black box method cannot be used to explain ontology inconsistency");
+ }
+ name1 = OWL.Thing;
+ return;
+ }
+ }
+
+ if( (option = options.getOption( "unsat" )) != null ) {
+ String unsatisfiable = option.getValueAsString();
+ if( unsatisfiable != null ) {
+ name1 = OntologyUtils.findEntity( unsatisfiable, loader.getAllOntologies() );
+
+ if( name1 == null ) {
+ throw new PelletCmdException( "Undefined entity: " + unsatisfiable );
+ }
+ else if( !name1.isOWLClass() ) {
+ throw new PelletCmdException( "Not a defined class: " + unsatisfiable );
+ }
+ else if( name1.isTopEntity() && useBlackBox) {
+ throw new PelletCmdException("Black box method cannot be used to explain unsatisfiability of owl:Thing");
+ }
+
+ return;
+ }
+ }
+
+ if( (option = options.getOption( "subclass" )) != null ) {
+ String subclass = option.getValueAsString();
+ if( subclass != null ) {
+ String[] names = subclass.split( "," );
+ if( names.length != 2 ) {
+ throw new PelletCmdException(
+ "Invalid format for subclass option: " + subclass );
+ }
+
+ name1 = OntologyUtils.findEntity( names[0], loader.getAllOntologies() );
+ name2 = OntologyUtils.findEntity( names[1], loader.getAllOntologies() );
+
+ if( name1 == null ) {
+ throw new PelletCmdException( "Undefined entity: " + names[0] );
+ }
+ else if( !name1.isOWLClass() ) {
+ throw new PelletCmdException( "Not a defined class: " + names[0] );
+ }
+ if( name2 == null ) {
+ throw new PelletCmdException( "Undefined entity: " + names[1] );
+ }
+ else if( !name2.isOWLClass() ) {
+ throw new PelletCmdException( "Not a defined class: " + names[1] );
+ }
+ return;
+ }
+ }
+
+ if( (option = options.getOption( "instance" )) != null ) {
+ String instance = option.getValueAsString();
+ if( instance != null ) {
+ String[] names = instance.split( "," );
+ if( names.length != 2 ) {
+ throw new PelletCmdException( "Invalid format for instance option: " + instance );
+ }
+
+ name1 = OntologyUtils.findEntity( names[0], loader.getAllOntologies() );
+ name2 = OntologyUtils.findEntity( names[1], loader.getAllOntologies() );
+
+ if( name1 == null ) {
+ throw new PelletCmdException( "Undefined entity: " + names[0] );
+ }
+ else if( !name1.isOWLNamedIndividual() ) {
+ throw new PelletCmdException( "Not a defined individual: " + names[0] );
+ }
+ if( name2 == null ) {
+ throw new PelletCmdException( "Undefined entity: " + names[1] );
+ }
+ else if( !name2.isOWLClass() ) {
+ throw new PelletCmdException( "Not a defined class: " + names[1] );
+ }
+
+ return;
+ }
+ }
+
+ if( (option = options.getOption( "property-value" )) != null ) {
+ String optionValue = option.getValueAsString();
+ if( optionValue != null ) {
+ String[] names = optionValue.split( "," );
+ if( names.length != 3 ) {
+ throw new PelletCmdException( "Invalid format for property-value option: " + optionValue );
+ }
+
+ name1 = OntologyUtils.findEntity( names[0], loader.getAllOntologies() );
+ name2 = OntologyUtils.findEntity( names[1], loader.getAllOntologies() );
+
+ if( name1 == null ) {
+ throw new PelletCmdException( "Undefined entity: " + names[0] );
+ }
+ else if( !name1.isOWLNamedIndividual() ) {
+ throw new PelletCmdException( "Not an individual: " + names[0] );
+ }
+ if( name2 == null ) {
+ throw new PelletCmdException( "Undefined entity: " + names[1] );
+ }
+ else if( !name2.isOWLObjectProperty() && !name2.isOWLDataProperty() ) {
+ throw new PelletCmdException( "Not a defined property: " + names[1] );
+ }
+ if( name2.isOWLObjectProperty() ) {
+ name3 = OntologyUtils.findEntity( names[2], loader.getAllOntologies() );
+ if( name3 == null ) {
+ throw new PelletCmdException( "Undefined entity: " + names[2] );
+ }
+ else if( !(name3 instanceof OWLIndividual) ) {
+ throw new PelletCmdException( "Not a defined individual: " + names[2] );
+ }
+ }
+ else {
+ ManchesterOWLSyntaxEditorParser parser = new ManchesterOWLSyntaxEditorParser(
+ loader.getManager().getOWLDataFactory(), names[2] );
+ try {
+ name3 = parser.parseConstant();
+ } catch( ParserException e ) {
+ throw new PelletCmdException( "Not a valid literal: " + names[2] );
+ }
+ }
+
+ return;
+ }
+ }
+
+ // Per default we explain why the ontology is inconsistent
+ name1 = OWL.Thing;
+ if( useBlackBox ) {
+ throw new PelletCmdException("Black box method cannot be used to explain ontology inconsistency");
+ }
+
+ return;
+ }
+
+ private void printStatistics() throws OWLException {
+ if(!verbose) {
+ return;
+ }
+
+ Timer timer = timers.getTimer( "explain" );
+ if( timer != null ) {
+ verbose( "Subclass relations : " + timer.getCount() );
+ verbose( "Multiple explanations: " + multipleExpCount );
+ verbose( "Single explanation " );
+ verbose( " with multiple axioms: " + multiAxiomExpCount );
+ verbose( "Error explaining : " + errorExpCount );
+ verbose( "Average time : " + timer.getAverage() + "ms" );
+ }
+ }
+
+ private class RendererExplanationProgressMonitor implements ExplanationProgressMonitor {
+
+ private ExplanationRenderer rend = new ManchesterSyntaxExplanationRenderer();
+ private OWLAxiom axiom;
+ private Set> setExplanations;
+ private PrintWriter pw;
+
+ private RendererExplanationProgressMonitor(OWLAxiom axiom) {
+ this.axiom = axiom;
+ this.pw = new PrintWriter(System.out);
+
+ setExplanations = new HashSet>();
+ try {
+ rend.startRendering(pw);
+ }
+ catch (OWLException e) {
+ System.err.println("Error rendering explanation: " + e);
+ }
+ catch (IOException e) {
+ System.err.println("Error rendering explanation: " + e);
+ }
+ }
+
+ public void foundExplanation(Set axioms) {
+
+ if (!setExplanations.contains(axioms)) {
+ setExplanations.add(axioms);
+ pw.flush();
+ try {
+ rend.render(axiom, Collections.singleton(axioms));
+ }
+ catch (IOException e) {
+ System.err.println("Error rendering explanation: " + e);
+ }
+ catch (OWLException e) {
+ System.err.println("Error rendering explanation: " + e);
+ }
+ }
+ }
+
+ public boolean isCancelled() {
+ return false;
+ }
+
+ public void foundAllExplanations() {
+ try {
+ rend.endRendering();
+ }
+ catch (OWLException e) {
+ System.err.println("Error rendering explanation: " + e);
+ }
+ catch (IOException e) {
+ System.err.println("Error rendering explanation: " + e);
+ }
+ }
+
+ public void foundNoExplanations() {
+ try {
+ rend.render(axiom, Collections.>emptySet());
+ rend.endRendering();
+ }
+ catch (OWLException e) {
+ System.err.println("Error rendering explanation: " + e);
+ }
+ catch (IOException e) {
+ System.err.println("Error rendering explanation: " + e);
+ }
+ }
+ }
+}
diff --git a/cli/src/pellet/PelletExtractInferences.java b/cli/src/pellet/PelletExtractInferences.java
new file mode 100644
index 000000000..3538ddf91
--- /dev/null
+++ b/cli/src/pellet/PelletExtractInferences.java
@@ -0,0 +1,219 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import static pellet.PelletCmdOptionArg.REQUIRED;
+
+import java.util.ArrayList;
+import java.util.EnumSet;
+import java.util.List;
+
+import org.mindswap.pellet.jena.ModelExtractor;
+import org.mindswap.pellet.jena.ModelExtractor.StatementType;
+import org.mindswap.pellet.utils.SetUtils;
+
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.ModelFactory;
+
+/**
+ *
+ * Title: PelletExtractInferences
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletExtractInferences extends PelletCmdApp {
+
+ private EnumSet selector;
+
+ public PelletExtractInferences() {
+ }
+
+ public String getAppId() {
+ return "PelletExtractInferences: Extract a set of inferences from an ontology";
+ }
+
+ public String getAppCmd() {
+ return "pellet extract " + getMandatoryOptions() + "[options] ...";
+ }
+
+ public PelletCmdOptions getOptions() {
+ PelletCmdOptions options = getGlobalOptions();
+
+ PelletCmdOption option = new PelletCmdOption( "statements" );
+ option.setShortOption( "s" );
+ option
+ .setDescription( "Statements to extract. The option accepts all axioms of the OWL functional syntax plus some additional ones. Valid arguments are: "
+ + validStatementArguments()
+ + ". Example: \"DirectSubClassOf DirectSubPropertyOf\"" );
+ option.setType( "Space separated list surrounded by quotes" );
+ option.setDefaultValue( "DefaultStatements" );
+ option.setIsMandatory( false );
+ option.setArg( REQUIRED );
+ options.add( option );
+
+ options.add( getLoaderOption() );
+ options.add( getIgnoreImportsOption() );
+ options.add( getInputFormatOption() );
+
+ return options;
+ }
+
+ public void run() {
+ mapStatementTypes();
+ extractInferences();
+ }
+
+ private void extractInferences() {
+ if( selector.size() == 0 )
+ throw new PelletCmdException( "Selector is empty, provide types to extract" );
+
+ ModelExtractor extractor = new ModelExtractor( getKB() );
+ extractor.setSelector( selector );
+
+ Model extracted = ModelFactory.createDefaultModel();
+
+ if( SetUtils.intersects( selector, ModelExtractor.StatementType.ALL_CLASS_STATEMENTS ) ) {
+ startTask( "Extracting class statements" );
+ extractor.extractClassModel( extracted );
+ finishTask( "Extracting class statements" );
+ }
+
+ if( SetUtils.intersects( selector, ModelExtractor.StatementType.ALL_PROPERTY_STATEMENTS ) ) {
+ startTask( "Extracting property statements" );
+ extractor.extractPropertyModel( extracted );
+ finishTask( "Extracting property statements" );
+ }
+
+ if( SetUtils.intersects( selector, ModelExtractor.StatementType.ALL_INDIVIDUAL_STATEMENTS ) ) {
+ startTask( "Extracting individual statements" );
+ extractor.extractIndividualModel( extracted );
+ finishTask( "Extracting individual statements" );
+ }
+
+ output( extracted );
+ }
+
+ private String validStatementArguments() {
+ List sa = new ArrayList();
+
+ sa.add( "DefaultStatements" );
+ sa.add( "AllClass" );
+ sa.add( "AllIndividual" );
+ sa.add( "AllProperty" );
+ sa.add( "AllStatements" );
+ sa.add( "AllStatementsIncludingJena" );
+ sa.add( "ClassAssertion" );
+ sa.add( "ComplementOf" );
+ sa.add( "DataPropertyAssertion" );
+ sa.add( "DifferentIndividuals" );
+ sa.add( "DirectClassAssertion" );
+ sa.add( "DirectSubClassOf" );
+ sa.add( "DirectSubPropertyOf" );
+ sa.add( "DisjointClasses" );
+ sa.add( "DisjointProperties" );
+ sa.add( "EquivalentClasses" );
+ sa.add( "EquivalentProperties" );
+ sa.add( "InverseProperties" );
+ sa.add( "ObjectPropertyAssertion" );
+ sa.add( "PropertyAssertion" );
+ sa.add( "SameIndividual" );
+ sa.add( "SubClassOf" );
+ sa.add( "SubPropertyOf" );
+
+ return sa.toString();
+ }
+
+ private void mapStatementTypes() {
+ String statements = options.getOption( "statements" ).getValueAsString();
+
+ String[] list = statements.split( " " );
+
+ if( list.length == 0 )
+ throw new PelletCmdException( "No values for statements argument given" );
+
+ for( int i = 0; i < list.length; i++ ) {
+ String l = list[i];
+ if( l.equalsIgnoreCase( "DefaultStatements" ) )
+ selectorAddAll( StatementType.DEFAULT_STATEMENTS );
+ else if( l.equalsIgnoreCase( "AllStatements" ) )
+ selectorAddAll( StatementType.ALL_STATEMENTS );
+ else if( l.equalsIgnoreCase( "AllStatementsIncludingJena" ) )
+ selectorAddAll( StatementType.ALL_STATEMENTS_INCLUDING_JENA );
+ else if( l.equalsIgnoreCase( "AllClass" ) )
+ selectorAddAll( StatementType.ALL_CLASS_STATEMENTS );
+ else if( l.equalsIgnoreCase( "AllIndividual" ) )
+ selectorAddAll( StatementType.ALL_INDIVIDUAL_STATEMENTS );
+ else if( l.equalsIgnoreCase( "AllProperty" ) )
+ selectorAddAll( StatementType.ALL_PROPERTY_STATEMENTS );
+ else if( l.equalsIgnoreCase( "ClassAssertion" ) )
+ selectorAdd( StatementType.ALL_INSTANCE );
+ else if( l.equalsIgnoreCase( "ComplementOf" ) )
+ selectorAdd( StatementType.COMPLEMENT_CLASS );
+ else if( l.equalsIgnoreCase( "DataPropertyAssertion" ) )
+ selectorAdd( StatementType.DATA_PROPERTY_VALUE );
+ else if( l.equalsIgnoreCase( "DifferentIndividuals" ) )
+ selectorAdd( StatementType.DIFFERENT_FROM );
+ else if( l.equalsIgnoreCase( "DirectClassAssertion" ) )
+ selectorAdd( StatementType.DIRECT_INSTANCE );
+ else if( l.equalsIgnoreCase( "DirectSubClassOf" ) )
+ selectorAdd( StatementType.DIRECT_SUBCLASS );
+ else if( l.equalsIgnoreCase( "DirectSubPropertyOf" ) )
+ selectorAdd( StatementType.DIRECT_SUBPROPERTY );
+ else if( l.equalsIgnoreCase( "DisjointClasses" ) )
+ selectorAdd( StatementType.DISJOINT_CLASS );
+ else if( l.equalsIgnoreCase( "DisjointProperties" ) )
+ selectorAdd( StatementType.DISJOINT_PROPERTY );
+ else if( l.equalsIgnoreCase( "EquivalentClasses" ) )
+ selectorAdd( StatementType.EQUIVALENT_CLASS );
+ else if( l.equalsIgnoreCase( "EquivalentProperties" ) )
+ selectorAdd( StatementType.EQUIVALENT_PROPERTY );
+ else if( l.equalsIgnoreCase( "InverseProperties" ) )
+ selectorAdd( StatementType.INVERSE_PROPERTY );
+ else if( l.equalsIgnoreCase( "ObjectPropertyAssertion" ) )
+ selectorAdd( StatementType.OBJECT_PROPERTY_VALUE );
+ else if( l.equalsIgnoreCase( "PropertyAssertion" ) )
+ selectorAddAll( StatementType.PROPERTY_VALUE );
+ else if( l.equalsIgnoreCase( "SameIndividual" ) )
+ selectorAdd( StatementType.SAME_AS );
+ else if( l.equalsIgnoreCase( "SubClassOf" ) )
+ selectorAdd( StatementType.ALL_SUBCLASS );
+ else if( l.equalsIgnoreCase( "SubPropertyOf" ) )
+ selectorAdd( StatementType.ALL_SUBPROPERTY );
+ else
+ throw new PelletCmdException( "Unknown statement type: " + l );
+ }
+
+ if( selector == null )
+ selector = StatementType.DEFAULT_STATEMENTS;
+ }
+
+ private void selectorAddAll(EnumSet types) {
+ if( selector == null )
+ selector = types;
+ else
+ selector.addAll( types );
+ }
+
+ private void selectorAdd(StatementType type) {
+ if( selector == null )
+ selector = EnumSet.of( type );
+ else
+ selector.add( type );
+ }
+}
diff --git a/cli/src/pellet/PelletInfo.java b/cli/src/pellet/PelletInfo.java
new file mode 100644
index 000000000..17cc613de
--- /dev/null
+++ b/cli/src/pellet/PelletInfo.java
@@ -0,0 +1,245 @@
+package pellet;
+
+import static pellet.PelletCmdOptionArg.NONE;
+import static pellet.PelletCmdOptionArg.REQUIRED;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import org.mindswap.pellet.utils.FileUtils;
+import org.semanticweb.owlapi.apibinding.OWLManager;
+import org.semanticweb.owlapi.model.AddAxiom;
+import org.semanticweb.owlapi.model.AddImport;
+import org.semanticweb.owlapi.model.IRI;
+import org.semanticweb.owlapi.model.OWLAnnotation;
+import org.semanticweb.owlapi.model.OWLAnnotationValue;
+import org.semanticweb.owlapi.model.OWLAxiom;
+import org.semanticweb.owlapi.model.OWLImportsDeclaration;
+import org.semanticweb.owlapi.model.OWLLiteral;
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.semanticweb.owlapi.model.OWLOntologyChange;
+import org.semanticweb.owlapi.model.OWLOntologyCreationException;
+import org.semanticweb.owlapi.model.OWLOntologyManager;
+import org.semanticweb.owlapi.profiles.OWL2DLProfile;
+import org.semanticweb.owlapi.profiles.OWL2ELProfile;
+import org.semanticweb.owlapi.profiles.OWL2Profile;
+import org.semanticweb.owlapi.profiles.OWL2QLProfile;
+import org.semanticweb.owlapi.profiles.OWL2RLProfile;
+import org.semanticweb.owlapi.profiles.OWLProfile;
+import org.semanticweb.owlapi.util.DLExpressivityChecker;
+import org.semanticweb.owlapi.util.NonMappingOntologyIRIMapper;
+import org.semanticweb.owlapi.vocab.OWLRDFVocabulary;
+
+import com.clarkparsia.pellet.owlapiv3.LimitedMapIRIMapper;
+
+public class PelletInfo extends PelletCmdApp {
+ private final List profiles = Arrays.asList(new OWL2ELProfile(), new OWL2QLProfile(), new OWL2RLProfile(), new OWL2DLProfile(), new OWL2Profile());
+
+ @Override
+ public String getAppCmd() {
+ return "pellet info " + getMandatoryOptions() + "[options] ...";
+ }
+
+ @Override
+ public String getAppId() {
+ return "PelletInfo: Display information and statistics about 1 or more ontologies";
+ }
+
+ @Override
+ public PelletCmdOptions getOptions() {
+ PelletCmdOptions options = new PelletCmdOptions();
+
+ //Don't call getGlobalOptions(), since we override the behaviour of verbose
+ PelletCmdOption helpOption = new PelletCmdOption( "help" );
+ helpOption.setShortOption( "h" );
+ helpOption.setDescription( "Print this message" );
+ helpOption.setDefaultValue( false );
+ helpOption.setIsMandatory( false );
+ helpOption.setArg( NONE );
+ options.add( helpOption );
+
+ PelletCmdOption verboseOption = new PelletCmdOption( "verbose" );
+ verboseOption.setShortOption( "v" );
+ verboseOption.setDescription( "More verbose output" );
+ verboseOption.setDefaultValue( false );
+ verboseOption.setIsMandatory( false );
+ verboseOption.setArg( NONE );
+ options.add( verboseOption );
+
+ PelletCmdOption configOption = new PelletCmdOption( "config" );
+ configOption.setShortOption( "C" );
+ configOption.setDescription( "Use the selected configuration file" );
+ configOption.setIsMandatory( false );
+ configOption.setType( "configuration file" );
+ configOption.setArg( REQUIRED );
+ options.add( configOption );
+
+ PelletCmdOption option = new PelletCmdOption( "merge" );
+ option.setShortOption( "m" );
+ option.setDescription( "Merge the ontologies" );
+ option.setDefaultValue( false );
+ option.setIsMandatory( false );
+ option.setArg( PelletCmdOptionArg.NONE );
+ options.add( option );
+
+ options.add( getIgnoreImportsOption() );
+
+ return options;
+ }
+
+ @Override
+ public void run() {
+
+ try {
+ OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
+ Collection inputFiles = FileUtils.getFileURIs(getInputFiles());
+
+ LimitedMapIRIMapper iriMapper = new LimitedMapIRIMapper();
+ OWLOntology baseOntology = manager.createOntology();
+ manager.clearIRIMappers();
+
+ if(options.getOption("ignore-imports").getValueAsBoolean())
+ {
+ manager.addIRIMapper(iriMapper);
+ manager.setSilentMissingImportsHandling(true);
+ }
+ else
+ {
+ manager.addIRIMapper(new NonMappingOntologyIRIMapper());
+ manager.setSilentMissingImportsHandling(false);
+ }
+
+ if(inputFiles.size() > 1)
+ for(String inputFile: inputFiles)
+ addFile(inputFile, manager, iriMapper, baseOntology);
+ else
+ addSingleFile(inputFiles.iterator().next(), manager, iriMapper); //Prevent ugly OWLAPI messages
+
+ manager.removeOntology(baseOntology);
+
+ if(options.getOption("merge").getValueAsBoolean())
+ manager = mergeOntologiesInNewManager(manager);
+
+ printStats(manager);
+
+ }catch(Exception e) {
+ throw new PelletCmdException( e );
+ }
+ }
+
+ private void addFile(String inputFile, OWLOntologyManager manager, LimitedMapIRIMapper iriMapper, OWLOntology baseOntology)
+ {
+ try {
+ IRI iri = IRI.create(inputFile);
+ iriMapper.addAllowedIRI(iri);
+
+ OWLImportsDeclaration declaration = manager.getOWLDataFactory().getOWLImportsDeclaration( iri );
+ manager.applyChange( new AddImport( baseOntology, declaration ) );
+ manager.makeLoadImportRequest( declaration );
+ }catch (Exception e) {
+ if(verbose)
+ System.err.println(e.getLocalizedMessage());
+ }
+ }
+
+
+ private void addSingleFile(String inputFile, OWLOntologyManager manager, LimitedMapIRIMapper iriMapper)
+ {
+ try {
+ IRI iri = IRI.create(inputFile);
+ iriMapper.addAllowedIRI(iri);
+ manager.loadOntologyFromOntologyDocument(iri);
+ }catch (Exception e) {
+ if(verbose)
+ System.err.println(e.getLocalizedMessage());
+ }
+ }
+
+
+ private OWLOntologyManager mergeOntologiesInNewManager(OWLOntologyManager manager) throws OWLOntologyCreationException
+ {
+ OWLOntologyManager newManager = OWLManager.createOWLOntologyManager();
+ OWLOntology merged = newManager.createOntology();
+ List changes = new ArrayList();
+
+ for(OWLOntology ontology: manager.getOntologies())
+ {
+ for (OWLAxiom ax : ontology.getAxioms()) {
+ changes.add(new AddAxiom(merged, ax));
+ }
+ }
+ newManager.applyChanges(changes);
+ return newManager;
+ }
+
+
+ private void printStats(OWLOntologyManager manager) {
+ for(OWLOntology ontology: manager.getOntologies())
+ {
+ String ontologyLocation = manager.getOntologyDocumentIRI(ontology) != null ? manager.getOntologyDocumentIRI(ontology).toString(): "ontology";
+ String ontologyBaseURI = ontology.getOntologyID().getOntologyIRI() != null? ontology.getOntologyID().getOntologyIRI().toQuotedString() : "";
+ output("Information about "+ontologyLocation+" ("+ontologyBaseURI+")");
+ if(verbose)
+ printOntologyHeader(ontology);
+ DLExpressivityChecker expressivityChecker = new DLExpressivityChecker(Collections.singleton(ontology));
+ output("OWL Profile = "+getProfile(ontology));
+ output("DL Expressivity = "+expressivityChecker.getDescriptionLogicName());
+ output("Axioms = "+ontology.getAxiomCount());
+ output("Logical Axioms = "+ontology.getLogicalAxiomCount());
+ output("GCI Axioms = "+ontology.getGeneralClassAxioms().size());
+ output("Individuals = "+ontology.getIndividualsInSignature().size());
+ output("Classes = "+ontology.getClassesInSignature().size());
+ output("Object Properties = "+ontology.getObjectPropertiesInSignature().size());
+ output("Data Properties = "+ontology.getDataPropertiesInSignature().size());
+ output("Annotation Properties = "+ontology.getAnnotationPropertiesInSignature().size());
+
+ Set imports = ontology.getImportsDeclarations();
+ if(imports.size() > 0)
+ {
+ output("Direct Imports:");
+ int count = 1;
+ for(OWLImportsDeclaration imp: imports)
+ output(count+": "+imp.getIRI().toString());
+ count++;
+ }
+ output("");
+ }
+ }
+
+ private String getProfile(OWLOntology ontology) {
+ for(OWLProfile profile: profiles)
+ {
+ if(profile.checkOntology(ontology).isInProfile())
+ return profile.getName();
+ }
+ return "Unknown Profile";
+ }
+
+ private void printOntologyHeader(OWLOntology ontology) {
+ for(OWLAnnotation annotation: ontology.getAnnotations())
+ {
+ IRI property = annotation.getProperty().getIRI();
+ OWLAnnotationValue value = annotation.getValue();
+
+ if(property.equals(OWLRDFVocabulary.OWL_VERSION_INFO.getIRI()))
+ verbose("Version Info = "+getString(value));
+ else if(property.equals(OWLRDFVocabulary.OWL_PRIOR_VERSION.getIRI()))
+ verbose("Prior Version Info = "+getString(value));
+ else if(property.equals(OWLRDFVocabulary.OWL_BACKWARD_COMPATIBLE_WITH.getIRI()))
+ verbose("Backward Compatible With = "+getString(value));
+ else if(property.equals(OWLRDFVocabulary.OWL_INCOMPATIBLE_WITH.getIRI()))
+ verbose("Incompatible With = "+getString(value));
+ }
+ }
+
+ private String getString(OWLAnnotationValue value) {
+ if(value instanceof OWLLiteral)
+ return ((OWLLiteral)value).getLiteral();
+ else
+ return value.toString();
+ }
+}
diff --git a/cli/src/pellet/PelletModularity.java b/cli/src/pellet/PelletModularity.java
new file mode 100644
index 000000000..956e92f81
--- /dev/null
+++ b/cli/src/pellet/PelletModularity.java
@@ -0,0 +1,157 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import static pellet.PelletCmdOptionArg.REQUIRED;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.semanticweb.owlapi.io.RDFXMLOntologyFormat;
+import org.semanticweb.owlapi.io.SystemOutDocumentTarget;
+import org.semanticweb.owlapi.model.OWLAxiom;
+import org.semanticweb.owlapi.model.OWLEntity;
+import org.semanticweb.owlapi.model.OWLException;
+import org.semanticweb.owlapi.model.OWLOntology;
+
+import uk.ac.manchester.cs.owlapi.modularity.ModuleType;
+
+import com.clarkparsia.modularity.ModularityUtils;
+import com.clarkparsia.owlapiv3.OntologyUtils;
+import com.clarkparsia.pellet.owlapiv3.OWLAPILoader;
+
+
+/**
+ *
+ * Title: PelletModularity
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletModularity extends PelletCmdApp {
+
+ private OWLAPILoader loader;
+ private ModuleType moduleType;
+ private String[] entityNames;
+
+ public PelletModularity() {
+ }
+
+ @Override
+ public String getAppCmd() {
+ return "pellet modularity " + getMandatoryOptions() + "[options] ...";
+ }
+
+ @Override
+ public String getAppId() {
+ return "PelletModularity: Extract ontology modules for classes and write it to the STDOUT";
+ }
+
+ @Override
+ public PelletCmdOptions getOptions() {
+ PelletCmdOptions options = getGlobalOptions();
+
+ options.add( getIgnoreImportsOption() );
+
+ PelletCmdOption option = new PelletCmdOption( "signature" );
+ option.setShortOption( "s" );
+ option.setType( "Space separated list" );
+ option.setDescription( "One or more entity URI(s) or local name(s) to be extracted as a module. Example: \"Animal Wildlife Rainforest\"" );
+ option.setIsMandatory( true );
+ option.setArg( REQUIRED );
+ options.add( option );
+
+ option = new PelletCmdOption( "type" );
+ option.setShortOption( "t" );
+ option.setType( "lower| upper | upper-of-lower | lower-of-upper" );
+ option.setDefaultValue( "lower" );
+ option.setDescription( "The type of the module that will be extracted. See http://bit.ly/ontology-module-types for an explanation of the module types." );
+ option.setIsMandatory( false );
+ option.setArg( REQUIRED );
+ options.add( option );
+
+ return options;
+ }
+
+ @Override
+ public void run() {
+ loadEntityNames();
+ loadModuleType();
+ loadOntology();
+ extractModule();
+ }
+
+ private void loadOntology() {
+ loader = (OWLAPILoader) getLoader( "OWLAPIv3" );
+ getKB();
+ }
+
+ private void loadEntityNames() {
+ String signature = options.getOption( "signature" ).getValueAsString();
+
+ if( signature == null )
+ throw new PelletCmdException( "No signature provided" );
+
+ entityNames = signature.split( " " );
+
+ if( entityNames.length == 0 )
+ throw new PelletCmdException( "No signature provided" );
+ }
+
+ private void loadModuleType() {
+ String type = options.getOption( "type" ).getValueAsString();
+
+ if( type.equalsIgnoreCase( "lower" ) ) {
+ moduleType = ModuleType.TOP;
+ }
+ else if( type.equalsIgnoreCase( "upper" ) ) {
+ moduleType = ModuleType.BOT;
+ }
+ else if( type.equalsIgnoreCase( "upper-of-lower" ) ) {
+ moduleType = ModuleType.BOT_OF_TOP;
+ }
+ else if( type.equalsIgnoreCase( "lower-of-upper" ) ) {
+ moduleType = ModuleType.TOP_OF_BOT;
+ }
+ else {
+ throw new PelletCmdException( "Unknown module type: " + type );
+ }
+ }
+
+ private void extractModule() {
+ Set entities = new HashSet();
+ for( String entityName : entityNames ) {
+ OWLEntity entity = OntologyUtils.findEntity( entityName, loader.getAllOntologies() );
+
+ if( entity == null )
+ throw new PelletCmdException( "Entity not found in ontology: " + entityName );
+
+ entities.add( entity );
+ }
+
+ Set module = ModularityUtils.extractModule( loader.getOntology(), entities, moduleType );
+
+ try {
+ OWLOntology moduleOnt = loader.getManager().createOntology( module );
+ loader.getManager().saveOntology( moduleOnt, new RDFXMLOntologyFormat(),
+ new SystemOutDocumentTarget() );
+ } catch( OWLException e ) {
+ throw new RuntimeException( e );
+ }
+ }
+}
diff --git a/cli/src/pellet/PelletQuery.java b/cli/src/pellet/PelletQuery.java
new file mode 100644
index 000000000..fa303fc94
--- /dev/null
+++ b/cli/src/pellet/PelletQuery.java
@@ -0,0 +1,395 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import static pellet.PelletCmdOptionArg.NONE;
+import static pellet.PelletCmdOptionArg.REQUIRED;
+
+import java.io.StringWriter;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.mindswap.pellet.KnowledgeBase;
+import org.mindswap.pellet.PelletOptions;
+import org.mindswap.pellet.exceptions.InconsistentOntologyException;
+import org.mindswap.pellet.jena.JenaLoader;
+import org.mindswap.pellet.jena.NodeFormatter;
+import org.mindswap.pellet.output.TableData;
+
+import com.clarkparsia.pellet.sparqldl.jena.SparqlDLExecutionFactory;
+import com.clarkparsia.pellet.sparqldl.jena.SparqlDLExecutionFactory.QueryEngineType;
+import com.clarkparsia.sparqlowl.parser.arq.ARQTerpParser;
+import com.hp.hpl.jena.query.Dataset;
+import com.hp.hpl.jena.query.DatasetFactory;
+import com.hp.hpl.jena.query.Query;
+import com.hp.hpl.jena.query.QueryExecution;
+import com.hp.hpl.jena.query.QueryFactory;
+import com.hp.hpl.jena.query.QueryParseException;
+import com.hp.hpl.jena.query.QuerySolution;
+import com.hp.hpl.jena.query.ResultSet;
+import com.hp.hpl.jena.query.ResultSetFactory;
+import com.hp.hpl.jena.query.ResultSetFormatter;
+import com.hp.hpl.jena.query.Syntax;
+import com.hp.hpl.jena.rdf.model.Model;
+import com.hp.hpl.jena.rdf.model.RDFNode;
+import com.hp.hpl.jena.shared.NotFoundException;
+import com.hp.hpl.jena.util.FileManager;
+
+/**
+ *
+ * Title: PelletQuery
+ *
+ *
+ * Description: This is the command-line version of Pellet for querying. It is
+ * provided as a stand-alone program and should not be directly used in
+ * applications.
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ * @author Evren Sirin
+ */
+public class PelletQuery extends PelletCmdApp {
+
+ private String queryFile;
+ private String queryString;
+ private Query query;
+ private JenaLoader loader;
+ private ResultSet queryResults;
+ private Model constructQueryModel;
+ private boolean askQueryResult;
+ private Syntax queryFormat = Syntax.syntaxSPARQL;
+ private OutputFormat outputFormat = OutputFormat.TABULAR;
+ private QueryEngineType queryEngine = null;
+
+ static {
+ /*
+ * Register the Terp parser with ARQ
+ */
+ ARQTerpParser.registerFactory();
+ }
+
+ private enum OutputFormat {
+ TABULAR, XML, JSON
+ }
+
+ @Override
+ public String getAppId() {
+ return "PelletQuery: SPARQL-DL Query Engine";
+ }
+
+ @Override
+ public String getAppCmd() {
+ return "pellet query " + getMandatoryOptions() + "[options] ...";
+ }
+
+ @Override
+ public PelletCmdOptions getOptions() {
+ PelletCmdOptions options = getGlobalOptions();
+
+ PelletCmdOption option = new PelletCmdOption( "query-file" );
+ option.setShortOption( "q" );
+ option.setType( "" );
+ option.setDescription( "Read the SPARQL query from the given file" );
+ option.setIsMandatory( true );
+ option.setArg( REQUIRED );
+ options.add( option );
+
+ option = new PelletCmdOption( "output-format" );
+ option.setShortOption( "o" );
+ option.setType( "Tabular | XML | JSON" );
+ option.setDescription( "Format of result set (SELECT queries)" );
+ option.setDefaultValue( "Tabular" );
+ option.setIsMandatory( false );
+ option.setArg( REQUIRED );
+ options.add( option );
+
+ option = new PelletCmdOption( "query-format" );
+ option.setType( "SPARQL | ARQ | TERP" );
+ option.setDescription( "The query format" );
+ option.setDefaultValue( "SPARQL" );
+ option.setIsMandatory( false );
+ option.setArg( REQUIRED );
+ options.add( option );
+
+ options.add( getIgnoreImportsOption() );
+ options.add( getInputFormatOption() );
+
+ option = new PelletCmdOption( "query-engine" );
+ option.setType( "Pellet | ARQ | Mixed" );
+ option.setShortOption( "e" );
+ option.setDescription(
+ "The query engine that will be used. Default behavior "
+ + "is to auto select the engine that can handle the given "
+ + "query with best performance. Pellet query "
+ + "engine is the typically fastest but cannot handle "
+ + "FILTER, OPTIONAL, UNION, DESCRIBE or named graphs. "
+ + "Mixed engine uses ARQ to handle SPARQL algebra and "
+ + "uses Pellet to answer Basic Graph Patterns (BGP) "
+ + "which can be expressed in SPARQL-DL. ARQ engine uses "
+ + "Pellet to answer single triple patterns and can handle "
+ + "queries that do not fit into SPARQL-DL. As a "
+ + "consequence SPARQL-DL extensions and complex class "
+ + "expressions encoded inside the SPARQL query are not "
+ + "supported." );
+ option.setIsMandatory( false );
+ option.setArg( REQUIRED );
+ options.add( option );
+
+ option = new PelletCmdOption( "bnode" );
+ option.setDescription(
+ "Treat bnodes in the query as undistinguished variables. Undistinguished "
+ + "variables can match individuals whose existence is inferred by the "
+ + "reasoner, e.g. due to a someValuesFrom restriction. This option has "
+ + "no effect if ARQ engine is selected." );
+ option.setDefaultValue( false );
+ option.setIsMandatory( false );
+ option.setArg( NONE );
+ options.add( option );
+
+ return options;
+ }
+
+ public PelletQuery() {
+ }
+
+ @Override
+ public void parseArgs(String[] args) {
+ super.parseArgs( args );
+
+ setQueryFile( options.getOption( "query-file" ).getValueAsString() );
+ setOutputFormat( options.getOption( "output-format" ).getValueAsString() );
+ setQueryFormat( options.getOption( "query-format" ).getValueAsString() );
+ setQueryEngine( options.getOption( "query-engine" ).getValueAsString() );
+ PelletOptions.TREAT_ALL_VARS_DISTINGUISHED = !options.getOption( "bnode" )
+ .getValueAsBoolean();
+ }
+
+ @Override
+ public void run() {
+ loadQuery();
+ loadInput();
+ execQuery();
+ printQueryResults();
+ }
+
+ public void setQueryFile(String s) {
+ queryFile = s;
+ }
+
+ public void setOutputFormat(String s) {
+ if( s == null )
+ outputFormat = OutputFormat.TABULAR;
+ else if( s.equalsIgnoreCase( "Tabular" ) )
+ outputFormat = OutputFormat.TABULAR;
+ else if( s.equalsIgnoreCase( "XML" ) )
+ outputFormat = OutputFormat.XML;
+ else if( s.equalsIgnoreCase( "JSON" ) )
+ outputFormat = OutputFormat.JSON;
+ else
+ throw new PelletCmdException( "Invalid output format: " + outputFormat );
+ }
+
+ public ResultSet getQueryResults() {
+ return queryResults;
+ }
+
+ public Model getConstructQueryModel() {
+ return constructQueryModel;
+ }
+
+ public boolean getAskQueryResult() {
+ return askQueryResult;
+ }
+
+ public void setQueryFormat(String s) {
+ if( s == null )
+ throw new PelletCmdException( "Query format is null");
+
+ if( s.equalsIgnoreCase( "SPARQL" ) )
+ queryFormat = Syntax.lookup( "SPARQL" );
+ else if( s.equalsIgnoreCase( "ARQ" ) )
+ queryFormat = Syntax.lookup( "ARQ" );
+ else if( s.equalsIgnoreCase( "TERP" ) )
+ queryFormat = Syntax.lookup( "TERP" );
+ else
+ throw new PelletCmdException( "Unknown query format: " + s );
+
+ if( queryFormat == null )
+ throw new PelletCmdException( "Query format is null: " + s );
+ }
+
+
+ public void setQueryEngine(String s) {
+ if( s == null ) {
+ queryEngine = null;
+ return;
+ }
+
+ try {
+ queryEngine = QueryEngineType.valueOf( s.toUpperCase() );
+ } catch( IllegalArgumentException e ) {
+ throw new PelletCmdException( "Unknown query engine: " + s );
+ }
+ }
+
+ private void loadInput() {
+ try {
+ loader = (JenaLoader) getLoader( "Jena" );
+
+ KnowledgeBase kb = getKB( loader );
+
+ startTask( "consistency check" );
+ boolean isConsistent = kb.isConsistent();
+ finishTask( "consistency check" );
+
+ if( !isConsistent )
+ throw new PelletCmdException( "Ontology is inconsistent, run \"pellet explain\" to get the reason" );
+
+ } catch( NotFoundException e ) {
+ throw new PelletCmdException( e );
+ } catch( QueryParseException e ) {
+ throw new PelletCmdException( e );
+ } catch( InconsistentOntologyException e ) {
+ throw new PelletCmdException( "Cannot query inconsistent ontology!" );
+ }
+ }
+
+ private void loadQuery() {
+ try {
+ verbose( "Query file: " + queryFile );
+ startTask( "parsing query file" );
+
+ queryString = FileManager.get().readWholeFileAsUTF8( queryFile ) ;
+ query = QueryFactory.create( queryString, queryFormat );
+
+ finishTask( "parsing query file" );
+
+ verbose( "Query: " );
+ verbose( "-----------------------------------------------------" );
+ verbose( queryString.trim() );
+ verbose( "-----------------------------------------------------" );
+ } catch( NotFoundException e ) {
+ throw new PelletCmdException( e );
+ } catch( QueryParseException e ) {
+ throw new PelletCmdException( e );
+ }
+ }
+
+ private void execQuery() {
+ Dataset dataset = DatasetFactory.create( loader.getModel() );
+ QueryExecution qe = (queryEngine == null)
+ ? SparqlDLExecutionFactory.create( query, dataset )
+ : SparqlDLExecutionFactory.create( query, dataset, null, queryEngine );
+
+ verbose( "Created query engine: " + qe.getClass().getName() );
+
+ startTask( "query execution" );
+ if( query.isSelectType() )
+ queryResults = ResultSetFactory.makeRewindable( qe.execSelect() );
+ else if( query.isConstructType() )
+ constructQueryModel = qe.execConstruct();
+ else if( query.isAskType() )
+ askQueryResult = qe.execAsk();
+ else
+ throw new UnsupportedOperationException( "Unsupported query type" );
+ finishTask( "query execution" );
+ }
+
+ private void printQueryResults() {
+ if( query.isSelectType() )
+ printSelectQueryResuts();
+ else if( query.isConstructType() )
+ printConstructQueryResults();
+ else if( query.isAskType() )
+ printAskQueryResult();
+
+ }
+
+ private void printSelectQueryResuts() {
+ if( queryResults.hasNext() ) {
+ if( outputFormat == OutputFormat.TABULAR )
+ printTabularQueryResults();
+ else if( outputFormat == OutputFormat.XML )
+ printXMLQueryResults();
+ else if( outputFormat == OutputFormat.JSON )
+ printJSONQueryResults();
+ else
+ printTabularQueryResults();
+ }
+ else {
+ output( "Query Results (0 answers): " );
+ output( "NO RESULTS" );
+ }
+ }
+
+ private void printTabularQueryResults() {
+ // number of distinct bindings
+ int count = 0;
+
+ NodeFormatter formatter = new NodeFormatter( loader.getModel() );
+
+ // variables used in select
+ List> resultVars = query.getResultVars();
+
+ List> data = new ArrayList>();
+ while( queryResults.hasNext() ) {
+ QuerySolution binding = queryResults.nextSolution();
+ List formattedBinding = new ArrayList();
+ for( int i = 0; i < resultVars.size(); i++ ) {
+ String var = (String) resultVars.get( i );
+ RDFNode result = binding.get( var );
+
+ // format the result
+ formattedBinding.add( formatter.format( result ) );
+ }
+
+ if( data.add( formattedBinding ) )
+ count++;
+ }
+
+ output( "Query Results (" + count + " answers): " );
+
+ TableData table = new TableData( data, resultVars );
+ StringWriter tableSW = new StringWriter();
+ table.print( tableSW );
+ output( tableSW.toString() );
+ }
+
+ private void printXMLQueryResults() {
+ ResultSetFormatter.outputAsXML( queryResults );
+ }
+
+ private void printJSONQueryResults() {
+ if( verbose ) {
+ System.out.println( "/* " );
+ System.out.println( queryString.replace( "*/", "* /" ) );
+ System.out.println( "*/ ");
+ }
+ ResultSetFormatter.outputAsJSON( queryResults );
+ }
+
+ private void printConstructQueryResults() {
+ StringWriter modelSW = new StringWriter();
+ constructQueryModel.write( modelSW );
+ output( modelSW.toString() );
+ }
+
+ private void printAskQueryResult() {
+ output( "ASK query result: " );
+ output( askQueryResult
+ ? "yes"
+ : "no" );
+ }
+}
diff --git a/cli/src/pellet/PelletRealize.java b/cli/src/pellet/PelletRealize.java
new file mode 100644
index 000000000..2a811f821
--- /dev/null
+++ b/cli/src/pellet/PelletRealize.java
@@ -0,0 +1,83 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import org.mindswap.pellet.KnowledgeBase;
+import org.mindswap.pellet.taxonomy.printer.ClassTreePrinter;
+import org.mindswap.pellet.taxonomy.printer.TaxonomyPrinter;
+
+import aterm.ATermAppl;
+
+/**
+ *
+ * Title: PelletRealize
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletRealize extends PelletCmdApp {
+
+    public PelletRealize() {
+        super();
+    }
+
+    @Override
+    public String getAppCmd() {
+        return "pellet realize " + getMandatoryOptions() + "[options] ...";
+    }
+
+    @Override
+    public String getAppId() {
+        return "PelletRealize: Compute and display the most specific instances for each class";
+    }
+
+    @Override
+    public PelletCmdOptions getOptions() {
+        // Realization only needs the shared loader/import/format switches.
+        PelletCmdOptions opts = getGlobalOptions();
+        opts.add( getLoaderOption() );
+        opts.add( getIgnoreImportsOption() );
+        opts.add( getInputFormatOption() );
+        return opts;
+    }
+
+    @Override
+    public void run() {
+        KnowledgeBase kb = getKB();
+
+        // A successful consistency check must precede classification/realization.
+        startTask( "consistency check" );
+        boolean consistent = kb.isConsistent();
+        finishTask( "consistency check" );
+        if( !consistent )
+            throw new PelletCmdException( "Ontology is inconsistent, run \"pellet explain\" to get the reason" );
+
+        startTask( "classification" );
+        kb.classify();
+        finishTask( "classification" );
+
+        startTask( "realization" );
+        kb.realize();
+        finishTask( "realization" );
+
+        // Print the class tree annotated with the most specific instances.
+        TaxonomyPrinter treePrinter = new ClassTreePrinter();
+        treePrinter.print( kb.getTaxonomy() );
+    }
+
+}
diff --git a/cli/src/pellet/PelletTransTree.java b/cli/src/pellet/PelletTransTree.java
new file mode 100644
index 000000000..23f8322f6
--- /dev/null
+++ b/cli/src/pellet/PelletTransTree.java
@@ -0,0 +1,230 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import static pellet.PelletCmdOptionArg.NONE;
+import static pellet.PelletCmdOptionArg.REQUIRED;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import org.mindswap.pellet.KnowledgeBase;
+import org.mindswap.pellet.taxonomy.POTaxonomyBuilder;
+import org.mindswap.pellet.taxonomy.SubsumptionComparator;
+import org.mindswap.pellet.taxonomy.Taxonomy;
+import org.mindswap.pellet.taxonomy.printer.ClassTreePrinter;
+import org.mindswap.pellet.utils.ATermUtils;
+import org.semanticweb.owlapi.model.OWLClass;
+import org.semanticweb.owlapi.model.OWLEntity;
+import org.semanticweb.owlapi.model.OWLObjectProperty;
+
+import aterm.AFun;
+import aterm.ATermAppl;
+
+import com.clarkparsia.owlapiv3.OntologyUtils;
+import com.clarkparsia.pellet.owlapiv3.OWLAPILoader;
+
+/**
+ *
+ * Title: PelletTransTree
+ *
+ *
+ * Description: Compute the hierarchy for part-of classes (or individuals) given
+ * a (transitive) property.
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletTransTree extends PelletCmdApp {
+
+    private String propertyName;
+    private boolean showClasses;
+    private boolean showIndividuals;
+
+    public PelletTransTree() {
+        super();
+    }
+
+    @Override
+    public String getAppId() {
+        return "PelletTransTree: Compute a transitive-tree closure";
+    }
+
+    @Override
+    public String getAppCmd() {
+        return "pellet trans-tree " + getMandatoryOptions() + "[options] ...";
+    }
+
+    @Override
+    public PelletCmdOptions getOptions() {
+        showClasses = true;
+        showIndividuals = false;
+
+        PelletCmdOptions options = getGlobalOptions();
+
+        PelletCmdOption option = new PelletCmdOption( "property" );
+        option.setShortOption( "p" );
+        option.setType( "" );
+        option.setDescription( "The part-of (transitive) property" );
+        option.setIsMandatory( true );
+        option.setArg( REQUIRED );
+        options.add( option );
+
+        option = new PelletCmdOption( "classes" );
+        option.setShortOption( "c" );
+        option.setDescription( "Show parts hierarchy for classes" );
+        option.setDefaultValue( showClasses );
+        option.setIsMandatory( false );
+        option.setArg( NONE );
+        options.add( option );
+
+        option = new PelletCmdOption( "individuals" );
+        option.setShortOption( "i" );
+        option.setDescription( "Show parts hierarchy for individuals" );
+        option.setDefaultValue( showIndividuals );
+        option.setIsMandatory( false );
+        option.setArg( NONE );
+        options.add( option );
+
+        option = new PelletCmdOption( "filter" );
+        option.setShortOption( "f" );
+        option.setType( "" );
+        option.setDescription( "The class to filter" );
+        option.setIsMandatory( false );
+        option.setArg( REQUIRED );
+        options.add( option );
+
+        return options;
+    }
+
+    @Override
+    public void run() {
+        propertyName = options.getOption( "property" ).getValueAsString();
+
+        OWLAPILoader loader = new OWLAPILoader();
+        KnowledgeBase kb = loader.createKB( getInputFiles() );
+
+        // The property must exist, be an object property and be transitive.
+        OWLEntity entity = OntologyUtils.findEntity( propertyName, loader.getAllOntologies() );
+
+        if( entity == null )
+            throw new PelletCmdException( "Property not found: " + propertyName );
+
+        if( !(entity instanceof OWLObjectProperty) )
+            throw new PelletCmdException( "Not an object property: " + propertyName );
+
+        if( !((OWLObjectProperty) entity).isTransitive( loader.getAllOntologies() ) )
+            throw new PelletCmdException( "Not a transitive property: " + propertyName );
+
+        ATermAppl p = ATermUtils.makeTermAppl( entity.getIRI().toString() );
+
+        // Optional --filter restricts the tree to instances/subclasses of a class.
+        ATermAppl c = null;
+        boolean filter = false;
+
+        if( options.getOption( "filter" ).exists() ) {
+            String filterName = options.getOption( "filter" ).getValueAsString();
+            OWLEntity filterClass = OntologyUtils.findEntity( filterName, loader.getAllOntologies() );
+            if( filterClass == null )
+                throw new PelletCmdException( "Filter class not found: " + filterName );
+            if( !(filterClass instanceof OWLClass) )
+                throw new PelletCmdException( "Not a class: " + filterName );
+
+            c = ATermUtils.makeTermAppl( filterClass.getIRI().toString() );
+
+            filter = true;
+        }
+
+        POTaxonomyBuilder builder = null;
+
+        // Test first the individuals parameter, as per default the --classes
+        // option is true
+        if( options.getOption( "individuals" ).getValueAsBoolean() ) {
+            // Parts for individuals (restored generics; raw Set does not
+            // compile with the typed for-each below)
+            builder = new POTaxonomyBuilder( kb, new PartIndividualsComparator( kb, p ) );
+
+            Set<ATermAppl> individuals;
+            if( filter )
+                individuals = kb.getInstances( c );
+            else
+                individuals = kb.getIndividuals(); // Note: this is not an optimal solution
+
+            for( ATermAppl individual : individuals )
+                if( !ATermUtils.isBnode( individual ) )
+                    builder.classify( individual );
+        }
+        else {
+            builder = new POTaxonomyBuilder( kb, new PartClassesComparator( kb, p ) );
+
+            if( filter ) {
+                for( ATermAppl cl : getDistinctSubclasses( kb, c ) )
+                    builder.classify( cl );
+            }
+            else
+                builder.classify();
+        }
+
+        Taxonomy taxonomy = builder.getTaxonomy();
+
+        ClassTreePrinter printer = new ClassTreePrinter();
+        printer.print( taxonomy );
+
+        publicTaxonomy = taxonomy;
+    }
+
+    /** Unit testing access only*/
+    public Taxonomy publicTaxonomy;
+
+    /**
+     * Returns the given class plus all of its subclasses, flattening the
+     * equivalence sets returned by the KB.
+     */
+    private Set<ATermAppl> getDistinctSubclasses(KnowledgeBase kb, ATermAppl c) {
+        Set<ATermAppl> filteredClasses = new HashSet<ATermAppl>();
+        Set<Set<ATermAppl>> subclasses = kb.getSubClasses( c );
+        for( Set<ATermAppl> s : subclasses )
+            filteredClasses.addAll( s );
+        filteredClasses.add( c );
+
+        // Remove not(TOP), since taxonomy builder complains otherwise...
+        filteredClasses.remove( ATermUtils.negate( ATermUtils.TOP ) );
+
+        return filteredClasses;
+    }
+
+    /** Orders classes a, b by whether a is subsumed by someValues(p, b). */
+    private static class PartClassesComparator extends SubsumptionComparator {
+
+        private final ATermAppl p;
+
+        public PartClassesComparator(KnowledgeBase kb, ATermAppl p) {
+            super( kb );
+            this.p = p;
+        }
+
+        @Override
+        protected boolean isSubsumedBy(ATermAppl a, ATermAppl b) {
+            ATermAppl someB = ATermUtils.makeSomeValues( p, b );
+
+            return kb.isSubClassOf( a, someB );
+        }
+    }
+
+    /** Orders individuals a, b by whether the KB entails p(a, b). */
+    private static class PartIndividualsComparator extends SubsumptionComparator {
+
+        private final ATermAppl p;
+
+        public PartIndividualsComparator(KnowledgeBase kb, ATermAppl p) {
+            super( kb );
+            this.p = p;
+        }
+
+        @Override
+        protected boolean isSubsumedBy(ATermAppl a, ATermAppl b) {
+            return kb.hasPropertyValue( a, p, b );
+        }
+    }
+}
diff --git a/cli/src/pellet/PelletUnsatisfiable.java b/cli/src/pellet/PelletUnsatisfiable.java
new file mode 100644
index 000000000..f6511e141
--- /dev/null
+++ b/cli/src/pellet/PelletUnsatisfiable.java
@@ -0,0 +1,112 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import java.util.Iterator;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.mindswap.pellet.KnowledgeBase;
+import org.mindswap.pellet.PelletOptions;
+import org.mindswap.pellet.utils.Comparators;
+import org.mindswap.pellet.utils.QNameProvider;
+import org.mindswap.pellet.utils.progress.ProgressMonitor;
+
+import aterm.ATermAppl;
+
+/**
+ *
+ * Title: PelletUnsatisfiable
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Markus Stocker
+ */
+public class PelletUnsatisfiable extends PelletCmdApp {
+
+    public PelletUnsatisfiable() {
+        super();
+    }
+
+    @Override
+    public String getAppCmd() {
+        return "pellet unsatisfiable " + getMandatoryOptions() + "[options] ...";
+    }
+
+    @Override
+    public String getAppId() {
+        return "PelletUnsatisfiable: Find the unsatisfiable classes in the ontology";
+    }
+
+    @Override
+    public PelletCmdOptions getOptions() {
+        PelletCmdOptions options = getGlobalOptions();
+
+        options.add( getLoaderOption() );
+        options.add( getIgnoreImportsOption() );
+        options.add( getInputFormatOption() );
+
+        return options;
+    }
+
+    /**
+     * Checks consistency, tests the satisfiability of every named class and
+     * prints the unsatisfiable ones in sorted order.
+     */
+    @Override
+    public void run() {
+        KnowledgeBase kb = getKB();
+
+        startTask( "consistency check" );
+        boolean isConsistent = kb.isConsistent();
+        finishTask( "consistency check" );
+
+        if( !isConsistent )
+            throw new PelletCmdException( "Ontology is inconsistent, run \"pellet explain\" to get the reason" );
+
+        QNameProvider qnames = new QNameProvider();
+        // Sorted set of qname-abbreviated class names for stable output
+        // (restored element type; the raw TreeSet form was garbled).
+        Set<String> unsatisfiableClasses = new TreeSet<String>( Comparators.stringComparator );
+
+        ProgressMonitor monitor = PelletOptions.USE_CLASSIFICATION_MONITOR.create();
+        monitor.setProgressTitle( "Finding unsatisfiable" );
+        monitor.setProgressLength( kb.getClasses().size() );
+
+        startTask( "find unsatisfiable" );
+        monitor.taskStarted();
+
+        // Enhanced for replaces the raw Iterator, whose untyped next() could
+        // not be assigned to ATermAppl without a cast.
+        for( ATermAppl c : kb.getClasses() ) {
+            monitor.incrementProgress();
+            if( !kb.isSatisfiable( c ) ) {
+                unsatisfiableClasses.add( qnames.shortForm( c.getName() ) );
+            }
+        }
+
+        monitor.taskFinished();
+        finishTask( "find unsatisfiable" );
+
+        output("");
+        if( unsatisfiableClasses.isEmpty() ) {
+            output( "Found no unsatisfiable concepts." );
+        }
+        else {
+            output( "Found " + unsatisfiableClasses.size() + " unsatisfiable concept(s):" );
+
+            for( String c : unsatisfiableClasses ) {
+                output( c );
+            }
+        }
+    }
+
+}
diff --git a/cli/src/pellet/Pellint.java b/cli/src/pellet/Pellint.java
new file mode 100644
index 000000000..5766bbdc9
--- /dev/null
+++ b/cli/src/pellet/Pellint.java
@@ -0,0 +1,479 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public
+// License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of
+// proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package pellet;
+
+import static pellet.PelletCmdOptionArg.NONE;
+import static pellet.PelletCmdOptionArg.REQUIRED;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.Serializable;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.mindswap.pellet.utils.FileUtils;
+import org.semanticweb.owlapi.apibinding.OWLManager;
+import org.semanticweb.owlapi.io.StreamDocumentTarget;
+import org.semanticweb.owlapi.model.IRI;
+import org.semanticweb.owlapi.model.OWLAxiom;
+import org.semanticweb.owlapi.model.OWLClass;
+import org.semanticweb.owlapi.model.OWLOntology;
+import org.semanticweb.owlapi.model.OWLOntologyChangeException;
+import org.semanticweb.owlapi.model.OWLOntologyCreationException;
+import org.semanticweb.owlapi.model.OWLOntologyManager;
+import org.semanticweb.owlapi.model.OWLOntologyStorageException;
+import org.semanticweb.owlapi.model.UnknownOWLOntologyException;
+import org.semanticweb.owlapi.profiles.OWL2DLProfile;
+import org.semanticweb.owlapi.profiles.OWLProfileReport;
+import org.semanticweb.owlapi.profiles.OWLProfileViolation;
+import org.semanticweb.owlapi.util.OWLOntologyImportsClosureSetProvider;
+import org.semanticweb.owlapi.util.OWLOntologyMerger;
+
+import com.clarkparsia.pellint.lintpattern.LintPattern;
+import com.clarkparsia.pellint.lintpattern.LintPatternLoader;
+import com.clarkparsia.pellint.lintpattern.axiom.AxiomLintPattern;
+import com.clarkparsia.pellint.lintpattern.ontology.OntologyLintPattern;
+import com.clarkparsia.pellint.model.Lint;
+import com.clarkparsia.pellint.model.OntologyLints;
+import com.clarkparsia.pellint.model.Severity;
+import com.clarkparsia.pellint.rdfxml.OWLSyntaxChecker;
+import com.clarkparsia.pellint.rdfxml.RDFLints;
+import com.clarkparsia.pellint.rdfxml.RDFModel;
+import com.clarkparsia.pellint.rdfxml.RDFModelReader;
+import com.clarkparsia.pellint.rdfxml.RDFModelWriter;
+import com.clarkparsia.pellint.util.CollectionUtil;
+import com.clarkparsia.pellint.util.IllegalPellintArgumentException;
+import com.hp.hpl.jena.rdf.model.Statement;
+
+/**
+ *
+ * Title: Pellint Main class
+ *
+ *
+ * Description: Provides CLI and API interfaces for the Pellint program
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Harris Lin
+ */
+public class Pellint extends PelletCmdApp {
+
+ private static final String CONFIGURATION_PROPERTY_NAME = "pellint.configuration";
+ private static final String DEFAULT_CONFIGURATION_FILE_NAME = "pellint.properties";
+ private static final IRI MERGED_ONTOLOGY_URI = IRI.create( "tag:clarkparsia.com,2008:pellint:merged" );
+ private static final Logger LOGGER = Logger.getLogger( Pellint.class
+ .getName() );
+
+ private boolean m_DoRDF = true;
+ private boolean m_DoOWL = true;
+ private boolean m_DoRootOnly = false;
+ private String m_InputOntologyPath;
+ private String m_OutputOntologyPath;
+
+ public Pellint() {
+ }
+
+
+    /**
+     * Parses lint-specific options: --only, --root-only, --fix and the single
+     * input ontology.
+     *
+     * @throws PelletCmdException on an invalid --only value or a wrong number
+     *             of input files
+     */
+    @Override
+    public void parseArgs(String[] args) {
+        super.parseArgs( args );
+
+        String only = options.getOption( "only" ).getValueAsString();
+        if( only == null ) {
+            // Default: run both the RDF and the OWL lint passes.
+            setDoRDF( true );
+            setDoOWL( true );
+        }
+        else {
+            if( only.equalsIgnoreCase( "RDF" ) ) {
+                setDoRDF( true );
+                setDoOWL( false );
+            }
+            else if( only.equalsIgnoreCase( "OWL" ) ) {
+                setDoRDF( false );
+                setDoOWL( true );
+            }
+            else
+                throw new PelletCmdException( "Invalid argument to lint --only: " + only );
+        }
+
+        setDoRootOnly( options.getOption( "root-only" ).getValueAsBoolean() );
+
+        // Fail with a CLI error instead of an ArrayIndexOutOfBoundsException
+        // when no input file was given.
+        String[] inputFiles = getInputFiles();
+        if( inputFiles.length == 0 )
+            throw new PelletCmdException( "No input file given" );
+        if( inputFiles.length > 1 )
+            throw new PelletCmdException( "lint doesn't handle multiple input files" );
+
+        setInputOntologyPath( inputFiles[0] );
+
+        setOutputOntologyPath( options.getOption( "fix" ).getValueAsString() );
+    }
+
+ // One-line banner shown in help/usage output.
+ public String getAppId() {
+ return "Pellint: Lint tool for OWL ontologies";
+ }
+
+    /** Builds the two-line usage message, separated by a blank line. */
+    public String getAppCmd() {
+        String newline = System.getProperty( "line.separator" );
+        StringBuilder usage = new StringBuilder();
+        usage.append( "pellet lint [options] ...\n" );
+        usage.append( newline );
+        usage.append( newline );
+        usage.append( "Note: pellet lint without arguments prints the lint report to STDOUT." );
+        return usage.toString();
+    }
+
+ // NOTE(review): appears to be leftover/dead code -- package-private, empty
+ // body, no callers in this file, raw Map parameter type. TODO confirm it is
+ // unused and remove it.
+ void f(Map arg) {
+
+ }
+    /** Declares the lint-specific command-line options on top of the global ones. */
+    public PelletCmdOptions getOptions() {
+        PelletCmdOptions opts = getGlobalOptions();
+
+        // --fix FILE: write a repaired ontology in RDF/XML.
+        PelletCmdOption fix = new PelletCmdOption( "fix" );
+        fix.setShortOption( "f" );
+        fix.setIsMandatory( false );
+        fix.setType( "File" );
+        fix.setDescription( "Apply any applicable fixes to ontology lints and save a new ontology to file in RDF/XML format." );
+        fix.setArg( REQUIRED );
+        opts.add( fix );
+
+        // --root-only: skip the imports closure.
+        PelletCmdOption rootOnly = new PelletCmdOption( "root-only" );
+        rootOnly.setIsMandatory( false );
+        rootOnly.setDefaultValue( false );
+        rootOnly.setType( "boolean" );
+        rootOnly.setDescription( "Lint the root ontology only; ignore its imports." );
+        rootOnly.setArg( NONE );
+        opts.add( rootOnly );
+
+        // --only RDF|OWL: restrict the run to one of the two lint passes.
+        PelletCmdOption only = new PelletCmdOption( "only" );
+        only.setShortOption( "o" );
+        only.setIsMandatory( false );
+        only.setType( "RDF | OWL" );
+        only.setDescription( "Analyze only RDF declarations or OWL axioms, not both." );
+        only.setArg( REQUIRED );
+        opts.add( only );
+
+        // --exclude-valid-punning: suppress OWL 2-legal punnings in the report.
+        PelletCmdOption excludePunning = new PelletCmdOption( "exclude-valid-punning" );
+        excludePunning.setIsMandatory( false );
+        excludePunning.setDefaultValue( false );
+        excludePunning.setDescription("Excludes valid punnings to be reported by lint. OWL 2 allows resources\n" +
+                "to have certain multiple types (known as punning), e.g. a resource can\n" +
+                "be both a class and an individual. However, certain punnings are not\n" +
+                "allowed under any condition, e.g. a resource cannot be both a datatype\n" +
+                "property and an object property. All punnings are reported by default\n" +
+                "but if this option is used punnings valid for OWL 2 will be excluded\n" +
+                "from the report.");
+        excludePunning.setArg( NONE );
+        opts.add( excludePunning );
+
+        return opts;
+    }
+
+ /** Enables/disables the RDF lint pass (see --only). */
+ public void setDoRDF(boolean v) {
+ m_DoRDF = v;
+ }
+
+ /** Enables/disables the OWL lint pass (see --only). */
+ public void setDoOWL(boolean v) {
+ m_DoOWL = v;
+ }
+
+ /** When true, lints only the root ontology and ignores its imports. */
+ public void setDoRootOnly(boolean v) {
+ m_DoRootOnly = v;
+ }
+
+ /** Path/URI of the ontology to lint. */
+ public void setInputOntologyPath(String v) {
+ m_InputOntologyPath = v;
+ }
+
+ /** Path for the fixed ontology (null disables --fix output). */
+ public void setOutputOntologyPath(String v) {
+ m_OutputOntologyPath = v;
+ }
+
+    /**
+     * Runs the given axiom-level and ontology-level lint patterns against the
+     * ontology and returns the collected lints, sorted by severity (highest
+     * first) and then by the IRI fragment of the single participating class
+     * when both lints have exactly one.
+     *
+     * @param axiomLintPatterns patterns matched against each axiom
+     * @param ontologyLintPatterns patterns matched against the whole ontology
+     * @param ontology the ontology to lint
+     * @return the collected, sorted lints
+     */
+    public static OntologyLints lint(List<AxiomLintPattern> axiomLintPatterns,
+            List<OntologyLintPattern> ontologyLintPatterns, OWLOntology ontology) {
+        OntologyLints ontologyLints = new OntologyLints( ontology );
+        for( OWLAxiom axiom : ontology.getAxioms() ) {
+            for( AxiomLintPattern pattern : axiomLintPatterns ) {
+                Lint lint = pattern.match( ontology, axiom );
+                if( lint != null ) {
+                    ontologyLints.addLint( pattern, lint );
+                }
+            }
+        }
+
+        for( OntologyLintPattern pattern : ontologyLintPatterns ) {
+            List<Lint> lints = pattern.match( ontology );
+            if( !lints.isEmpty() ) {
+                ontologyLints.addLints( pattern, lints );
+            }
+        }
+
+        // Restored Comparator<Lint>; with the raw Comparator the
+        // compare(Lint, Lint) method below would not override compare().
+        ontologyLints.sort( new Comparator<Lint>() {
+            public int compare(Lint lint0, Lint lint1) {
+                Severity severity0 = lint0.getSeverity();
+                Severity severity1 = lint1.getSeverity();
+                // Higher severity sorts first, hence the negation.
+                if( severity0 != null && severity1 != null ) {
+                    return -severity0.compareTo( severity1 );
+                }
+
+                // Tie-break on the IRI fragment of the single participating
+                // class, when that is well-defined for both lints.
+                Set<OWLClass> classes0 = lint0.getParticipatingClasses();
+                Set<OWLClass> classes1 = lint1.getParticipatingClasses();
+                if( classes0 == null || classes1 == null )
+                    return 0;
+                if( classes0.size() != 1 || classes1.size() != 1 )
+                    return 0;
+
+                IRI uri0 = classes0.iterator().next().getIRI();
+                IRI uri1 = classes1.iterator().next().getIRI();
+                if( uri0 == null || uri1 == null )
+                    return 0;
+
+                String fragment0 = uri0.getFragment();
+                String fragment1 = uri1.getFragment();
+                if( fragment0 == null || fragment1 == null )
+                    return 0;
+
+                return fragment0.compareTo( fragment1 );
+            }
+        } );
+
+        return ontologyLints;
+    }
+
+ /**
+ * Entry point: validates that an input ontology was specified, then runs
+ * the RDF and/or OWL lint passes depending on the configured flags.
+ *
+ * NOTE(review): every checked exception is merely printed to stderr here,
+ * so callers cannot observe failures programmatically and the process exit
+ * status will not reflect them.
+ */
+ public void run() {
+ try {
+ if( m_InputOntologyPath == null )
+ throw new IllegalPellintArgumentException( "Input ontology is not specified" );
+
+ if( m_DoRDF )
+ runLintForRDFXML();
+
+ if( m_DoOWL )
+ runLintForOWL();
+ } catch( IllegalPellintArgumentException e ) {
+ e.printStackTrace();
+ } catch( MalformedURLException e ) {
+ e.printStackTrace();
+ } catch( IOException e ) {
+ e.printStackTrace();
+ } catch( OWLOntologyCreationException e ) {
+ e.printStackTrace();
+ } catch( OWLOntologyStorageException e ) {
+ e.printStackTrace();
+ } catch( OWLOntologyChangeException e ) {
+ e.printStackTrace();
+ }
+ }
+
+ /**
+ * Lints the input ontology at the RDF/XML level using OWLSyntaxChecker and
+ * prints the resulting lints. If an output path is set and the OWL pass is
+ * disabled, also writes the model augmented with the reported missing
+ * statements to that path.
+ */
+ private void runLintForRDFXML() throws MalformedURLException, IOException {
+ RDFModelReader reader = new RDFModelReader();
+ RDFModel rootModel = null;
+ try {
+ // Read only the root model when root-only mode is on.
+ rootModel = reader.read( m_InputOntologyPath, !m_DoRootOnly );
+ } catch( Exception e ) {
+ throw new PelletCmdException( e );
+ }
+ OWLSyntaxChecker checker = new OWLSyntaxChecker();
+
+ checker.setExcludeValidPunnings(options.getOption("exclude-valid-punning").getValueAsBoolean());
+
+ RDFLints lints = checker.validate( rootModel );
+
+ output( lints.toString() );
+
+ // Only save here when the OWL pass won't save its own (fixed) output.
+ if( m_OutputOntologyPath != null && !m_DoOWL ) {
+ List missingStmts = lints.getMissingStatements();
+
+ rootModel.addAllStatementsWithExistingBNodesOnly( missingStmts );
+
+ RDFModelWriter writer = new RDFModelWriter();
+ writer.writePretty( new FileOutputStream( new File( m_OutputOntologyPath ) ),
+ rootModel );
+ output("Saved to " + m_OutputOntologyPath );
+ }
+ }
+
+ /**
+ * Lints the input ontology at the OWL level: reports OWL 2 DL profile
+ * violations and pattern-based lints for the root ontology; unless
+ * root-only mode is on, does the same for each imported ontology and for
+ * the merged import closure. If an output path is set, applies the fixable
+ * lints to the root ontology and saves the result.
+ */
+ private void runLintForOWL() throws OWLOntologyCreationException, OWLOntologyChangeException,
+ UnknownOWLOntologyException, OWLOntologyStorageException, FileNotFoundException {
+ LintPatternLoader patternLoader = new LintPatternLoader( loadProperties() );
+ List axiomLintPatterns = patternLoader.getAxiomLintPatterns();
+ List ontologyLintPatterns = patternLoader.getOntologyLintPatterns();
+ logLoadedPatterns( axiomLintPatterns, ontologyLintPatterns );
+
+ OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
+
+ OWLOntology rootOntology = null;
+ try {
+ String inputOntologyURI = FileUtils.toURI( m_InputOntologyPath );
+ rootOntology = manager.loadOntology( IRI.create( inputOntologyURI ) );
+ } catch( Exception e ) {
+ throw new PelletCmdException( e );
+ }
+
+ output( getOWL2DLProfileViolations( rootOntology ) );
+
+ OntologyLints rootOntologyLints = lint( axiomLintPatterns, ontologyLintPatterns,
+ rootOntology );
+ output( rootOntologyLints.toString() );
+
+ if( !m_DoRootOnly ) {
+ // Copy so the root can be removed without mutating the manager's view.
+ Set importClosures = CollectionUtil.copy( manager
+ .getImportsClosure( rootOntology ) );
+ importClosures.remove( rootOntology );
+
+ if( importClosures.isEmpty() ) {
+ output( "\n" + rootOntology.getOntologyID() + " does not import other ontologies." );
+ }
+ else {
+ for( OWLOntology importedOntology : importClosures ) {
+ output( getOWL2DLProfileViolations( importedOntology ) );
+
+ OntologyLints importedOntologyLints = lint( axiomLintPatterns,
+ ontologyLintPatterns, importedOntology );
+ output( importedOntologyLints.toString() );
+ }
+
+ OWLOntology mergedImportClosure = buildMergedImportClosure( manager, rootOntology );
+ OntologyLints mergedOntologyLints = lint( axiomLintPatterns, ontologyLintPatterns,
+ mergedImportClosure );
+ mergedOntologyLints.setRootOntology( rootOntology );
+ output( mergedOntologyLints.toString() );
+ }
+ }
+
+
+ if( m_OutputOntologyPath != null ) {
+ // applyFix repairs what it can and returns what it could not.
+ Set unreparableLints = rootOntologyLints.applyFix( manager );
+ if( !unreparableLints.isEmpty() ) {
+ output("Unreparable lints:");
+ for( Lint lint : unreparableLints ) {
+ output( lint.toString() );
+ }
+ }
+ manager.saveOntology( rootOntologyLints.getOntology(), new StreamDocumentTarget( new FileOutputStream( m_OutputOntologyPath ) ) );
+ output( "Saved to " + m_OutputOntologyPath );
+
+ }
+ }
+
+ /**
+ * Returns a human-readable report of OWL 2 DL profile violations for the
+ * given ontology, or a one-line "no violations" message when the ontology
+ * is in profile.
+ */
+ private String getOWL2DLProfileViolations( OWLOntology ontology ) {
+ OWL2DLProfile owl2Profile = new OWL2DLProfile();
+ OWLProfileReport profileReport = owl2Profile.checkOntology( ontology );
+
+ if( profileReport.isInProfile() ) {
+ return "No OWL 2 DL violations found for ontology " + ontology.getOntologyID().toString();
+ }
+
+ // NOTE(review): a StringBuilder would suffice here (no shared mutation).
+ StringBuffer result = new StringBuffer();
+ result.append( "\n=========================================================\n" );
+ result.append("OWL 2 DL violations found for ontology ").append(ontology.getOntologyID().toString()).append(":\n");
+
+ for ( OWLProfileViolation violation : profileReport.getViolations() ) {
+ result.append( violation.toString() );
+ result.append( "\n" );
+ }
+
+ return result.toString();
+ }
+
+ /**
+ * Logs the loaded lint patterns at FINE level, sorted by name, with
+ * fixable patterns marked "[fixable]". No-op when FINE is disabled.
+ */
+ private void logLoadedPatterns(List axiomLintPatterns,
+ List ontologyLintPatterns) {
+ if( !LOGGER.isLoggable( Level.FINE ) )
+ return;
+
+ List allPatterns = CollectionUtil. copy( axiomLintPatterns );
+ allPatterns.addAll( ontologyLintPatterns );
+ Collections.sort( allPatterns, new Comparator() {
+ public int compare(LintPattern p0, LintPattern p1) {
+ return p0.getName().compareTo( p1.getName() );
+ }
+ } );
+
+ LOGGER.fine( "Loaded lint patterns:" );
+ for( LintPattern pattern : allPatterns ) {
+ StringBuilder builder = new StringBuilder();
+ builder.append( " " );
+ if( pattern.isFixable() ) {
+ builder.append( "[fixable] " );
+ }
+ else {
+ builder.append( " " );
+ }
+ builder.append( pattern.getName() );
+ LOGGER.fine( builder.toString() );
+ }
+ }
+
+ /**
+ * Merges the root ontology's imports closure into a single new ontology
+ * registered with the manager under MERGED_ONTOLOGY_URI.
+ */
+ private static OWLOntology buildMergedImportClosure(OWLOntologyManager manager,
+ OWLOntology rootOntology) throws OWLOntologyCreationException,
+ OWLOntologyChangeException {
+
+ OWLOntologyImportsClosureSetProvider importClosureSetProvider = new OWLOntologyImportsClosureSetProvider(
+ manager, rootOntology );
+ OWLOntologyMerger merger = new OWLOntologyMerger( importClosureSetProvider );
+ return merger.createMergedOntology( manager, MERGED_ONTOLOGY_URI );
+ }
+
+ /**
+ * Loads the Pellint configuration: from the URL (or, failing that, the
+ * classpath resource) named by the pellint configuration system property
+ * when set, otherwise from the default configuration file on the
+ * classpath. Missing or unreadable configuration is logged as severe and
+ * an empty Properties object is returned.
+ */
+ private static Properties loadProperties() {
+ Properties properties = new Properties();
+
+ String configFile = System.getProperty( CONFIGURATION_PROPERTY_NAME );
+ URL configURL = null;
+
+ // if the user has not specified the pellint.configuration property,
+ // we search for the file "pellint.properties"
+ if( configFile == null ) {
+ configURL = Pellint.class.getClassLoader()
+ .getResource( DEFAULT_CONFIGURATION_FILE_NAME );
+
+ if( configURL == null ) {
+ LOGGER.severe( "Cannot find Pellint configuration file "
+ + DEFAULT_CONFIGURATION_FILE_NAME );
+ }
+ }
+ else {
+ try {
+ configURL = new URL( configFile );
+ } catch( MalformedURLException e ) {
+ e.printStackTrace();
+
+ // so, resource is not a URL - attempt to get the resource from
+ // the class path
+ configURL = Pellint.class.getClassLoader().getResource( configFile );
+ }
+
+ if( configURL == null ) {
+ LOGGER.severe( "Cannot find Pellint configuration file " + configFile );
+ }
+ }
+
+ if( configURL != null ) {
+ try {
+ properties.load( configURL.openStream() );
+ } catch( FileNotFoundException e ) {
+ LOGGER.severe( "Pellint configuration file cannot be found" );
+ // NOTE(review): message below says "Pellet" — probably meant "Pellint".
+ } catch( IOException e ) {
+ LOGGER.severe( "I/O error while reading Pellet configuration file" );
+ }
+ }
+
+ return properties;
+ }
+}
diff --git a/core/lib/aterm-java-1.6.jar b/core/lib/aterm-java-1.6.jar
new file mode 100644
index 000000000..c56edd997
Binary files /dev/null and b/core/lib/aterm-java-1.6.jar differ
diff --git a/core/src/com/clarkparsia/pellet/BranchEffectTracker.java b/core/src/com/clarkparsia/pellet/BranchEffectTracker.java
new file mode 100644
index 000000000..68db36164
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/BranchEffectTracker.java
@@ -0,0 +1,77 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package com.clarkparsia.pellet;
+
+import java.util.Set;
+
+import aterm.ATermAppl;
+
+/**
+ *
+ * Title: Branch effect tracker
+ *
+ *
+ * Description: Tracks the nodes changed by a branch
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Mike Smith
+ */
+// NOTE(review): generic type parameters (e.g. Set element types) appear to
+// have been stripped from this patch during extraction — likely Set<ATermAppl>.
+public interface BranchEffectTracker {
+
+ /**
+ * Record that a node is affected by a branch
+ *
+ * @param branch
+ * Branch integer identifier
+ * @param a
+ * Node name
+ * @return boolean {@code true} if effect not already noted for branch+node
+ * pair, {@code false} else
+ */
+ public boolean add(int branch, ATermAppl a);
+
+ /**
+ * Copy branch tracker
+ *
+ * @return an independent copy of this tracker
+ */
+ public BranchEffectTracker copy();
+
+ /**
+ * Retrieve nodes affected by a branch and all subsequent branches
+ *
+ * @param branch
+ * Branch integer identifier
+ * @return Names of all nodes affected by branch and subsequent branches
+ */
+ public Set getAll(int branch);
+
+ /**
+ * Remove a branch from the tracker. Note that this causes the branch to
+ * effects association to change for all subsequent branches and should only
+ * be used if the branch indices are changed in ABox and all other
+ * structures.
+ *
+ * @param branch
+ * Branch integer identifier
+ * @return Names of all nodes affected by branch
+ */
+ public Set remove(int branch);
+
+ /**
+ * Remove a branch and all subsequent branches from the tracker.
+ *
+ * @param branch
+ * Branch integer identifier
+ * @return Names of all nodes affected by branch and subsequent branches
+ */
+ public Set removeAll(int branch);
+}
diff --git a/core/src/com/clarkparsia/pellet/IncrementalChangeTracker.java b/core/src/com/clarkparsia/pellet/IncrementalChangeTracker.java
new file mode 100644
index 000000000..739b7a033
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/IncrementalChangeTracker.java
@@ -0,0 +1,167 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package com.clarkparsia.pellet;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+
+import org.mindswap.pellet.ABox;
+import org.mindswap.pellet.Edge;
+import org.mindswap.pellet.Individual;
+import org.mindswap.pellet.Node;
+
+import aterm.ATermAppl;
+
+/**
+ *
+ * Title: Incremental change tracker
+ *
+ *
+ * Description: Tracks the changes for incremental ABox reasoning services
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Mike Smith
+ */
+// NOTE(review): generic type parameters appear stripped from this patch by
+// extraction (e.g. "Iterator>>" below was presumably
+// Iterator<Map.Entry<Node, Set<ATermAppl>>>) — confirm against the original.
+public interface IncrementalChangeTracker {
+
+ /**
+ * Record that a new edge has been deleted from the ABox
+ *
+ * @param e
+ * the Edge
+ * @return boolean {@code true} if delete is not already noted for edge,
+ * {@code false} else
+ */
+ public boolean addDeletedEdge(Edge e);
+
+ /**
+ * Record that a type was deleted from an individual
+ *
+ * @param n
+ * the Node
+ * @param type
+ * the type
+ * @return boolean {@code true} if delete is not already noted for node,
+ * type pair {@code false} else
+ */
+ public boolean addDeletedType(Node n, ATermAppl type);
+
+ /**
+ * Record that a new edge has been added to the ABox
+ *
+ * @param e
+ * the Edge
+ * @return boolean {@code true} if addition is not already noted for edge,
+ * {@code false} else
+ */
+ public boolean addNewEdge(Edge e);
+
+ /**
+ * Record that a new individual has been added to the ABox
+ *
+ * @param i
+ * the Individual
+ * @return boolean {@code true} if addition is not already noted for
+ * individual, {@code false} else
+ */
+ public boolean addNewIndividual(Individual i);
+
+ /**
+ * Record that a node has been "unpruned" because a merge was reverted
+ * during restore
+ *
+ * @param n
+ * the Node
+ * @return boolean {@code true} if unpruning is not already noted for node,
+ * {@code false} else
+ */
+ public boolean addUnprunedNode(Node n);
+
+ /**
+ * Record that an individual has been updated
+ *
+ * @param i
+ * the Individual
+ * @return boolean {@code true} if addition is not already noted for
+ * individual, {@code false} else
+ */
+ public boolean addUpdatedIndividual(Individual i);
+
+ /**
+ * Clear all recorded changes
+ */
+ public void clear();
+
+ /**
+ * Copy change tracker for use with a new ABox (presumably as part of
+ * {@code ABox.copy()})
+ *
+ * @param target
+ * The ABox for the copy
+ * @return a copy, with individuals in the target ABox
+ */
+ public IncrementalChangeTracker copy(ABox target);
+
+ /**
+ * Iterate over all edges deleted (see {@link #addDeletedEdge(Edge)}) since
+ * the previous {@link #clear()}
+ *
+ * @return Iterator
+ */
+ public Iterator deletedEdges();
+
+ /**
+ * Iterate over all nodes with deleted types (and those types) (see
+ * {@link #addDeletedType(Node, ATermAppl)}) since the previous
+ * {@link #clear()}
+ *
+ * @return Iterator
+ */
+ public Iterator>> deletedTypes();
+
+ /**
+ * Iterate over all edges added (see {@link #addNewEdge(Edge)}) since the
+ * previous {@link #clear()}
+ *
+ * @return Iterator
+ */
+ public Iterator newEdges();
+
+ /**
+ * Iterate over all individuals added (see
+ * {@link #addNewIndividual(Individual)}) since the previous
+ * {@link #clear()}
+ *
+ * @return Iterator
+ */
+ public Iterator newIndividuals();
+
+ /**
+ * Iterate over all nodes unpruned (see
+ * {@link #addUnprunedNode(Node)}) since the previous
+ * {@link #clear()}
+ *
+ * @return Iterator
+ */
+ public Iterator unprunedNodes();
+
+ /**
+ * Iterate over all individuals updated (see
+ * {@link #addUpdatedIndividual(Individual)}) since the previous
+ * {@link #clear()}
+ *
+ * @return Iterator
+ */
+ public Iterator updatedIndividuals();
+}
diff --git a/core/src/com/clarkparsia/pellet/expressivity/DLExpressivityChecker.java b/core/src/com/clarkparsia/pellet/expressivity/DLExpressivityChecker.java
new file mode 100644
index 000000000..02a797be8
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/expressivity/DLExpressivityChecker.java
@@ -0,0 +1,291 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package com.clarkparsia.pellet.expressivity;
+
+import java.util.Iterator;
+import java.util.Set;
+
+import org.mindswap.pellet.Individual;
+import org.mindswap.pellet.KnowledgeBase;
+import org.mindswap.pellet.Role;
+import org.mindswap.pellet.exceptions.InternalReasonerException;
+import org.mindswap.pellet.output.ATermBaseVisitor;
+import org.mindswap.pellet.tbox.TBox;
+import org.mindswap.pellet.tbox.impl.Unfolding;
+import org.mindswap.pellet.utils.ATermUtils;
+import org.mindswap.pellet.utils.SetUtils;
+
+import aterm.ATermAppl;
+import aterm.ATermInt;
+import aterm.ATermList;
+
+/**
+ *
+ * Title:
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Harris Lin
+ * @author Evren Sirin
+ */
+// NOTE(review): generic type parameters appear stripped from this patch by
+// extraction (raw Set/Iterator below) — confirm against the original file.
+public class DLExpressivityChecker extends ProfileBasedExpressivityChecker {
+ // Sentinel: a domain/range set equal to {TOP} carries no real restriction.
+ private static Set TOP_SET = SetUtils.singleton( ATermUtils.TOP );
+
+ private Visitor m_Visitor;
+
+ // Expressivity object currently being populated by compute/updateWith.
+ private Expressivity m_Expressivity;
+
+ public DLExpressivityChecker(KnowledgeBase kb) {
+ super(kb);
+ m_Visitor = new Visitor();
+ }
+
+ /**
+ * Computes the expressivity of the KB from scratch by scanning the ABox
+ * individuals, the TBox unfoldings, and the RBox roles. Always returns
+ * {@code true} (DL covers everything this checker can see).
+ */
+ public boolean compute(Expressivity expressivity) {
+ m_Expressivity = expressivity;
+
+ processIndividuals();
+ processClasses();
+ processRoles();
+
+ return true;
+ }
+
+ /**
+ * Incrementally updates the given expressivity with one additional term.
+ * Always returns {@code true}.
+ */
+ public boolean updateWith(Expressivity expressivity, ATermAppl term) {
+ m_Expressivity = expressivity;
+ m_Visitor.visit(term);
+ return true;
+ }
+
+ // Visits the asserted types of every ABox individual, skipping each
+ // individual's own nominal type to avoid spuriously flagging nominals.
+ private void processIndividuals() {
+ if (!m_KB.getABox().isEmpty()) {
+ m_Expressivity.setHasIndividual(true);
+ }
+
+ Iterator i = m_KB.getABox().getIndIterator();
+ while( i.hasNext() ) {
+ Individual ind = i.next();
+ ATermAppl nominal = ATermUtils.makeValue( ind.getName() );
+ Iterator j = ind.getTypes().iterator();
+ while( j.hasNext() ) {
+ ATermAppl term = j.next();
+
+ if( term.equals( nominal ) )
+ continue;
+ m_Visitor.visit( term );
+ }
+ }
+ }
+
+ // Visits the unfolded definition of every named class in the TBox.
+ private void processClasses() {
+ TBox tbox = m_KB.getTBox();
+
+ for( ATermAppl c : m_KB.getAllClasses() ) {
+ Iterator unfoldC = tbox.unfold( c );
+ while( unfoldC.hasNext() ) {
+ Unfolding unf = unfoldC.next();
+ m_Visitor.visit( unf.getResult() );
+ }
+ }
+ }
+
+ // Inspects every non-builtin role for characteristics (functionality,
+ // transitivity, hierarchy, domains/ranges, ...) that affect expressivity.
+ private void processRoles() {
+ for( Role r : m_KB.getRBox().getRoles() ) {
+ if( r.isBuiltin() )
+ continue;
+
+ if( r.isDatatypeRole() ) {
+ m_Expressivity.setHasDatatype(true);
+ // An inverse-functional datatype role is treated as a key.
+ if( r.isInverseFunctional() )
+ m_Expressivity.setHasKeys(true);
+ }
+
+ if( r.isAnon() ) {
+ for( Role subRole : r.getSubRoles() ) {
+ if( !subRole.isAnon() && !subRole.isBottom() )
+ m_Expressivity.setHasInverse(true);
+ }
+ }
+
+ // InverseFunctionalProperty declaration may mean that a named
+ // property has an anonymous inverse property which is functional
+ // The following condition checks this case
+ if( r.isAnon() && r.isFunctional() )
+ m_Expressivity.setHasInverse(true);
+ if( r.isFunctional() ) {
+ if( r.isDatatypeRole() )
+ m_Expressivity.setHasFunctionalityD(true);
+ else if( r.isObjectRole() )
+ m_Expressivity.setHasFunctionality(true);
+ }
+ if( r.isTransitive() )
+ m_Expressivity.setHasTransitivity(true);
+ if( r.isReflexive() )
+ m_Expressivity.setHasReflexivity(true);
+ if( r.isIrreflexive() )
+ m_Expressivity.setHasIrreflexivity(true);
+ if( r.isAsymmetric() )
+ m_Expressivity.setHasAsymmetry(true);
+ if( !r.getDisjointRoles().isEmpty() )
+ m_Expressivity.setHasDisjointRoles(true);
+ if( r.hasComplexSubRole() )
+ m_Expressivity.setHasComplexSubRoles(true);
+
+ // Each property has itself included in the subroles set. We need
+ // at least two properties in the set to conclude there is a role
+ // hierarchy defined in the ontology
+ if( r.getSubRoles().size() > 1 )
+ m_Expressivity.setHasRoleHierarchy(true);
+
+ Set domains = r.getDomains();
+ if( !domains.isEmpty() && !domains.equals( TOP_SET ) ) {
+ m_Expressivity.setHasDomain(true);
+ for( ATermAppl domain : domains ) {
+ m_Visitor.visit( domain );
+ }
+ }
+
+ Set ranges = r.getRanges();
+ if( !ranges.isEmpty() && !ranges.equals( TOP_SET ) ) {
+ m_Expressivity.setHasRange(true);
+ for( ATermAppl range : ranges ) {
+ m_Visitor.visit( range );
+ }
+ }
+ }
+ }
+
+
+ // Term visitor that flips the appropriate expressivity flags for each
+ // concept constructor it encounters.
+ class Visitor extends ATermBaseVisitor {
+ public void visitTerm(ATermAppl term) {
+ }
+
+ // A non-primitive role term is an anonymous inverse, e.g. inv(p).
+ void visitRole(ATermAppl p) {
+ if( !ATermUtils.isPrimitive( p ) ) {
+ m_Expressivity.setHasInverse(true);
+ m_Expressivity.addAnonInverse((ATermAppl) p.getArgument( 0 ));
+ }
+ }
+
+ public void visitAnd(ATermAppl term) {
+ visitList( (ATermList) term.getArgument( 0 ) );
+ }
+
+ public void visitOr(ATermAppl term) {
+ m_Expressivity.setHasNegation(true);
+ visitList( (ATermList) term.getArgument( 0 ) );
+ }
+
+ public void visitNot(ATermAppl term) {
+ m_Expressivity.setHasNegation(true);
+ visit( (ATermAppl) term.getArgument( 0 ) );
+ }
+
+ public void visitSome(ATermAppl term) {
+ visitRole( (ATermAppl) term.getArgument( 0 ) );
+ visit( (ATermAppl) term.getArgument( 1 ) );
+ }
+
+ public void visitAll(ATermAppl term) {
+ m_Expressivity.setHasAllValues(true);
+ visitRole( (ATermAppl) term.getArgument( 0 ) );
+ visit( (ATermAppl) term.getArgument( 1 ) );
+ }
+
+ // Exact cardinality is treated as min + max.
+ public void visitCard(ATermAppl term) {
+ visitMin( term );
+ visitMax( term );
+ }
+
+ // A non-TOP filler makes the restriction qualified (Q); otherwise plain
+ // cardinality (N). Datatype roles always map to the D variant.
+ public void visitMin(ATermAppl term) {
+ visitRole( (ATermAppl) term.getArgument( 0 ) );
+ Role role = m_KB.getRole( term.getArgument( 0 ) );
+ ATermAppl c = (ATermAppl) term.getArgument( 2 );
+ if( !ATermUtils.isTop( c ) ) {
+ if( role.isDatatypeRole() )
+ m_Expressivity.setHasCardinalityD(true);
+ else
+ m_Expressivity.setHasCardinalityQ(true);
+ }
+ else {
+ if( role.isDatatypeRole() )
+ m_Expressivity.setHasCardinalityD(true);
+ else
+ m_Expressivity.setHasCardinality(true);
+ }
+ }
+
+ // Like visitMin, but max <= 1 with a TOP filler is only functionality,
+ // so the cardinality flag is set only when the bound exceeds 1.
+ public void visitMax(ATermAppl term) {
+ visitRole( (ATermAppl) term.getArgument( 0 ) );
+ Role role = m_KB.getRole( term.getArgument( 0 ) );
+ int cardinality = ((ATermInt) term.getArgument( 1 )).getInt();
+ ATermAppl c = (ATermAppl) term.getArgument( 2 );
+ if( !ATermUtils.isTop( c ) ) {
+ if( role.isDatatypeRole() )
+ m_Expressivity.setHasCardinalityD(true);
+ else
+ m_Expressivity.setHasCardinalityQ(true);
+ }
+ else if( cardinality > 1 ) {
+ if( role.isDatatypeRole() )
+ m_Expressivity.setHasCardinalityD(true);
+ else
+ m_Expressivity.setHasCardinality(true);
+ }
+ }
+
+ public void visitHasValue(ATermAppl term) {
+ visitRole( (ATermAppl) term.getArgument( 0 ) );
+ visitValue( (ATermAppl) term.getArgument( 1 ) );
+ }
+
+ // value(x): a non-literal argument is a nominal; a literal implies a
+ // user-defined datatype.
+ public void visitValue(ATermAppl term) {
+ ATermAppl nom = (ATermAppl) term.getArgument( 0 );
+ if( !ATermUtils.isLiteral( nom ) )
+ m_Expressivity.addNominal(nom);
+ else
+ m_Expressivity.setHasUserDefinedDatatype( true );
+ }
+
+ public void visitOneOf(ATermAppl term) {
+ m_Expressivity.setHasNegation(true);
+ visitList( (ATermList) term.getArgument( 0 ) );
+ }
+
+ public void visitLiteral(ATermAppl term) {
+ // nothing to do here
+ }
+
+ public void visitSelf(ATermAppl term) {
+ m_Expressivity.setHasReflexivity(true);
+ m_Expressivity.setHasIrreflexivity(true);
+ }
+
+ public void visitSubClass(ATermAppl term) {
+ throw new InternalReasonerException( "This function should never be called: " + term );
+ }
+
+ public void visitInverse(ATermAppl p) {
+ m_Expressivity.setHasInverse(true);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void visitRestrictedDatatype(ATermAppl dt) {
+ m_Expressivity.setHasDatatype( true );
+ m_Expressivity.setHasUserDefinedDatatype( true );
+ }
+ }
+}
diff --git a/core/src/com/clarkparsia/pellet/expressivity/Expressivity.java b/core/src/com/clarkparsia/pellet/expressivity/Expressivity.java
new file mode 100644
index 000000000..88b91e2f0
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/expressivity/Expressivity.java
@@ -0,0 +1,456 @@
+// Portions Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// Clark & Parsia, LLC parts of this source code are available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+//
+// ---
+// Portions Copyright (c) 2003 Ron Alford, Mike Grove, Bijan Parsia, Evren Sirin
+// Alford, Grove, Parsia, Sirin parts of this source code are available under the terms of the MIT License.
+//
+// The MIT License
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to
+// deal in the Software without restriction, including without limitation the
+// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
+// sell copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+// IN THE SOFTWARE.
+
+package com.clarkparsia.pellet.expressivity;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import aterm.ATermAppl;
+
+/**
+ *
+ * Title:
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Evren Sirin, Harris Lin
+ */
+// NOTE(review): generic type parameters (Set<ATermAppl> etc.) appear stripped
+// from this patch by extraction — confirm against the original file.
+public class Expressivity {
+ /**
+ * not (owl:complementOf) is used directly or indirectly
+ */
+ private boolean hasNegation = false;
+ private boolean hasAllValues = false;
+ private boolean hasDisjointClasses = false;
+
+ /**
+ * An inverse property has been defined or a property has been defined as
+ * InverseFunctional
+ */
+ private boolean hasInverse = false;
+ private boolean hasFunctionality = false;
+ private boolean hasCardinality = false;
+ private boolean hasCardinalityQ = false;
+ private boolean hasFunctionalityD = false;
+ private boolean hasCardinalityD = false;
+ private boolean hasTransitivity = false;
+ private boolean hasRoleHierarchy = false;
+ private boolean hasReflexivity = false;
+ private boolean hasIrreflexivity = false;
+ private boolean hasDisjointRoles = false;
+ private boolean hasAsymmetry = false;
+ private boolean hasComplexSubRoles = false;
+ private boolean hasDatatype = false;
+ private boolean hasUserDefinedDatatype = false;
+
+ private boolean hasKeys = false;
+
+ private boolean hasDomain = false;
+ private boolean hasRange = false;
+
+ private boolean hasIndividual = false;
+ /**
+ * The set of individuals in the ABox that have been used as nominals, i.e.
+ * in an owl:oneOf enumeration or target of owl:hasValue restriction
+ */
+ private Set nominals = new HashSet();
+
+
+ private Set anonInverses = new HashSet();
+
+ public Expressivity() {
+ }
+
+ /**
+ * Copy constructor: produces an independent copy of all flags and sets.
+ */
+ public Expressivity(Expressivity other) {
+ hasNegation = other.hasNegation;
+ hasAllValues = other.hasAllValues;
+ hasDisjointClasses = other.hasDisjointClasses;
+ hasInverse = other.hasInverse;
+ hasFunctionality = other.hasFunctionality;
+ hasCardinality = other.hasCardinality;
+ hasCardinalityQ = other.hasCardinalityQ;
+ hasFunctionalityD = other.hasFunctionalityD;
+ hasCardinalityD = other.hasCardinalityD;
+ hasTransitivity = other.hasTransitivity;
+ hasRoleHierarchy = other.hasRoleHierarchy;
+ hasReflexivity = other.hasReflexivity;
+ hasIrreflexivity = other.hasIrreflexivity;
+ hasDisjointRoles = other.hasDisjointRoles;
+ hasAsymmetry = other.hasAsymmetry;
+ hasComplexSubRoles = other.hasComplexSubRoles;
+ hasDatatype = other.hasDatatype;
+ // FIX(review): this flag was previously not copied, so copies silently
+ // lost the user-defined-datatype expressivity.
+ hasUserDefinedDatatype = other.hasUserDefinedDatatype;
+ hasKeys = other.hasKeys;
+ hasDomain = other.hasDomain;
+ hasRange = other.hasRange;
+ hasIndividual = other.hasIndividual;
+ nominals = new HashSet(other.nominals);
+ anonInverses = new HashSet(other.anonInverses);
+ }
+
+ /**
+ * Returns true when the expressivity stays within the EL fragment (no
+ * negation, universals, inverses, cardinalities, datatypes, keys, or
+ * individuals/nominals).
+ */
+ public boolean isEL() {
+ return !hasNegation
+ && !hasAllValues
+ && !hasInverse
+ && !hasFunctionality
+ && !hasCardinality
+ && !hasCardinalityQ
+ && !hasFunctionalityD
+ && !hasCardinalityD
+ && !hasIrreflexivity
+ && !hasDisjointRoles
+ && !hasAsymmetry
+ && !hasDatatype
+ && !hasKeys
+ && !hasIndividual
+ && nominals.isEmpty();
+ }
+
+ /**
+ * Builds the conventional DL name (e.g. "SHOIN(D)") from the flags.
+ */
+ public String toString() {
+ String dl = "";
+
+ if( isEL() ) {
+ dl = "EL";
+
+ if( hasComplexSubRoles
+ || hasReflexivity
+ || hasDomain
+ || hasRange
+ || hasDisjointClasses ) {
+ dl += "+";
+ } else if (hasRoleHierarchy) {
+ dl += "H";
+ }
+ } else {
+ dl = "AL";
+
+ if( hasNegation )
+ dl = "ALC";
+
+ if( hasTransitivity )
+ dl += "R+";
+
+ if( dl.equals( "ALCR+" ) )
+ dl = "S";
+
+ if( hasComplexSubRoles )
+ dl = "SR";
+ else if( hasRoleHierarchy )
+ dl += "H";
+
+ if( hasNominal() )
+ dl += "O";
+
+ if( hasInverse )
+ dl += "I";
+
+ if( hasCardinalityQ )
+ dl += "Q";
+ else if( hasCardinality )
+ dl += "N";
+ else if( hasFunctionality )
+ dl += "F";
+
+ if( hasDatatype ) {
+ if( hasKeys )
+ dl += "(Dk)";
+ else
+ dl += "(D)";
+ }
+ }
+
+ return dl;
+ }
+
+
+ /**
+ * @return Returns the hasNegation.
+ */
+ public boolean hasNegation() {
+ return hasNegation;
+ }
+
+ public void setHasNegation(boolean v) {
+ hasNegation = v;
+ }
+
+ /**
+ * @return Returns the hasAllValues.
+ */
+ public boolean hasAllValues() {
+ return hasAllValues;
+ }
+
+ public void setHasAllValues(boolean v) {
+ hasAllValues = v;
+ }
+
+ /**
+ * @return Returns the hasDisjointClasses.
+ */
+ public boolean hasDisjointClasses() {
+ return hasDisjointClasses;
+ }
+
+ public void setHasDisjointClasses(boolean v) {
+ hasDisjointClasses = v;
+ }
+
+ /**
+ * @return Returns the hasInverse.
+ */
+ public boolean hasInverse() {
+ return hasInverse;
+ }
+
+ public void setHasInverse(boolean v) {
+ hasInverse = v;
+ }
+
+ /**
+ * @return Returns the hasFunctionality.
+ */
+ public boolean hasFunctionality() {
+ return hasFunctionality;
+ }
+
+ public void setHasFunctionality(boolean v) {
+ hasFunctionality = v;
+ }
+
+ /**
+ * @return Returns the hasCardinality.
+ */
+ public boolean hasCardinality() {
+ return hasCardinality;
+ }
+
+ public void setHasCardinality(boolean v) {
+ hasCardinality = v;
+ }
+
+ /**
+ * @return Returns the hasCardinalityQ (qualified cardinality).
+ */
+ public boolean hasCardinalityQ() {
+ return hasCardinalityQ;
+ }
+
+ public void setHasCardinalityQ(boolean v) {
+ hasCardinalityQ = v;
+ }
+
+ /**
+ * Returns true if a cardinality restriction (less than or equal to 1) is
+ * defined on any datatype property
+ */
+ public boolean hasFunctionalityD() {
+ return hasFunctionalityD;
+ }
+
+ public void setHasFunctionalityD(boolean v) {
+ hasFunctionalityD = v;
+ }
+
+ /**
+ * Returns true if a cardinality restriction (greater than 1) is defined on
+ * any datatype property
+ */
+ public boolean hasCardinalityD() {
+ return hasCardinalityD;
+ }
+
+ public void setHasCardinalityD(boolean v) {
+ hasCardinalityD = v;
+ }
+
+ /**
+ * @return Returns the hasTransitivity.
+ */
+ public boolean hasTransitivity() {
+ return hasTransitivity;
+ }
+
+ public void setHasTransitivity(boolean v) {
+ hasTransitivity = v;
+ }
+
+ /**
+ * @return Returns the hasRoleHierarchy.
+ */
+ public boolean hasRoleHierarchy() {
+ return hasRoleHierarchy;
+ }
+
+ public void setHasRoleHierarchy(boolean v) {
+ hasRoleHierarchy = v;
+ }
+
+ public boolean hasReflexivity() {
+ return hasReflexivity;
+ }
+
+ public void setHasReflexivity(boolean v) {
+ hasReflexivity = v;
+ }
+
+ public boolean hasIrreflexivity() {
+ return hasIrreflexivity;
+ }
+
+ public void setHasIrreflexivity(boolean v) {
+ hasIrreflexivity = v;
+ }
+
+ public boolean hasDisjointRoles() {
+ return hasDisjointRoles;
+ }
+
+ public void setHasDisjointRoles(boolean v) {
+ hasDisjointRoles = v;
+ }
+
+ /**
+ * @deprecated Use {@link #hasAsymmmetry()}
+ */
+ @Deprecated
+ public boolean hasAntiSymmmetry() {
+ return hasAsymmetry;
+ }
+
+ public boolean hasAsymmmetry() {
+ return hasAsymmetry;
+ }
+
+ /**
+ * @deprecated Use {@link #setHasAsymmetry(boolean)}
+ */
+ @Deprecated
+ public void setHasAntiSymmetry(boolean v) {
+ hasAsymmetry = v;
+ }
+
+ public void setHasAsymmetry(boolean v) {
+ hasAsymmetry = v;
+ }
+
+ public boolean hasComplexSubRoles() {
+ return hasComplexSubRoles;
+ }
+
+ public void setHasComplexSubRoles(boolean v) {
+ hasComplexSubRoles = v;
+ }
+
+ /**
+ * @return Returns the hasDatatype.
+ */
+ public boolean hasDatatype() {
+ return hasDatatype;
+ }
+
+ public void setHasDatatype(boolean v) {
+ hasDatatype = v;
+ }
+
+ public boolean hasUserDefinedDatatype() {
+ return hasUserDefinedDatatype;
+ }
+
+ // A user-defined datatype implies datatype expressivity in general.
+ public void setHasUserDefinedDatatype(boolean v) {
+ if ( v )
+ setHasDatatype( true );
+ hasUserDefinedDatatype = v;
+ }
+
+ public boolean hasKeys() {
+ return hasKeys;
+ }
+
+ public void setHasKeys(boolean v) {
+ hasKeys = v;
+ }
+
+ public boolean hasDomain() {
+ return hasDomain;
+ }
+
+ public void setHasDomain(boolean v) {
+ hasDomain = v;
+ }
+
+ public boolean hasRange() {
+ return hasRange;
+ }
+
+ public void setHasRange(boolean v) {
+ hasRange = v;
+ }
+
+ public boolean hasIndividual() {
+ return hasIndividual;
+ }
+
+ public void setHasIndividual(boolean v) {
+ hasIndividual = v;
+ }
+
+ public boolean hasNominal() {
+ return !nominals.isEmpty();
+ }
+
+ public Set getNominals() {
+ return nominals;
+ }
+
+ public void addNominal(ATermAppl n) {
+ nominals.add(n);
+ }
+
+ /**
+ * Returns every property p such that inv(p) is used in an axiom in
+ * the KB. The named inverses are not considered.
+ *
+ * @return the set of properties whose anonymous inverse is used
+ */
+ public Set getAnonInverses() {
+ return anonInverses;
+ }
+
+ public void addAnonInverse(ATermAppl p) {
+ anonInverses.add(p);
+ }
+}
\ No newline at end of file
diff --git a/core/src/com/clarkparsia/pellet/expressivity/ExpressivityChecker.java b/core/src/com/clarkparsia/pellet/expressivity/ExpressivityChecker.java
new file mode 100644
index 000000000..d12b721a3
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/expressivity/ExpressivityChecker.java
@@ -0,0 +1,84 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package com.clarkparsia.pellet.expressivity;
+
+import org.mindswap.pellet.KnowledgeBase;
+import org.mindswap.pellet.utils.ATermUtils;
+
+import com.clarkparsia.pellet.el.ELExpressivityChecker;
+
+import aterm.ATermAppl;
+
+/**
+ *
+ * Title:
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Harris Lin
+ */
+public class ExpressivityChecker {
+ private KnowledgeBase m_KB;
+ private ELExpressivityChecker m_ELChecker;
+ private DLExpressivityChecker m_DLChecker;
+ private Expressivity m_Expressivity;
+
+ public ExpressivityChecker(KnowledgeBase kb) {
+ this( kb, new Expressivity() );
+ }
+
+ public ExpressivityChecker(KnowledgeBase kb, Expressivity expr) {
+ m_KB = kb;
+ m_ELChecker = new ELExpressivityChecker(m_KB);
+ m_DLChecker = new DLExpressivityChecker(m_KB);
+ m_Expressivity = expr;
+ }
+
+ public void prepare() {
+ m_Expressivity = new Expressivity();
+ if (m_ELChecker.compute(m_Expressivity)) return;
+
+ m_Expressivity = new Expressivity();
+ // force expressivity to be non-EL
+ m_Expressivity.setHasAllValues( true );
+ m_DLChecker.compute(m_Expressivity);
+ }
+
+ public Expressivity getExpressivity() {
+ return m_Expressivity;
+ }
+
+ public Expressivity getExpressivityWith(ATermAppl c) {
+ if (c == null) return m_Expressivity;
+
+ Expressivity newExp = new Expressivity(m_Expressivity);
+ m_DLChecker.updateWith(newExp, c);
+
+ return newExp;
+ }
+
+ /**
+ * Added for incremental reasoning. Given an aterm corresponding to an
+ * individual and concept, the expressivity is updated accordingly.
+ */
+ public void updateWithIndividual(ATermAppl i, ATermAppl concept) {
+ ATermAppl nominal = ATermUtils.makeValue(i);
+
+ if( concept.equals(nominal) )
+ return;
+
+ m_DLChecker.updateWith(m_Expressivity, concept);
+ }
+}
diff --git a/core/src/com/clarkparsia/pellet/expressivity/ProfileBasedExpressivityChecker.java b/core/src/com/clarkparsia/pellet/expressivity/ProfileBasedExpressivityChecker.java
new file mode 100644
index 000000000..3fb4da635
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/expressivity/ProfileBasedExpressivityChecker.java
@@ -0,0 +1,53 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package com.clarkparsia.pellet.expressivity;
+
+import org.mindswap.pellet.KnowledgeBase;
+
+import aterm.ATermAppl;
+
+/**
+ *
+ * Title:
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Harris Lin
+ */
+public abstract class ProfileBasedExpressivityChecker {
+ protected KnowledgeBase m_KB;
+
+ public ProfileBasedExpressivityChecker(KnowledgeBase kb) {
+ m_KB = kb;
+ }
+
+ /**
+ * Compute the expressivity from the {@link org.mindswap.pellet.KnowledgeBase}
+ * and update it to the given {@link com.clarkparsia.pellet.expressivity.Expressivity}.
+ *
+ * @return <code>true</code> if the expressivity is within the profile defined by the implementation,
+ * <code>false</code> otherwise.
+ */
+ public abstract boolean compute(Expressivity expressivity);
+
+ /**
+ * Update the given {@link com.clarkparsia.pellet.expressivity.Expressivity} by adding
+ * the new {@link aterm.ATermAppl}.
+ *
+ * @return <code>true</code> if the new expressivity is within the profile defined by the implementation,
+ * <code>false</code> otherwise.
+ */
+ public abstract boolean updateWith(Expressivity expressivity, ATermAppl term);
+}
diff --git a/core/src/com/clarkparsia/pellet/impl/SimpleBranchEffectTracker.java b/core/src/com/clarkparsia/pellet/impl/SimpleBranchEffectTracker.java
new file mode 100644
index 000000000..2fc2ceb27
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/impl/SimpleBranchEffectTracker.java
@@ -0,0 +1,155 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package com.clarkparsia.pellet.impl;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Set;
+
+import aterm.ATermAppl;
+
+import com.clarkparsia.pellet.BranchEffectTracker;
+
+/**
+ *
+ * Title: Simple Branch Effect Tracker
+ *
+ *
+ * Description: Basic ArrayList implementation of BranchEffectTracker
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Mike Smith
+ */
+public class SimpleBranchEffectTracker implements BranchEffectTracker {
+
+ private ArrayList<Set<ATermAppl>> effects;
+
+ public SimpleBranchEffectTracker() {
+ effects = new ArrayList<Set<ATermAppl>>();
+ }
+
+ private SimpleBranchEffectTracker(SimpleBranchEffectTracker other) {
+ final int n = other.effects.size();
+
+ this.effects = new ArrayList<Set<ATermAppl>>( n );
+ for( int i = 0; i < n; i++ ) {
+ Set<ATermAppl> s = other.effects.get( i );
+ this.effects.add( (s == null)
+ ? null
+ : new HashSet<ATermAppl>( s ) );
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.BranchEffectTracker#add(int, aterm.ATermAppl)
+ */
+ public boolean add(int branch, ATermAppl a) {
+
+ if( branch <= 0 )
+ return false;
+
+ final int diff = branch - effects.size();
+ if( diff > 0 ) {
+ @SuppressWarnings("unchecked")
+ Set<ATermAppl> nulls[] = (Set<ATermAppl>[]) new Set[diff];
+ effects.addAll( Arrays.asList( nulls ) );
+ }
+
+ Set<ATermAppl> existing = effects.get( branch - 1 );
+ if( existing == null ) {
+ existing = new HashSet<ATermAppl>();
+ effects.set( branch - 1, existing );
+ }
+
+ return existing.add( a );
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.BranchEffectTracker#copy()
+ */
+ public SimpleBranchEffectTracker copy() {
+ return new SimpleBranchEffectTracker( this );
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.BranchEffectTracker#getAll(int)
+ */
+ public Set<ATermAppl> getAll(int branch) {
+
+ if( branch < 1 )
+ throw new IllegalArgumentException();
+
+ if( branch > effects.size() )
+ return Collections.emptySet();
+
+ Set<ATermAppl> ret = new HashSet<ATermAppl>();
+ for( int i = branch - 1; i < effects.size(); i++ ) {
+ Set<ATermAppl> s = effects.get( i );
+ if( s != null )
+ ret.addAll( s );
+ }
+
+ return ret;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.BranchEffectTracker#remove(int)
+ */
+ public Set<ATermAppl> remove(int branch) {
+
+ if( branch < 1 )
+ throw new IllegalArgumentException();
+
+ if( branch > effects.size() )
+ return Collections.emptySet();
+
+ Set<ATermAppl> ret = effects.remove( branch - 1 );
+ if( ret == null )
+ return Collections.emptySet();
+
+ return ret;
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.BranchEffectTracker#removeAll(int)
+ */
+ public Set<ATermAppl> removeAll(int branch) {
+
+ if( branch < 1 )
+ throw new IllegalArgumentException();
+
+ if( branch > effects.size() )
+ return Collections.emptySet();
+
+ Set<ATermAppl> ret = new HashSet<ATermAppl>();
+ for( int i = (effects.size() - 1); i >= (branch - 1); i-- ) {
+ Set<ATermAppl> s = effects.remove( i );
+ if( s != null )
+ ret.addAll( s );
+ }
+
+ return ret;
+ }
+}
\ No newline at end of file
diff --git a/core/src/com/clarkparsia/pellet/impl/SimpleIncrementalChangeTracker.java b/core/src/com/clarkparsia/pellet/impl/SimpleIncrementalChangeTracker.java
new file mode 100644
index 000000000..cf226e320
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/impl/SimpleIncrementalChangeTracker.java
@@ -0,0 +1,287 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package com.clarkparsia.pellet.impl;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Set;
+import java.util.Map.Entry;
+
+import org.mindswap.pellet.ABox;
+import org.mindswap.pellet.DefaultEdge;
+import org.mindswap.pellet.Edge;
+import org.mindswap.pellet.Individual;
+import org.mindswap.pellet.Node;
+
+import aterm.ATermAppl;
+
+import com.clarkparsia.pellet.IncrementalChangeTracker;
+
+/**
+ *
+ * Title: Simple incremental change tracker
+ *
+ *
+ * Description: Basic implementation of {@link IncrementalChangeTracker}
+ * interface
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Mike Smith
+ */
+public class SimpleIncrementalChangeTracker implements IncrementalChangeTracker {
+
+ private HashSet<Edge> deletedEdges;
+ private HashMap<Node, Set<ATermAppl>> deletedTypes;
+ private HashSet<Edge> newEdges;
+ private HashSet<Individual> newIndividuals;
+ private HashSet<Node> unprunedNodes;
+ private HashSet<Individual> updatedIndividuals;
+
+ public SimpleIncrementalChangeTracker() {
+ deletedEdges = new HashSet<Edge>();
+ deletedTypes = new HashMap<Node, Set<ATermAppl>>();
+ newEdges = new HashSet<Edge>();
+ newIndividuals = new HashSet<Individual>();
+ unprunedNodes = new HashSet<Node>();
+ updatedIndividuals = new HashSet<Individual>();
+ }
+
+ private SimpleIncrementalChangeTracker(SimpleIncrementalChangeTracker src, ABox target) {
+
+ this.deletedEdges = new HashSet<Edge>( src.deletedEdges.size() );
+
+ for( Edge se : src.deletedEdges ) {
+ final Individual s = target.getIndividual( se.getFrom().getName() );
+ if( s == null )
+ throw new NullPointerException();
+ final Node o = target.getNode( se.getTo().getName() );
+ if( o == null )
+ throw new NullPointerException();
+
+ this.newEdges.add( new DefaultEdge( se.getRole(), s, o, se.getDepends() ) );
+ }
+
+ this.deletedTypes = new HashMap<Node, Set<ATermAppl>>( src.deletedTypes.size() );
+
+ for( Map.Entry<Node, Set<ATermAppl>> e : src.deletedTypes.entrySet() ) {
+ final Node n = target.getNode( e.getKey().getName() );
+ if( n == null )
+ throw new NullPointerException();
+ this.deletedTypes.put( n, new HashSet<ATermAppl>( e.getValue() ) );
+ }
+
+ this.newEdges = new HashSet<Edge>( src.newEdges.size() );
+
+ for( Edge se : src.newEdges ) {
+ final Individual s = target.getIndividual( se.getFrom().getName() );
+ if( s == null )
+ throw new NullPointerException();
+ final Node o = target.getNode( se.getTo().getName() );
+ if( o == null )
+ throw new NullPointerException();
+
+ this.newEdges.add( new DefaultEdge( se.getRole(), s, o, se.getDepends() ) );
+ }
+
+ this.newIndividuals = new HashSet<Individual>( src.newIndividuals.size() );
+
+ for( Individual si : src.newIndividuals ) {
+ final Individual ti = target.getIndividual( si.getName() );
+ if( ti == null )
+ throw new NullPointerException();
+
+ this.newIndividuals.add( ti );
+ }
+
+ this.unprunedNodes = new HashSet<Node>( src.unprunedNodes.size() );
+
+ for( Node sn : src.unprunedNodes ) {
+ final Node tn = target.getNode( sn.getName() );
+ if( tn == null )
+ throw new NullPointerException();
+
+ this.unprunedNodes.add( tn );
+ }
+
+ this.updatedIndividuals = new HashSet<Individual>( src.updatedIndividuals.size() );
+
+ for( Individual si : src.updatedIndividuals ) {
+ final Individual ti = target.getIndividual( si.getName() );
+ if( ti == null )
+ throw new NullPointerException();
+
+ this.updatedIndividuals.add( ti );
+ }
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#addDeletedEdge(org.mindswap.pellet.Edge)
+ */
+ public boolean addDeletedEdge(Edge e) {
+ if( e == null )
+ throw new NullPointerException();
+
+ return deletedEdges.add( e );
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#addDeletedType(org.mindswap.pellet.Node,
+ * aterm.ATermAppl)
+ */
+ public boolean addDeletedType(Node n, ATermAppl type) {
+ if( n == null )
+ throw new NullPointerException();
+ if( type == null )
+ throw new NullPointerException();
+
+ Set<ATermAppl> existing = deletedTypes.get( n );
+ if( existing == null ) {
+ existing = new HashSet<ATermAppl>();
+ deletedTypes.put( n, existing );
+ }
+
+ return existing.add( type );
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#addNewEdge(org.mindswap.pellet.Edge)
+ */
+ public boolean addNewEdge(Edge e) {
+ if( e == null )
+ throw new NullPointerException();
+
+ return newEdges.add( e );
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#addNewIndividual(org.mindswap.pellet.Individual)
+ */
+ public boolean addNewIndividual(Individual i) {
+ if( i == null )
+ throw new NullPointerException();
+
+ return newIndividuals.add( i );
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#addUnprunedNode(org.mindswap.pellet.Node)
+ */
+ public boolean addUnprunedNode(Node n) {
+ if( n == null )
+ throw new NullPointerException();
+
+ return unprunedNodes.add( n );
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#addUpdatedIndividual(org.mindswap.pellet.Individual)
+ */
+ public boolean addUpdatedIndividual(Individual i) {
+ if( i == null )
+ throw new NullPointerException();
+
+ return updatedIndividuals.add( i );
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#clear()
+ */
+ public void clear() {
+ deletedEdges.clear();
+ deletedTypes.clear();
+ newEdges.clear();
+ newIndividuals.clear();
+ unprunedNodes.clear();
+ updatedIndividuals.clear();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#copy(org.mindswap.pellet.ABox)
+ */
+ public SimpleIncrementalChangeTracker copy(ABox target) {
+ return new SimpleIncrementalChangeTracker( this, target );
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#deletedEdges()
+ */
+ public Iterator<Edge> deletedEdges() {
+ return Collections.unmodifiableSet( deletedEdges ).iterator();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#deletedTypes()
+ */
+ public Iterator<Map.Entry<Node, Set<ATermAppl>>> deletedTypes() {
+ return Collections.unmodifiableMap( deletedTypes ).entrySet().iterator();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#newEdges()
+ */
+ public Iterator<Edge> newEdges() {
+ return Collections.unmodifiableSet( newEdges ).iterator();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#newIndividuals()
+ */
+ public Iterator<Individual> newIndividuals() {
+ return Collections.unmodifiableSet( newIndividuals ).iterator();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#unprunedNodes()
+ */
+ public Iterator<Node> unprunedNodes() {
+ return Collections.unmodifiableSet( unprunedNodes ).iterator();
+ }
+
+ /*
+ * (non-Javadoc)
+ *
+ * @see com.clarkparsia.pellet.IncrementalChangeTracker#updatedIndividuals()
+ */
+ public Iterator<Individual> updatedIndividuals() {
+ return Collections.unmodifiableSet( updatedIndividuals ).iterator();
+ }
+}
diff --git a/core/src/com/clarkparsia/pellet/utils/CollectionUtils.java b/core/src/com/clarkparsia/pellet/utils/CollectionUtils.java
new file mode 100644
index 000000000..377d7aafb
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/utils/CollectionUtils.java
@@ -0,0 +1,95 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package com.clarkparsia.pellet.utils;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ *
+ * Title: Collection Utilities
+ *
+ *
+ * Description:
+ *
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ *
+ * @author Harris Lin
+ * @author Evren Sirin
+ */
+public class CollectionUtils {
+ public static <K, V> Map<K, V> makeIdentityMap() {
+ return new IdentityHashMap<K, V>();
+ }
+
+ public static <K, V> Map<K, V> makeIdentityMap(int size) {
+ return new IdentityHashMap<K, V>( size );
+ }
+
+ public static <K, V> Map<K, V> makeIdentityMap(Map<? extends K, ? extends V> map) {
+ return new IdentityHashMap<K, V>( map );
+ }
+
+ public static <T> Set<T> makeIdentitySet() {
+ return new IdentityHashSet<T>();
+ }
+
+ public static <T> Set<T> makeIdentitySet(int size) {
+ return new IdentityHashSet<T>( size );
+ }
+
+ public static <T> Set<T> makeIdentitySet(Collection<? extends T> a) {
+ return new IdentityHashSet<T>( a );
+ }
+
+ public static <T> List<T> makeList() {
+ return new ArrayList<T>();
+ }
+
+ public static <T> List<T> makeList(int size) {
+ return new ArrayList<T>( size );
+ }
+
+ public static <T> List<T> makeList(Collection<? extends T> a) {
+ return new ArrayList<T>( a );
+ }
+
+ public static <K, V> Map<K, V> makeMap() {
+ return new HashMap<K, V>();
+ }
+
+ public static <K, V> Map<K, V> makeMap(int size) {
+ return new HashMap<K, V>( size );
+ }
+
+ public static <K, V> Map<K, V> makeMap(Map<? extends K, ? extends V> map) {
+ return new HashMap<K, V>( map );
+ }
+
+ public static <T> Set<T> makeSet() {
+ return new HashSet<T>();
+ }
+
+ public static <T> Set<T> makeSet(int size) {
+ return new HashSet<T>( size );
+ }
+
+ public static <T> Set<T> makeSet(Collection<? extends T> a) {
+ return new HashSet<T>( a );
+ }
+}
diff --git a/core/src/com/clarkparsia/pellet/utils/IdentityHashSet.java b/core/src/com/clarkparsia/pellet/utils/IdentityHashSet.java
new file mode 100644
index 000000000..1cb958056
--- /dev/null
+++ b/core/src/com/clarkparsia/pellet/utils/IdentityHashSet.java
@@ -0,0 +1,111 @@
+// Copyright (c) 2006 - 2008, Clark & Parsia, LLC.
+// This source code is available under the terms of the Affero General Public License v3.
+//
+// Please see LICENSE.txt for full license terms, including the availability of proprietary exceptions.
+// Questions, comments, or requests for clarification: licensing@clarkparsia.com
+
+package com.clarkparsia.pellet.utils;
+
+import java.util.AbstractSet;
+import java.util.Collection;
+import java.util.IdentityHashMap;
+import java.util.Iterator;
+import java.util.Set;
+
+/**
+ * Title:
+ *
+ * Description:
+ *
+ * Copyright: Copyright (c) 2008
+ *
+ * Company: Clark & Parsia, LLC.
+ *
+ * @author Evren Sirin
+ */
+public class IdentityHashSet<T> extends AbstractSet<T> implements Set<T>, Cloneable {
+ private static final Object VALUE = new Object();
+
+ private transient IdentityHashMap<T, Object> map;
+
+ public IdentityHashSet() {
+ map = new IdentityHashMap