0001: /*****************************************************************************
0002: * Source code information
0003: * -----------------------
0004: * Original author Ian Dickinson, HP Labs Bristol
0005: * Author email ian.dickinson@hp.com
0006: * Package Jena 2
0007: * Web http://sourceforge.net/projects/jena/
0008: * Created 11-Sep-2003
0009: * Filename $RCSfile: DIGAdapter.java,v $
0010: * Revision $Revision: 1.26 $
0011: * Release status $State: Exp $
0012: *
0013: * Last modified on $Date: 2008/01/02 12:07:11 $
0014: * by $Author: andy_seaborne $
0015: *
0016: * (c) Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Hewlett-Packard Development Company, LP
0017: * [See end of file]
0018:  *****************************************************************************/
0019: package com.hp.hpl.jena.reasoner.dig;
0020: // Imports
0021: ///////////////
0022: import java.util.*;
0023:
0024: import com.hp.hpl.jena.datatypes.*;
0025: import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
0026: import com.hp.hpl.jena.graph.Graph;
0027: import com.hp.hpl.jena.ontology.*;
0028: import com.hp.hpl.jena.rdf.model.*;
0029: import com.hp.hpl.jena.reasoner.TriplePattern;
0030: import com.hp.hpl.jena.util.iterator.*;
0031: import com.hp.hpl.jena.util.xml.SimpleXMLPath;
0032: import com.hp.hpl.jena.vocabulary.*;
0033:
0034: import org.apache.commons.logging.LogFactory;
0035: import org.w3c.dom.*;
0036:
0037: /**
0038: * <p>
0039: * An adapter class that mediates between a Jena InfGraph and a DIG reasoner process.
0040: * </p>
0041: *
0042: * @author Ian Dickinson, HP Labs
0043: * (<a href="mailto:Ian.Dickinson@hp.com" >email</a>)
0044: * @version CVS $Id: DIGAdapter.java,v 1.26 2008/01/02 12:07:11 andy_seaborne Exp $
0045: */
0046: public class DIGAdapter {
0047: // Constants
0048: //////////////////////////////////
0049:
0050: /** DIG profile for 1.7 */
0051: public static final DIGProfile RACER_17_PROFILE = new DIGProfile() {
0052: public String getDIGNamespace() {
0053: return "http://dl.kr.org/dig/lang";
0054: }
0055:
0056: public String getSchemaLocation() {
0057: return "http://potato.cs.man.ac.uk/dig/level0/dig.xsd";
0058: }
0059:
0060: public String getContentType() {
0061: return "application/x-www-form-urlencoded";
0062: }
0063:
0064: public String getInconsistentKBMessage() {
0065: return null;
0066: }
0067: };
0068:
0069: /** DIG profile for Pellet */
0070: public static final DIGProfile PELLET_PROFILE = new DIGProfile() {
0071: public String getDIGNamespace() {
0072: return "http://dl.kr.org/dig/lang";
0073: }
0074:
0075: public String getSchemaLocation() {
0076: return "http://potato.cs.man.ac.uk/dig/level0/dig.xsd";
0077: }
0078:
0079: public String getContentType() {
0080: return "application/x-www-form-urlencoded";
0081: }
0082:
0083: public String getInconsistentKBMessage() {
0084: return "Inconsistent KB";
0085: }
0086: };
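// A different DIG implementation can be accommodated by installing a custom profile via
// setProfile(). Sketch only -- the namespace, schema location and content type below are
// hypothetical values, not taken from any particular reasoner:
//
//     adapter.setProfile( new DIGProfile() {
//         public String getDIGNamespace()          { return "http://dl.kr.org/dig/lang"; }
//         public String getSchemaLocation()        { return "http://example.org/dig.xsd"; }
//         public String getContentType()           { return "text/xml"; }
//         public String getInconsistentKBMessage() { return null; }
//     } );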
0087:
0088: // switch codes for expression types
0089: private static final int UNION = 1;
0090: private static final int INTERSECTION = 2;
0091: private static final int COMPLEMENT = 3;
0092: private static final int ENUMERATED = 4;
0093: private static final int RESTRICTION = 5;
0094:
0095: /** Prefix used to mark bNode identifiers */
0096: public static final String ANON_MARKER = "anon:";
0097:
0098: /** Well-known concept URIs */
0099: public static final List KNOWN_CONCEPTS = Arrays
0100: .asList(new Object[] { OWL.Thing.getURI(),
0101: OWL.Nothing.getURI(), DAML_OIL.Thing.getURI(),
0102: DAML_OIL.Nothing.getURI() });
0103:
0104: /** Well-known integer type URIs; these we will translate into DIG integer attributes */
0105: public static final List XSD_INT_TYPES = Arrays
0106: .asList(new Object[] { XSDDatatype.XSDint.getURI(),
0107: XSDDatatype.XSDinteger.getURI(),
0108: XSDDatatype.XSDnonNegativeInteger.getURI(),
0109: XSDDatatype.XSDbyte.getURI(),
0110: XSDDatatype.XSDshort.getURI(),
0111: XSDDatatype.XSDlong.getURI(),
0112: XSDDatatype.XSDunsignedByte.getURI(),
0113: XSDDatatype.XSDunsignedLong.getURI(),
0114: XSDDatatype.XSDunsignedInt.getURI(),
0115: XSDDatatype.XSDunsignedShort.getURI(), });
0116:
0117: // Static variables
0118: //////////////////////////////////
0119:
0120: /** Query ID counter */
0121: private static int s_queryID = 0;
0122:
0123: /** The table that represents the query translations we know about */
0124: protected static DIGQueryTranslator[] s_queryTable = {
0125: // subsumes when testing for subsumption between two known class expressions
0126: new DIGQuerySubsumesTranslator(RDFS.subClassOf.getURI()),
0127: new DIGQuerySubsumesTranslator(DAML_OIL.subClassOf.getURI()),
0128:
0129: // testing for disjointness between two known class expressions
0130: new DIGQueryDisjointTranslator(OWL.disjointWith.getURI()),
0131: new DIGQueryDisjointTranslator(DAML_OIL.disjointWith
0132: .getURI()),
0133:
0134: // ancestors and parents when testing for a named and variable node
0135: new DIGQueryAncestorsTranslator(RDFS.subClassOf.getURI(),
0136: true),
0137: new DIGQueryAncestorsTranslator(RDFS.subClassOf.getURI(),
0138: false),
0139: new DIGQueryAncestorsTranslator(DAML_OIL.subClassOf
0140: .getURI(), true),
0141: new DIGQueryAncestorsTranslator(DAML_OIL.subClassOf
0142: .getURI(), false),
0143:
0144: // all parents and all children
0145: new DIGQueryParentsTranslator(
0146: ReasonerVocabulary.directSubClassOf.getURI(), true),
0147: new DIGQueryParentsTranslator(
0148: ReasonerVocabulary.directSubClassOf.getURI(), false),
0149:
0150: // specific named parent or child
0151: new DIGQueryParentsTranslator(null,
0152: ReasonerVocabulary.directSubClassOf.getURI(), null,
0153: true),
0154: new DIGQueryParentsTranslator(null,
0155: ReasonerVocabulary.directSubClassOf.getURI(), null,
0156: false),
0157:
0158: // the entire class hierarchy
0159: new DIGQueryClassHierarchyTranslator(RDFS.subClassOf
0160: .getURI()),
0161: new DIGQueryClassHierarchyTranslator(DAML_OIL.subClassOf
0162: .getURI()),
0163:
0164: // equivalent classes
0165: new DIGQueryEquivalentsTranslator(OWL.equivalentClass
0166: .getURI(), true),
0167: new DIGQueryEquivalentsTranslator(OWL.equivalentClass
0168: .getURI(), false),
0169: new DIGQueryEquivalentsTranslator(DAML_OIL.sameClassAs
0170: .getURI(), true),
0171: new DIGQueryEquivalentsTranslator(DAML_OIL.sameClassAs
0172: .getURI(), false),
0173:
0174: new DIGQueryIsEquivalentTranslator(OWL.equivalentClass
0175: .getURI()),
0176: new DIGQueryIsEquivalentTranslator(DAML_OIL.sameClassAs
0177: .getURI()),
0178:
0179: // rancestors and rparents when testing for a named and variable node
0180: new DIGQueryRoleAncestorsTranslator(RDFS.subPropertyOf
0181: .getURI(), true),
0182: new DIGQueryRoleAncestorsTranslator(RDFS.subPropertyOf
0183: .getURI(), false),
0184: new DIGQueryRoleAncestorsTranslator(DAML_OIL.subPropertyOf
0185: .getURI(), true),
0186: new DIGQueryRoleAncestorsTranslator(DAML_OIL.subPropertyOf
0187: .getURI(), false),
0188:
0189: new DIGQueryRoleParentsTranslator(
0190: ReasonerVocabulary.directSubPropertyOf.getURI(),
0191: true),
0192: new DIGQueryRoleParentsTranslator(
0193: ReasonerVocabulary.directSubPropertyOf.getURI(),
0194: false),
0195:
0196: // the entire role hierarchy
0197: new DIGQueryRoleHierarchyTranslator(RDFS.subPropertyOf
0198: .getURI()),
0199: new DIGQueryRoleHierarchyTranslator(DAML_OIL.subPropertyOf
0200: .getURI()),
0201:
0202: // all concepts query for [* rdf:type :Class]
0203: new DIGQueryAllConceptsTranslator(RDF.type.getURI(),
0204: RDFS.Class.getURI()),
0205: new DIGQueryAllConceptsTranslator(RDF.type.getURI(),
0206: OWL.Class.getURI()),
0207: new DIGQueryAllConceptsTranslator(RDF.type.getURI(),
0208: DAML_OIL.Class.getURI()),
0209:
0210: // instances
0211: new DIGQueryInstancesTranslator(RDF.type.getURI()),
0212: new DIGQueryInstancesTranslator(DAML_OIL.type.getURI()),
0213: new DIGQueryTypesTranslator(RDF.type.getURI()),
0214: new DIGQueryTypesTranslator(DAML_OIL.type.getURI()),
0215: new DIGQueryInstanceTranslator(RDF.type.getURI()),
0216: new DIGQueryInstanceTranslator(DAML_OIL.type.getURI()),
0217: new DIGQueryDifferentFromTranslator(OWL.differentFrom
0218: .getURI()),
0219: new DIGQueryDifferentFromTranslator(
0220: DAML_OIL.differentIndividualFrom.getURI()),
0221: new DIGQueryRoleFillersTranslator(),
0222: new DIGQueryRoleFillerTranslator(),
0223: new DIGQueryRelatedIndividualsTranslator(),
0224:
0225: // specific type tests
0226: new DIGQueryIsConceptTranslator(),
0227: new DIGQueryIsRoleTranslator(),
0228: new DIGQueryIsIndividualTranslator(), };
0229:
0230: // Instance variables
0231: //////////////////////////////////
0232:
0233: /** The profile for the DIG interface this reasoner is interacting with; defaults to the Pellet profile */
0234: protected DIGProfile m_profile = PELLET_PROFILE;
0235:
0236: /** The graph that contains the data we are uploading to the external DIG reasoner */
0237: protected OntModel m_sourceData;
0238:
0239: /** Counter for generating skolem names */
0240: private int m_skolemCounter = 0;
0241:
0242: /** The connection to the DIG reasoner */
0243: private DIGConnection m_connection;
0244:
0245: /** The set of known individual names from the DIG reasoner */
0246: protected Set m_indNames = new HashSet();
0247:
0248: /** Flag that is set to true once we have asked the remote reasoner for its list of individual names */
0249: protected boolean m_indNamesAsked = false;
0250:
0251: /** The set of known concept names from the DIG reasoner */
0252: protected Set m_conceptNames = new HashSet();
0253:
0254: /** Flag that is set to true once we have asked the remote reasoner for its list of concept names */
0255: protected boolean m_conceptNamesAsked = false;
0256:
0257: /** The set of known role names from the DIG reasoner */
0258: protected Set m_roleNames = new HashSet();
0259:
0260: /** Flag that is set to true once we have asked the remote reasoner for its list of role names */
0261: protected boolean m_roleNamesAsked = false;
0262:
0263: /** Model containing axiom statements */
0264: protected Model m_axioms = null;
0265:
0266: // Constructors
0267: //////////////////////////////////
0268:
0269: /**
0270: * <p>Construct a DIG adapter for the given source data graph, which is encoding an
0271: * ontology in a language represented by the given model spec. Allocates a new
0272: * DIG connection using the default connection URL (<code>http://localhost:8081</code>).</p>
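* <p>A minimal usage sketch (illustrative only; assumes a DIG-compliant reasoner is already
* listening at the default URL, and that the input file name is hypothetical):</p>
* <pre>
* OntModel m = ModelFactory.createOntologyModel( OntModelSpec.OWL_MEM, null );
* m.read( "file:data/my-ontology.owl" );                        // hypothetical source document
* DIGAdapter adapter = new DIGAdapter( OntModelSpec.OWL_DL_MEM, m.getGraph() );
* adapter.resetKB();                                            // bind and clear a KB on the server
* adapter.uploadKB();                                           // send the model as one large TELL
* adapter.close();                                              // return the connection to the pool
* </pre>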
0273: * @param spec An ont model spec encoding the ontology language of the source graph
0274: * @param source The graph that contains the source data on which the DIG reasoner
0275: * will operate
0276: */
0277: public DIGAdapter(OntModelSpec spec, Graph source) {
0278: this (spec, source, DIGConnectionPool.getInstance().allocate(),
0279: null);
0280: }
0281:
0282: /**
0283: * <p>Construct a DIG adapter for the given source data graph, which is encoding an
0284: * ontology in a language represented by the given model spec.</p>
0285: * @param spec An ont model spec encoding the ontology language of the source graph
0286: * @param source The graph that contains the source data on which the DIG reasoner
0287: * will operate
0288: * @param connection A pre-configured DIG connection to use to communicate with the
0289: * external reasoner
0290: * @param axioms A model containing axioms appropriate to the ontology language
0291: * this adapter is processing. May be null.
0292: */
0293: public DIGAdapter(OntModelSpec spec, Graph source,
0294: DIGConnection connection, Model axioms) {
0295: m_connection = connection;
0296: m_axioms = axioms;
0297:
0298: // we wrap the given graph in a suitable ontology model
0299: m_sourceData = ModelFactory.createOntologyModel(spec,
0300: ModelFactory.createModelForGraph(source));
0301:
0302: // don't do the .as() checking, since we know we're not using the reasoner
0303: m_sourceData.setStrictMode(false);
0304: }
0305:
0306: // External signature methods
0307: //////////////////////////////////
0308:
0309: /**
0310: * <p>Answer the DIG profile for the DIG interface this reasoner is attached to.</p>
0311: * @return A profile detailing the parameters of the DIG variant this reasoner is interacting with.
0312: */
0313: public DIGProfile getProfile() {
0314: return m_profile;
0315: }
0316:
0317: /**
0318: * <p>Set the profile specifying the variable parts of the DIG profile that are being
0319: * used in this instance.</p>
0320: * @param profile The new DIG profile
0321: */
0322: public void setProfile(DIGProfile profile) {
0323: m_profile = profile;
0324: }
0325:
0326: /**
0327: * <p>Answer the ontology language profile we're assuming in this reasoner.</p>
0328: * @return The ontology language via the language profile
0329: */
0330: public Profile getOntLanguage() {
0331: return m_sourceData.getProfile();
0332: }
0333:
0334: /**
0335: * <p>Answer the DIG identification structure we obtain by querying the attached reasoner.</p>
0336: * @return An object containing the results of querying the reasoner for its identity
0337: * and capabilities
0338: */
0339: public DIGIdentifier getDigIdentifier() {
0340: Document getIDVerb = getConnection().createDigVerb(
0341: DIGProfile.GET_IDENTIFIER, getProfile());
0342: return new DigIdentifierImpl(getConnection().sendDigVerb(
0343: getIDVerb, getProfile()));
0344: }
0345:
0346: /**
0347: * <p>Upload the entire contents of the local knowledge base (OWL/DAML model)
0348: * to the DIG reasoner, using a single large TELL verb.</p>
0349: * @return True if the ontology model was uploaded to DIG without any warnings. Recent warnings
0350: * are available via {@link #getRecentWarnings}
0351: * @exception DIGReasonerException If the upload fails for any reason. The error message from
0352: * the DIG reasoner will be returned.
0353: */
0354: public boolean uploadKB() {
0355: // ensure first that we have a KB identifier
0356: getConnection().bindKB(false, getProfile());
0357:
0358: // now tell the existing KB contents
0359: Document kbDIG = translateKbToDig();
0360:
0361: Document response = getConnection().sendDigVerb(kbDIG,
0362: getProfile());
0363: return !getConnection().warningCheck(response);
0364: }
0365:
0366: /**
0367: * <p>Answer an iterator over any recent warnings returned from the remote DIG reasoner.</p>
0368: * @return An iterator over any warnings; if there are no warnings the return
0369: * value will be an iterator that returns <code>hasNext()</code> = false.
0370: */
0371: public Iterator getRecentWarnings() {
0372: return getConnection().getWarnings();
0373: }
0374:
0375: /**
0376: * <p>Answer an XML document that contains the DIG translation of the local graph, wrapped
0377: * as a tell verb</p>
0378: * @return An XML document containing the tell verb
0379: */
0380: public Document translateKbToDig() {
0381: Document tell = getConnection().createDigVerb(DIGProfile.TELLS,
0382: getProfile());
0383: Element root = tell.getDocumentElement();
0384:
0385: addNamedEntities(root);
0386: translateClasses(root);
0387: translateRoles(root);
0388: translateAttributes(root);
0389: translateIndividuals(root);
0390: translateAllDifferentAxioms(root);
0391:
0392: return tell;
0393: }
0394:
0395: /**
0396: * <p>Clear the old contents of the DIG knowledge base</p>
0397: */
0398: public void resetKB() {
0399: getConnection().bindKB(true, getProfile());
0400:
0401: // reset the name caches
0402: m_indNames.clear();
0403: m_indNamesAsked = false;
0404: m_conceptNames.clear();
0405: m_conceptNamesAsked = false;
0406: m_roleNames.clear();
0407: m_roleNamesAsked = false;
0408: }
0409:
0410: /**
0411: * <p>Answer this adapter's connection to the DIG reasoner.</p>
0412: * @return The DIG connector this adapter is using, or null if the connection has
0413: * been closed.
0414: */
0415: public DIGConnection getConnection() {
0416: return m_connection;
0417: }
0418:
0419: /**
0420: * <p>Close this adapter, and release the connector to the external DIG KB.</p>
0421: */
0422: public void close() {
0423: getConnection().release();
0424: m_connection = null;
0425: }
0426:
0427: /**
0428: * <p>Basic pattern lookup interface - answer an iterator over the triples
0429: * matching the given pattern. Where possible, this query will first be
0430: * given to the external reasoner, with the local graph used to generate
0431: * supplemental bindings.</p>
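* <p>Illustrative sketch (not part of the original documentation): querying for the named
* subclasses of a class, where the <code>ex:</code> URI is hypothetical and <code>Node</code>
* is <code>com.hp.hpl.jena.graph.Node</code>.</p>
* <pre>
* Node s = Node.ANY;                                    // variable subject
* Node p = RDFS.subClassOf.asNode();
* Node o = Node.createURI( "http://example.org/ns#Animal" );
* ExtendedIterator i = adapter.find( new TriplePattern( s, p, o ) );
* while (i.hasNext()) {
*     System.out.println( i.next() );                   // triples from the reasoner plus local data
* }
* </pre>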
0432: * @param pattern a TriplePattern to be matched against the data
0433: * @return An ExtendedIterator over all Triples in the data set
0434: * that match the pattern
0435: */
0436: public ExtendedIterator find(TriplePattern pattern) {
0437: DIGQueryTranslator tr = getQueryTranslator(pattern, null);
0438:
0439: ExtendedIterator remote = (tr == null) ? null : tr.find(
0440: pattern, this );
0441:
0442: com.hp.hpl.jena.graph.Node pSubj = normaliseNode(pattern
0443: .getSubject());
0444: com.hp.hpl.jena.graph.Node pPred = normaliseNode(pattern
0445: .getPredicate());
0446: com.hp.hpl.jena.graph.Node pObj = normaliseNode(pattern
0447: .getObject());
0448: ExtendedIterator local = m_sourceData.getGraph().find(pSubj,
0449: pPred, pObj);
0450:
0451: // if we have a remote iterator, prepend to the local one and drop duplicates
0452: ExtendedIterator i = (remote == null) ? local : remote
0453: .andThen(local);
0454:
0455: // add the axioms if specified
0456: i = (m_axioms == null) ? i : i.andThen(m_axioms.getGraph()
0457: .find(pSubj, pPred, pObj));
0458:
0459: // make sure we don't have duplicates
0460: return UniqueExtendedIterator.create(i);
0461: }
0462:
0463: /**
0464: * <p>Basic pattern lookup interface - answer an iterator over the triples
0465: * matching the given (S,P,O) pattern, given also some premises for the
0466: * query. Where possible, this query will first be
0467: * given to the external reasoner, with the local graph used to generate
0468: * supplemental bindings.</p>
0469: * @param pattern a TriplePattern to be matched against the data
0470: * @param premises A model containing additional premises for the find query,
0471: * typically used to allow the subject and/or object to be an expression
0472: * rather than just a simple node
0473: * @return An ExtendedIterator over all Triples in the data set
0474: * that match the pattern
0475: */
0476: public ExtendedIterator find(TriplePattern pattern, Model premises) {
0477: DIGQueryTranslator tr = getQueryTranslator(pattern, premises);
0478:
0479: if (tr == null) {
0480: LogFactory.getLog(getClass()).debug(
0481: "Could not find DIG query translator for "
0482: + pattern);
0483: }
0484:
0485: ExtendedIterator remote = (tr == null) ? null : tr.find(
0486: pattern, this , premises);
0487:
0488: com.hp.hpl.jena.graph.Node pSubj = normaliseNode(pattern
0489: .getSubject());
0490: com.hp.hpl.jena.graph.Node pPred = normaliseNode(pattern
0491: .getPredicate());
0492: com.hp.hpl.jena.graph.Node pObj = normaliseNode(pattern
0493: .getObject());
0494: ExtendedIterator local = m_sourceData.getGraph().find(pSubj,
0495: pPred, pObj);
0496:
0497: // if we have a remote iterator, prepend to the local one and drop duplicates
0498: ExtendedIterator i = (remote == null) ? local : remote
0499: .andThen(local);
0500:
0501: // add the axioms if specified
0502: i = (m_axioms == null) ? i : i.andThen(m_axioms.getGraph()
0503: .find(pSubj, pPred, pObj));
0504:
0505: // make sure we don't have duplicates
0506: return UniqueExtendedIterator.create(i);
0507: }
0508:
0509: /**
0510: * <p>Answer the query translator that matches the given pattern, if any</p>
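* <p>For example (illustrative): a pattern such as <code>(?x rdfs:subClassOf ex:Animal)</code>
* will typically be matched by one of the subsumption or ancestor translators registered in
* <code>s_queryTable</code>; a pattern with no registered translator answers null, and
* {@link #find(TriplePattern)} then falls back to the local graph alone.</p>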
0511: * @param pattern The triple pattern that has been received
0512: * @param premises A model containing the premises to a query (e.g. a class expression)
0513: * @return A DIG translator that can translate this pattern to a DIG query,
0514: * or null if no matches.
0515: */
0516: public DIGQueryTranslator getQueryTranslator(TriplePattern pattern,
0517: Model premises) {
0518: for (int i = 0; i < s_queryTable.length; i++) {
0519: DIGQueryTranslator dqt = s_queryTable[i];
0520:
0521: if (dqt.trigger(pattern, this , premises)) {
0522: return dqt;
0523: }
0524: }
0525:
0526: return null;
0527: }
0528:
0529: /**
0530: * <p>Answer the graph of local (source) data.</p>
0531: * @return The graph containing the local source data.
0532: */
0533: public Graph getGraph() {
0534: return m_sourceData.getGraph();
0535: }
0536:
0537: /**
0538: * <p>Answer an identifier for a resource, named or bNode</p>
0539: * @param r A resource
0540: * @return A unique identifier for the resource as a string, which will either
0541: * be the resource URI for named resources, or a unique ID string for bNodes
0542: */
0543: public String getResourceID(Resource r) {
0544: return getNodeID(r.asNode());
0545: }
0546:
0547: /**
0548: * <p>Answer an identifier for a node, named or anon</p>
0549: * @param n An RDF node
0550: * @return A unique identifier for the node as a string, which will either
0551: * be the resource URI for named nodes, or a unique ID string for bNodes
0552: */
0553: public String getNodeID(com.hp.hpl.jena.graph.Node n) {
0554: if (n.isBlank()) {
0555: return ANON_MARKER + n.getBlankNodeId().toString();
0556: } else {
0557: return n.getURI();
0558: }
0559: }
0560:
0561: /**
0562: * <p>Add a DIG reference to the class identified in the source graph by the given Jena
0563: * graph Node to the given XML element. If the class is a named class, this will be
0564: * a <code>&lt;catom&gt;</code> element, otherwise it will be a class description axiom.
0565: * Assumes that the instance variable <code>m_sourceData</code> provides the statements that
0566: * further define the class if it is a description rather than a name.
0567: * </p>
0568: * @param elem The parent XML element to which the class description will be attached
0569: * @param node An RDF graph node representing a class we wish to describe.
0570: */
0571: public void addClassDescription(Element elem,
0572: com.hp.hpl.jena.graph.Node node) {
0573: addClassDescription(elem, node, m_sourceData);
0574: }
0575:
0576: /**
0577: * <p>Add a DIG reference to the class identified in the source graph by the given Jena
0578: * graph Node to the given XML element. If the class is a named class, this will be
0579: * a <code>&lt;catom&gt;</code> element, otherwise it will be a class description axiom.
0580: * </p>
0581: * @param elem The parent XML element to which the class description will be attached
0582: * @param node An RDF graph node representing a class we wish to describe.
0583: * @param sourceData A model containing the statements about the given class description
0584: * resource
0585: */
0586: public void addClassDescription(Element elem,
0587: com.hp.hpl.jena.graph.Node node, Model sourceData) {
0588: Model m = (sourceData == null) ? m_sourceData : sourceData;
0589: addClassDescription(elem, (Resource) m.getRDFNode(node), m);
0590: }
0591:
0592: /**
0593: * <p>Add a DIG reference to the class identified in the source graph by the given Jena
0594: * resource to the given XML element. If the class is a named class, this will be
0595: * a <code>&lt;catom&gt;</code> element, otherwise it will be a class description axiom.</p>
0596: * @param elem The parent XML element to which the class description will be attached
0597: * @param res An RDF resource representing a class we wish to describe.
0598: * @param sourceData A model containing the statements about the given class description
0599: * resource
0600: */
0601: public void addClassDescription(Element elem, Resource res,
0602: Model sourceData) {
0603: // ensure we have a resource from the source data model
0604: Resource cls = (res.getModel() != sourceData) ? sourceData
0605: .getResource(res.getURI()) : res;
0606:
0607: if (!cls.isAnon()
0608: || m_conceptNames.contains(getNodeID(cls.asNode()))) {
0609: // a named class, or an already known bNode
0610: translateClassIdentifier(elem, cls);
0611: } else {
0612: // a new bNode introducing a class expression
0613: translateClassDescription(elem, (OntClass) cls
0614: .as(OntClass.class), sourceData);
0615: }
0616: }
0617:
0618: /**
0619: * <p>Answer true if the given node corresponds to one of the individuals known to
0620: * the DIG reasoner.</p>
0621: * @param node A node to test
0622: * @return True if <code>node</code> is a known individual
0623: */
0624: public boolean isIndividual(com.hp.hpl.jena.graph.Node node) {
0625: return node.isConcrete() && !node.isLiteral()
0626: && getKnownIndividuals().contains(getNodeID(node));
0627: }
0628:
0629: /**
0630: * <p>Answer true if the given node corresponds to one of the roles known to
0631: * the DIG reasoner.</p>
0632: * @param node A node to test
0633: * @param premises A model defining premises that may encode more information about
0634: * node, or may be null
0635: * @return True if <code>node</code> is a known role
0636: */
0637: public boolean isRole(com.hp.hpl.jena.graph.Node node,
0638: Model premises) {
0639: return node.isConcrete()
0640: && (getKnownRoles().contains(getNodeID(node)) || ((premises != null) && isPremisesRole(
0641: node, premises)));
0642: }
0643:
0644: /**
0645: * <p>Answer true if the given node corresponds to one of the concepts known to
0646: * the DIG reasoner.</p>
0647: * @param node A node to test
0648: * @param premises A model defining premises that may encode more information about
0649: * node, or may be null
0650: * @return True if <code>node</code> is a known concept
0651: */
0652: public boolean isConcept(com.hp.hpl.jena.graph.Node node,
0653: Model premises) {
0654: return node.isConcrete()
0655: && !node.isLiteral()
0656: && (getKnownConcepts().contains(getNodeID(node))
0657: || ((premises != null) && isPremisesClass(node,
0658: premises)) || KNOWN_CONCEPTS
0659: .contains(getNodeID(node)));
0660: }
0661:
0662: /**
0663: * <p>Answer the ontology language specification for the source model underlying
0664: * this DIG adapter.</p>
0665: * @return The ontology model spec
0666: */
0667: public OntModelSpec getSourceSpecification() {
0668: return m_sourceData.getSpecification();
0669: }
0670:
0671: /**
0672: * <p>Create a new element to represent a query, adding to it a unique query
0673: * ID.</p>
0674: * @param query The query document
0675: * @param elemName The string name of the query element
0676: * @return The new query element
0677: */
0678: public Element createQueryElement(Document query, String elemName) {
0679: Element qElem = addElement(query.getDocumentElement(), elemName);
0680: qElem.setAttribute(DIGProfile.ID, "q" + s_queryID++);
0681: return qElem;
0682: }
0683:
0684: // Internal implementation methods
0685: //////////////////////////////////
0686:
0687: /**
0688: * <p>In DIG, defXXX elements are required to introduce all named entities,
0689: * such as concepts and roles. This method collects such definitions and
0690: * adds the defXXX elements as children of the tell element.</p>
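* <p>Sketch of the kind of fragment this adds to the tell document (the URIs are illustrative,
* and the exact element names come from the DIG profile constants):</p>
* <pre>
* &lt;defconcept name="http://example.org/ns#Animal"/&gt;
* &lt;defrole name="http://example.org/ns#hasParent"/&gt;
* &lt;defattribute name="http://example.org/ns#age"/&gt;
* &lt;defindividual name="http://example.org/ns#fido"/&gt;
* </pre>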
0691: * @param tell The XML element, typically &lt;tells&gt;, to which to attach the
0692: * declarations
0693: */
0694: protected void addNamedEntities(Element tell) {
0695: // first we collect the named entities
0696: HashSet roles = new HashSet();
0697: HashSet attributes = new HashSet();
0698: HashSet concepts = new HashSet();
0699: HashSet individuals = new HashSet();
0700:
0701: addAll(m_sourceData.listClasses(), concepts);
0702: addAll(m_sourceData.listDatatypeProperties(), attributes);
0703: addAll(m_sourceData.listIndividuals(), individuals);
0704:
0705: collectRoleProperties(roles);
0706:
0707: // collect the DIG definitions at the beginning of the document
0708: addNamedDefs(tell, concepts.iterator(), DIGProfile.DEFCONCEPT,
0709: m_conceptNames);
0710: addNamedDefs(tell, roles.iterator(), DIGProfile.DEFROLE,
0711: m_roleNames);
0712: addNamedDefs(tell, attributes.iterator(),
0713: DIGProfile.DEFATTRIBUTE, null);
0714: addNamedDefs(tell, individuals.iterator(),
0715: DIGProfile.DEFINDIVIDUAL, m_indNames);
0716: }
0717:
0718: /** Add all object properties (roles) to the given collection */
0719: protected void collectRoleProperties(Collection roles) {
0720: addAll(m_sourceData.listObjectProperties(), roles);
0721: addAll(m_sourceData.listInverseFunctionalProperties(), roles);
0722: addAll(m_sourceData.listTransitiveProperties(), roles);
0723:
0724: // not present in DAML
0725: if (m_sourceData.getProfile().SYMMETRIC_PROPERTY() != null) {
0726: addAll(m_sourceData.listSymmetricProperties(), roles);
0727: }
0728: }
0729:
0730: /**
0731: * <p>Add the named definitions from the given iterator to the tell document we are building.</p>
0732: * @param tell The document being built
0733: * @param i An iterator over resources
0734: * @param defType The type of DIG element we want to build
0735: * @param nameCollection Optional set of names of this type of entity to collect
0736: */
0737: protected void addNamedDefs(Element tell, Iterator i,
0738: String defType, Set nameCollection) {
0739: while (i.hasNext()) {
0740: RDFNode n = (RDFNode) i.next();
0741: if (n instanceof Resource) {
0742: String id = getNodeID(n.asNode());
0743: addNamedElement(tell, defType, id);
0744:
0745: // a named concept, role, etc is being defined
0746: if (nameCollection != null) {
0747: nameCollection.add(id);
0748: }
0749: }
0750: }
0751: }
0752:
0753: /**
0754: * <p>Answer an element with the given element name,
0755: * and with an attribute 'name' whose value is the given URI.</p>
0756: * @param parent The parent node to add to
0757: * @param elemName The element name, e.g. defconcept
0758: * @param uri The URI of the definition
0759: * @return A named element
0760: */
0761: protected Element addNamedElement(Element parent, String elemName,
0762: String uri) {
0763: Element elem = addElement(parent, elemName);
0764: elem.setAttribute(DIGProfile.NAME, uri);
0765:
0766: return elem;
0767: }
0768:
0769: /** Add to the given element a child element with the given name */
0770: protected Element addElement(Element parent, String childName) {
0771: Element child = parent.getOwnerDocument().createElement(
0772: childName);
0773: return (Element) parent.appendChild(child);
0774: }
0775:
0776: /** Add iterator contents to collection */
0777: private void addAll(Iterator i, Collection c) {
0778: for (; i.hasNext(); c.add(i.next()))
0779: ;
0780: }
0781:
0782: /**
0783: * <p>Translate all of the classes in the current KB into descriptions
0784: * using the DIG concept language, and attach the axioms generated
0785: * to the given element.</p>
0786: * @param tell The XML element, typically &lt;tells&gt;, to which
0787: * to attach the generated translations.
0788: */
0789: protected void translateClasses(Element tell) {
0790: translateSubClassAxioms(tell);
0791: translateClassEquivalences(tell);
0792: translateClassDisjointAxioms(tell);
0793:
0794: translateRestrictions(tell);
0795:
0796: // now the implicit equivalences
0797: translateClassExpressions(tell, getOntLanguage()
0798: .INTERSECTION_OF(), INTERSECTION);
0799: translateClassExpressions(tell, getOntLanguage().UNION_OF(),
0800: UNION);
0801: translateClassExpressions(tell, getOntLanguage()
0802: .COMPLEMENT_OF(), COMPLEMENT);
0803: translateClassExpressions(tell, getOntLanguage().ONE_OF(),
0804: ENUMERATED);
0805: }
0806:
0807: /**
0808: * <p>Translate the sub-class axioms in the source model into DIG
0809: * impliesc axioms</p>
0810: * @param tell The node representing the DIG tell verb
0811: */
0812: protected void translateSubClassAxioms(Element tell) {
0813: StmtIterator i = m_sourceData.listStatements(null,
0814: getOntLanguage().SUB_CLASS_OF(), (RDFNode) null);
0815: while (i.hasNext()) {
0816: Statement sc = i.nextStatement();
0817: Element impliesc = addElement(tell, DIGProfile.IMPLIESC);
0818: addClassDescription(impliesc, sc.getSubject(), m_sourceData);
0819: addClassDescription(impliesc, sc.getResource(),
0820: m_sourceData);
0821: }
0822: }
0823:
0824: /**
0825: * <p>Translate the class equivalence axioms in the source model into DIG
0826: * equalsc axioms.</p>
0827: * @param tell The node representing the DIG tell verb
0828: */
0829: protected void translateClassEquivalences(Element tell) {
0830: // first we do stated equivalences
0831: StmtIterator i = m_sourceData.listStatements(null,
0832: getOntLanguage().EQUIVALENT_CLASS(), (RDFNode) null);
0833: while (i.hasNext()) {
0834: Statement sc = i.nextStatement();
0835: Element equalc = addElement(tell, DIGProfile.EQUALC);
0836: addClassDescription(equalc, sc.getSubject(), m_sourceData);
0837: addClassDescription(equalc, sc.getResource(), m_sourceData);
0838: }
0839: }
0840:
0841: /**
0842: * <p>Translate class expressions, such as union classes, intersection classes, etc., into the DIG
0843: * concept language. The translations are attached to the given tell node.</p>
0844: * @param tell The node representing the DIG tell verb
0845: * @param p A property that will require an implicit equivalence to be made explicit
0846: * in a correct translation to DIG
0847: * @param classExprType Denotes the type of class expression we are translating
0848: */
0849: protected void translateClassExpressions(Element tell, Property p,
0850: int classExprType) {
0851: translateClassExpressions(tell, m_sourceData.listStatements(
0852: null, p, (RDFNode) null), classExprType, m_sourceData);
0853: }
0854:
0855: /**
0856: * <p>Translate the restrictions in the source model into the DIG concept language.</p>
0857: * @param tell The node representing the DIG tell verb
0858: */
0859: protected void translateRestrictions(Element tell) {
0860: translateClassExpressions(tell, m_sourceData.listStatements(
0861: null, RDF.type, getOntLanguage().RESTRICTION()),
0862: RESTRICTION, m_sourceData);
0863: }
0864:
0865: /**
0866: * <p>A named owl:Class with a class-construction axiom directly attached implicitly
0867: * asserts an equivalence with the anonymous class that has the given class construction.</p>
0868: * @param tell The node representing the DIG tell verb
0869: * @param i A statement iterator whose subjects denote the class expressions to be translated
0870: * @param classExprType Denotes the type of class expression we are translating
0871: */
0872: protected void translateClassExpressions(Element tell,
0873: StmtIterator i, int classExprType, Model source) {
0874: while (i.hasNext()) {
0875: OntClass cls = (OntClass) i.nextStatement().getSubject()
0876: .as(OntClass.class);
0877:
0878: Element equalc = addElement(tell, DIGProfile.EQUALC);
0879: addClassDescription(equalc, cls, source);
0880:
0881: switch (classExprType) {
0882: case UNION:
0883: translateUnionClass(equalc, cls, source);
0884: break;
0885: case INTERSECTION:
0886: translateIntersectionClass(equalc, cls, source);
0887: break;
0888: case COMPLEMENT:
0889: translateComplementClass(equalc, cls, source);
0890: break;
0891: case ENUMERATED:
0892: translateEnumeratedClass(equalc, cls, source);
0893: break;
0894: case RESTRICTION:
0895: translateRestrictionClass(equalc, cls, source);
0896: break;
0897: }
0898: }
0899: }
0900:
0901: /**
0902: * <p>Translate a node representing a class expression (presumed anonymous, though
0903: * this is not tested) into the appropriate DIG class axiom.</p>
0904: * @param parent The XML node that will be the parent of the class description axiom
0905: * @param classDescr An OntClass representing the class expression to be translated
0906: */
0907: protected void translateClassDescription(Element parent,
0908: OntClass classDescr, Model source) {
0909: if (classDescr.isUnionClass()) {
0910: translateUnionClass(parent, classDescr, source);
0911: } else if (classDescr.isIntersectionClass()) {
0912: translateIntersectionClass(parent, classDescr, source);
0913: } else if (classDescr.isComplementClass()) {
0914: translateComplementClass(parent, classDescr, source);
0915: } else if (classDescr.isEnumeratedClass()) {
0916: translateEnumeratedClass(parent, classDescr, source);
0917: } else if (classDescr.isRestriction()) {
0918: translateRestrictionClass(parent, classDescr, source);
0919: }
0920: }
0921:
0922: /**
0923: * <p>Translate any statements from the KB that indicates disjointness between
0924: * two classes.</p>
0925: * @param tell The XML element representing the tell verb we will attach the
0926: * translations to.
0927: */
0928: protected void translateClassDisjointAxioms(Element tell) {
0929: StmtIterator i = m_sourceData.listStatements(null,
0930: getOntLanguage().DISJOINT_WITH(), (RDFNode) null);
0931: while (i.hasNext()) {
0932: Statement sc = i.nextStatement();
0933: Element disjoint = addElement(tell, DIGProfile.DISJOINT);
0934: addClassDescription(disjoint, sc.getSubject(), m_sourceData);
0935: addClassDescription(disjoint, sc.getResource(),
0936: m_sourceData);
0937: }
0938: }
0939:
0940: /**
0941: * <p>Translate a given class resource into a DIG concept description, as a child
0942: * of the given expression element</p>
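* <p>For example (illustrative): <code>owl:Thing</code> becomes <code>&lt;top/&gt;</code>,
* <code>owl:Nothing</code> becomes <code>&lt;bottom/&gt;</code>, and a named class
* <code>ex:Animal</code> becomes <code>&lt;catom name="http://example.org/ns#Animal"/&gt;</code>.</p>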
0943: * @param expr The parent expression element
0944: * @param c The concept resource
0945: */
0946: protected void translateClassIdentifier(Element expr, Resource c) {
0947: if (c.equals(getOntLanguage().THING())) {
0948: // this is TOP in DIG
0949: addElement(expr, DIGProfile.TOP);
0950: return;
0951: } else if (c.equals(getOntLanguage().NOTHING())) {
0952: // this is BOTTOM in DIG
0953: addElement(expr, DIGProfile.BOTTOM);
0954: return;
0955: } else {
0956: // a named class is represented as a catom element
0957: Element catom = addElement(expr, DIGProfile.CATOM);
0958: String digConceptName = getNodeID(c.asNode());
0959: catom.setAttribute(DIGProfile.NAME, digConceptName);
0960: }
0961: }
0962:
0963: /**
0964: * <p>Translate a given restriction resource into a DIG concept description, as a child
0965: * of the given expression element</p>
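* <p>For example (illustrative): a someValuesFrom restriction on <code>ex:hasPet</code> with
* class <code>ex:Dog</code> becomes, roughly,
* <code>&lt;some&gt;&lt;ratom name="...hasPet"/&gt;&lt;catom name="...Dog"/&gt;&lt;/some&gt;</code>.</p>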
0966: * @param expr The parent expression element
0967: * @param c The restriction concept resource
0968: */
0969: protected void translateRestrictionClass(Element expr, Resource c,
0970: Model source) {
0971: Restriction r = (Restriction) c.as(Restriction.class);
0972:
0973: if (r.isAllValuesFromRestriction()) {
0974: // all values from restriction translates to a DIG <all>R E</all> axiom
0975: Element all = addElement(expr, DIGProfile.ALL);
0976: addNamedElement(all, DIGProfile.RATOM, r.getOnProperty()
0977: .getURI());
0978: addClassDescription(all, r.asAllValuesFromRestriction()
0979: .getAllValuesFrom(), source);
0980: } else if (r.isSomeValuesFromRestriction()) {
0981: // some values from restriction translates to a DIG <some>R E</some> axiom
0982: Element some = addElement(expr, DIGProfile.SOME);
0983: addNamedElement(some, DIGProfile.RATOM, r.getOnProperty()
0984: .getURI());
0985: addClassDescription(some, r.asSomeValuesFromRestriction()
0986: .getSomeValuesFrom(), source);
0987: } else if (r.isHasValueRestriction()) {
0988: // special case
0989: translateHasValueRestriction(expr, r
0990: .asHasValueRestriction());
0991: } else if (r.isMinCardinalityRestriction()) {
0992: // unqualified, so we make the qualification class TOP
0993: translateCardinalityRestriction(expr, r
0994: .asMinCardinalityRestriction().getMinCardinality(),
0995: r, DIGProfile.ATLEAST, getOntLanguage().THING(),
0996: source);
0997: } else if (r.isMaxCardinalityRestriction()) {
0998: // unqualified, so we make the qualification class TOP
0999: translateCardinalityRestriction(expr, r
1000: .asMaxCardinalityRestriction().getMaxCardinality(),
1001: r, DIGProfile.ATMOST, getOntLanguage().THING(),
1002: source);
1003: } else if (r.isCardinalityRestriction()) {
1004: // we model a cardinality restriction as the intersection of min and max restrictions
1005: Element and = addElement(expr, DIGProfile.AND);
1006:
1007: // unqualified, so we make the qualification class TOP
1008: translateCardinalityRestriction(and, r
1009: .asCardinalityRestriction().getCardinality(), r,
1010: DIGProfile.ATMOST, getOntLanguage().THING(), source);
1011: translateCardinalityRestriction(and, r
1012: .asCardinalityRestriction().getCardinality(), r,
1013: DIGProfile.ATLEAST, getOntLanguage().THING(),
1014: source);
1015: }
1016: // TODO qualified cardinality restrictions
1017: }
1018:
1019: /** Translate an enumerated class to an iset element */
1020: protected void translateEnumeratedClass(Element expr, OntClass cls,
1021: Model source) {
1022: // an anonymous enumeration of individuals
1023: Element iset = addElement(expr, DIGProfile.ISET);
1024: for (Iterator i = cls.asEnumeratedClass().listOneOf(); i
1025: .hasNext();) {
1026: RDFNode n = (RDFNode) i.next();
1027:
1028: if (n instanceof Resource) {
1029: addNamedElement(iset, DIGProfile.INDIVIDUAL,
1030: ((Resource) n).getURI());
1031: } else {
1032: LogFactory.getLog(getClass()).warn(
1033: "DIG language cannot yet represent enumerations of concrete literals: "
1034: + ((Literal) n).getLexicalForm());
1035: //translateLiteral( (Literal) n, iset );
1036: }
1037: }
1038: }
1039:
1040: /** Translate a complement class to a not element */
1041: protected void translateComplementClass(Element expr, OntClass cls,
1042: Model source) {
1043: // an anonymous complement of another class expression
1044: Element not = addElement(expr, DIGProfile.NOT);
1045: addClassDescription(not, cls.asComplementClass().getOperand(),
1046: source);
1047: }
1048:
1049: /** Translate an intersection class to an and element */
1050: protected void translateIntersectionClass(Element expr,
1051: OntClass cls, Model source) {
1052: // an anonymous intersection of class expressions
1053: Element and = addElement(expr, DIGProfile.AND);
1054: translateClassList(and, cls.asIntersectionClass().getOperands(),
1055: source);
1056: }
1057:
1058: /** Translate a union class to an or element */
1059: protected void translateUnionClass(Element expr, OntClass cls,
1060: Model source) {
1061: // an anonymous union of class expressions
1062: Element or = addElement(expr, DIGProfile.OR);
1063: translateClassList(or, cls.asUnionClass().getOperands(), source);
1064: }
1065:
1066: /**
1067: * <p>Translate a cardinality restriction, with qualification</p>
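* <p>For example (illustrative): an unqualified minimum cardinality of 2 on <code>ex:hasChild</code>
* becomes, roughly, <code>&lt;atleast num="2"&gt;&lt;ratom name="...hasChild"/&gt;&lt;top/&gt;&lt;/atleast&gt;</code>.</p>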
1068: * @param parent The parent element
1069: * @param card The cardinality value
1070: * @param r The restriction we are translating
1071: * @param exprName The restriction type (e.g. mincardinality)
1072: * @param qualType The qualification class
1073: */
1074: private void translateCardinalityRestriction(Element parent,
1075: int card, Restriction r, String exprName,
1076: Resource qualType, Model source) {
1077: Element restrict = addElement(parent, exprName);
1078: restrict.setAttribute(DIGProfile.NUM, Integer.toString(card));
1079: addNamedElement(restrict, DIGProfile.RATOM, r.getOnProperty()
1080: .getURI());
1081: addClassDescription(restrict, qualType, source);
1082: }
1083:
1084: /**
1085: * <p>Translate a has value restriction to DIG form. This is slightly tricky, because there is no
1086: * direct translation in the DIG concept language. We translate a has value restriction with an
1087: * individual value to an existential restriction of the singleton concept. We translate a has
1088: * value restriction with a datatype value either to an exists restriction on an integer
1089: * equality or a string equality, depending on the value.</p>
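* <p>Sketch (illustrative): a hasValue of the individual <code>ex:fido</code> on <code>ex:hasPet</code>
* becomes <code>&lt;some&gt;&lt;ratom .../&gt;&lt;iset&gt;&lt;individual .../&gt;&lt;/iset&gt;&lt;/some&gt;</code>,
* while an integer value 5 becomes <code>&lt;intequals val="5"&gt;&lt;attribute .../&gt;&lt;/intequals&gt;</code>.</p>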
1090: * @param expr The parent expression node
1091: * @param r The has value restriction to translate
1092: */
1093: protected void translateHasValueRestriction(Element expr,
1094: HasValueRestriction r) {
1095: RDFNode value = r.getHasValue();
1096: Property p = r.getOnProperty();
1097:
1098: // we must choose whether to use the concrete domain construction or the individual domain
1099: if (value instanceof Literal) {
1100: // int or string domain?
1101: Literal lit = (Literal) value;
1102: boolean intDomain = isIntegerType(lit.getDatatype());
1103:
1104: // encode as <intequals val="x"> or <stringequals val="x">
1105: Element eq = addElement(expr,
1106: (intDomain ? DIGProfile.INTEQUALS
1107: : DIGProfile.STRINGEQUALS));
1108: eq.setAttribute(DIGProfile.VAL, lit.getLexicalForm());
1109:
1110: addNamedElement(eq, DIGProfile.ATTRIBUTE, p.getURI());
1111: } else {
1112: // we model hasValue as an existential restriction on a very small set of possible values!
1113: Element some = addElement(expr, DIGProfile.SOME);
1114: addNamedElement(some, DIGProfile.RATOM, p.getURI());
1115:
1116: // we want the set of one individual
1117: Element iset = addElement(some, DIGProfile.ISET);
1118: addNamedElement(iset, DIGProfile.INDIVIDUAL,
1119: ((Resource) value).getURI());
1120: }
1121: }
1122:
1123: /**
1124: * <p>Translate a list of class descriptions into DIG concept descriptions.</p>
1125: */
1126: protected void translateClassList(Element expr, RDFList operands,
1127: Model source) {
1128: for (Iterator i = operands.iterator(); i.hasNext();) {
1129: addClassDescription(expr, (Resource) i.next(), source);
1130: }
1131: }
1132:
1133: /** Translate the individuals in the KB to DIG form */
1134: protected void translateIndividuals(Element expr) {
1135: for (Iterator i = m_sourceData.listIndividuals(); i.hasNext();) {
1136: translateIndividual(expr, (Resource) i.next());
1137: }
1138: }
1139:
1140: /** Translate the various axioms pertaining to an individual */
1141: protected void translateIndividual(Element expr, Resource r) {
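// Illustrative sketch of the output for a hypothetical individual ex:fido of type ex:Dog,
// with object property ex:hasOwner ex:jo and an integer-typed datatype property ex:age 5:
//   <instanceof><individual name="...fido"/><catom name="...Dog"/></instanceof>
//   <related><individual name="...fido"/><ratom name="...hasOwner"/><individual name="...jo"/></related>
//   <value><individual name="...fido"/><attribute name="...age"/><ival>5</ival></value>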
1142: Individual ind = (Individual) r.as(Individual.class);
1143: translateInstanceTypes(expr, ind);
1144:
1145: for (StmtIterator i = ind.listProperties(); i.hasNext();) {
1146: Statement s = i.nextStatement();
1147: OntProperty p = (OntProperty) s.getPredicate().as(
1148: OntProperty.class);
1149:
1150: if (p.equals(getOntLanguage().DIFFERENT_FROM())) {
1151: translateDifferentIndividuals(expr, ind, (Individual) s
1152: .getResource().as(Individual.class));
1153: } else if (p.equals(getOntLanguage().SAME_AS())) {
1154: translateSameIndividuals(expr, ind, (Individual) s
1155: .getResource().as(Individual.class));
1156: } else if (p.isObjectProperty() || p.isTransitiveProperty()
1157: || p.isSymmetricProperty()
1158: || p.isInverseFunctionalProperty()) {
1159: translateInstanceRole(expr, ind, p, (Individual) s
1160: .getResource().as(Individual.class));
1161: } else if (p.isDatatypeProperty()) {
1162: translateInstanceAttrib(expr, ind, p, s.getLiteral());
1163: }
1164: }
1165: }
1166:
1167: /** The rdf:type of each individual becomes a DIG instanceof element */
1168: protected void translateInstanceTypes(Element expr, Individual ind) {
1169: for (Iterator i = ind.listRDFTypes(true); i.hasNext();) {
1170: Resource type = (Resource) i.next();
1171: Element inst = addElement(expr, DIGProfile.INSTANCEOF);
1172: addNamedElement(inst, DIGProfile.INDIVIDUAL,
1173: getResourceID(ind));
1174: addClassDescription(inst, (OntClass) type
1175: .as(OntClass.class), m_sourceData);
1176: }
1177: }
1178:
1179: /** Translate an object property into a DIG related element */
1180: protected void translateInstanceRole(Element expr, Individual ind,
1181: OntProperty p, Individual obj) {
1182: Element related = addElement(expr, DIGProfile.RELATED);
1183: addNamedElement(related, DIGProfile.INDIVIDUAL,
1184: getResourceID(ind));
1185: addNamedElement(related, DIGProfile.RATOM, p.getURI());
1186: addNamedElement(related, DIGProfile.INDIVIDUAL,
1187: getResourceID(obj));
1188: }
1189:
1190: /** Translate a datatype property into a DIG value element */
1191: protected void translateInstanceAttrib(Element expr,
1192: Individual ind, OntProperty p, Literal obj) {
1193: Element related = addElement(expr, DIGProfile.VALUE);
1194: addNamedElement(related, DIGProfile.INDIVIDUAL,
1195: getResourceID(ind));
1196: addNamedElement(related, DIGProfile.ATTRIBUTE, p.getURI());
1197:
1198: translateLiteral(obj, related);
1199: }
1200:
1201: /** Translate an RDF literal to an IVAL or SVAL element */
1202: protected void translateLiteral(Literal lit, Element parent) {
1203: if (isIntegerType(lit.getDatatype())) {
1204: Element ival = addElement(parent, DIGProfile.IVAL);
1205: ival.appendChild(parent.getOwnerDocument().createTextNode(
1206: lit.getLexicalForm()));
1207: } else {
1208: Element sval = addElement(parent, DIGProfile.SVAL);
1209: sval.appendChild(parent.getOwnerDocument().createTextNode(
1210: lit.getLexicalForm()));
1211: }
1212: }
1213:
1214: /** Translate differentFrom(i0, i1) by asserting disjoint( iset(i0), iset(i1) ) */
1215: protected void translateDifferentIndividuals(Element expr,
1216: Individual ind, Individual other) {
1217: Element disjoint = addElement(expr, DIGProfile.DISJOINT);
1218: Element iset0 = addElement(disjoint, DIGProfile.ISET);
1219: addNamedElement(iset0, DIGProfile.INDIVIDUAL,
1220: getResourceID(ind));
1221: Element iset1 = addElement(disjoint, DIGProfile.ISET);
1222: addNamedElement(iset1, DIGProfile.INDIVIDUAL,
1223: getResourceID(other));
1224: }
1225:
1226: /** Translate sameAs(i0, i1) by asserting equalc( iset(i0), iset(i1) ) */
1227: protected void translateSameIndividuals(Element expr,
1228: Individual ind, Individual other) {
1229: Element equal = addElement(expr, DIGProfile.EQUALC);
1230: Element iset0 = addElement(equal, DIGProfile.ISET);
1231: addNamedElement(iset0, DIGProfile.INDIVIDUAL,
1232: getResourceID(ind));
1233: Element iset1 = addElement(equal, DIGProfile.ISET);
1234: addNamedElement(iset1, DIGProfile.INDIVIDUAL,
1235: getResourceID(other));
1236: }
1237:
1238: /** Translate all of the roles (ObjectProperties) in the KB */
1239: protected void translateRoles(Element expr) {
1240: Set roles = new HashSet();
1241: collectRoleProperties(roles);
1242:
1243: for (Iterator i = roles.iterator(); i.hasNext();) {
1244: translateRole(expr, (ObjectProperty) ((Property) i.next())
1245: .as(ObjectProperty.class), m_sourceData);
1246: }
1247: }
1248:
1249: /** Translate the various axioms that can apply to roles */
1250: protected void translateRole(Element expr, ObjectProperty role,
1251: Model source) {
1252: translateBinaryPropertyAxioms(expr, role.getURI(),
1253: DIGProfile.IMPLIESR, role.listSuperProperties(),
1254: DIGProfile.RATOM);
1255: translateBinaryPropertyAxioms(expr, role.getURI(),
1256: DIGProfile.EQUALR, role.listEquivalentProperties(),
1257: DIGProfile.RATOM);
1258: translateDomainRangeAxioms(expr, role.getURI(),
1259: DIGProfile.DOMAIN, role.listDomain(), DIGProfile.RATOM,
1260: source);
1261: translateDomainRangeAxioms(expr, role.getURI(),
1262: DIGProfile.RANGE, role.listRange(), DIGProfile.RATOM,
1263: source);
1264: translateInverseAxioms(expr, role, DIGProfile.RATOM);
1265:
1266: if (role.isTransitiveProperty()) {
1267: translateUnaryPropertyAxiom(expr, role.getURI(),
1268: DIGProfile.TRANSITIVE, DIGProfile.RATOM);
1269: }
1270: if (role.isFunctionalProperty()) {
1271: translateUnaryPropertyAxiom(expr, role.getURI(),
1272: DIGProfile.FUNCTIONAL, DIGProfile.RATOM);
1273: }
1274: if (role.isInverseFunctionalProperty()) {
1275: translateInverseFunctionalAxiom(expr, role,
1276: DIGProfile.RATOM);
1277: }
1278: if (role.isSymmetricProperty()) {
1279: translateInverseAxiom(expr, role, DIGProfile.RATOM, role);
1280: }
1281: }
1282:
1283: /** Translate all of the attributes (datatype properties) in the KB */
1284: protected void translateAttributes(Element expr) {
1285: for (Iterator i = m_sourceData.listDatatypeProperties(); i
1286: .hasNext();) {
1287: translateAttribute(expr, (DatatypeProperty) ((Property) i
1288: .next()).as(DatatypeProperty.class), m_sourceData);
1289: }
1290: }
1291:
1292: /** Attributes (datatype properties) have fewer axiom choices than roles */
1293: protected void translateAttribute(Element expr,
1294: DatatypeProperty attrib, Model source) {
1295: translateBinaryPropertyAxioms(expr, attrib.getURI(),
1296: DIGProfile.IMPLIESR, attrib.listSuperProperties(),
1297: DIGProfile.ATTRIBUTE);
1298: translateBinaryPropertyAxioms(expr, attrib.getURI(),
1299: DIGProfile.EQUALR, attrib.listEquivalentProperties(),
1300: DIGProfile.ATTRIBUTE);
1301: translateDomainRangeAxioms(expr, attrib.getURI(),
1302: DIGProfile.DOMAIN, attrib.listDomain(),
1303: DIGProfile.ATTRIBUTE, source);
1304: translateAttribRangeAxioms(expr, attrib.getURI(), attrib
1305: .listRange(), DIGProfile.ATTRIBUTE);
1306:
1307: if (attrib.isFunctionalProperty()) {
1308: translateUnaryPropertyAxiom(expr, attrib.getURI(),
1309: DIGProfile.FUNCTIONAL, DIGProfile.ATTRIBUTE);
1310: }
1311: }
1312:
1313: /** Helper method for binary axioms each argument of which is an ratom element */
1314: protected void translateBinaryPropertyAxioms(Element expr,
1315: String propURI, String axiomType, Iterator i,
1316: String propType) {
1317: while (i.hasNext()) {
1318: Property prop = (Property) i.next();
1319: Element binaryAxiom = addElement(expr, axiomType);
1320: addNamedElement(binaryAxiom, propType, propURI);
1321: addNamedElement(binaryAxiom, propType, prop.getURI());
1322: }
1323: }
1324:
1325: /** Helper method for unary axioms, the argument of which is an ratom element */
1326: protected void translateUnaryPropertyAxiom(Element expr,
1327: String propURI, String axiomType, String propType) {
1328: Element unaryAxiom = addElement(expr, axiomType);
1329: addNamedElement(unaryAxiom, propType, propURI);
1330: }
1331:
1332: /** Domain and range are translated as DIG domain and range elements */
1333: protected void translateDomainRangeAxioms(Element expr,
1334: String propURI, String axiomType, Iterator i,
1335: String propType, Model source) {
1336: while (i.hasNext()) {
1337: Element drAxiom = addElement(expr, axiomType);
1338: addNamedElement(drAxiom, propType, propURI);
1339: addClassDescription(drAxiom, (Resource) i.next(), source);
1340: }
1341: }
1342:
1343: /** Concrete ranges have special treatment */
1344: protected void translateAttribRangeAxioms(Element expr,
1345: String propURI, Iterator i, String propType) {
1346: while (i.hasNext()) {
1347: Resource type = (Resource) i.next();
1348: RDFDatatype dt = TypeMapper.getInstance().getTypeByName(
1349: type.getURI());
1350:
1351: Element drAxiom = addElement(expr,
1352: isIntegerType(dt) ? DIGProfile.RANGEINT
1353: : DIGProfile.RANGESTRING);
1354: addNamedElement(drAxiom, propType, propURI);
1355: }
1356: }
1357:
1358: /** Axioms for all of the inverses of a property */
1359: protected void translateInverseAxioms(Element expr,
1360: ObjectProperty p, String propType) {
1361: for (Iterator i = p.listInverse(); i.hasNext();) {
1362: translateInverseAxiom(expr, p, propType, (Property) i
1363: .next());
1364: }
1365: }
1366:
1367: /** Translate inverseOf as equality between the role and the inverse of the named inverse role */
1368: protected void translateInverseAxiom(Element expr, Property p,
1369: String propType, Property inv) {
1370: Element equalr = addElement(expr, DIGProfile.EQUALR);
1371: addNamedElement(equalr, propType, p.getURI());
1372: Element inverse = addElement(equalr, DIGProfile.INVERSE);
1373: addNamedElement(inverse, propType, inv.getURI());
1374: }
1375:
1376: /** To translate an inverse functional property, we must introduce a new skolem constant for the inverse role */
1377: protected void translateInverseFunctionalAxiom(Element expr,
1378: ObjectProperty role, String propType) {
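// Illustrative sketch of the fragment this emits for a hypothetical inverse-functional
// role ex:hasID, assuming the generated skolem name is "skolem(hasID,0)":
//   <defrole name="skolem(hasID,0)"/>
//   <functional><ratom name="skolem(hasID,0)"/></functional>
//   <equalr><ratom name="...hasID"/><inverse><ratom name="skolem(hasID,0)"/></inverse></equalr>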
1379: // we need a skolem name for the inverse property
1380: String skolemName = getSkolemName(role.getLocalName());
1381:
1382: // first we make the skolem role functional
1383: addNamedElement(expr, DIGProfile.DEFROLE, skolemName);
1384: Element functional = addElement(expr, DIGProfile.FUNCTIONAL);
1385: addNamedElement(functional, propType, skolemName);
1386:
1387: // then we make its inverse equal to role
1388: Element equalr = addElement(expr, DIGProfile.EQUALR);
1389: addNamedElement(equalr, propType, role.getURI());
1390: Element inverse = addElement(equalr, DIGProfile.INVERSE);
1391: addNamedElement(inverse, propType, skolemName);
1392: }
1393:
1394: /** Translate all of the AllDifferent axioms in the KB */
1395: protected void translateAllDifferentAxioms(Element expr) {
1396: if (m_sourceData.getProfile().ALL_DIFFERENT() != null) {
1397: for (Iterator i = m_sourceData.listAllDifferent(); i
1398: .hasNext();) {
1399: AllDifferent ad = (AllDifferent) ((Resource) i.next())
1400: .as(AllDifferent.class);
1401: translateAllDifferent(expr, ad.getDistinctMembers());
1402: }
1403: }
1404: }
1405:
1406: /** Translate a single AllDifferent declaration as a set of pair-wise disjoints */
1407: protected void translateAllDifferent(Element expr,
1408: RDFList diffMembers) {
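// Sketch: for distinct members {a, b, c} this asserts disjoint(iset(a),iset(b)),
// disjoint(iset(a),iset(c)) and disjoint(iset(b),iset(c)) -- n*(n-1)/2 axioms in total.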
1409: List dm = diffMembers.asJavaList();
1410:
1411: for (int i = 0; i < dm.size(); i++) {
1412: Individual ind0 = (Individual) ((Resource) dm.get(i))
1413: .as(Individual.class);
1414:
1415: for (int j = i + 1; j < dm.size(); j++) {
1416: Individual ind1 = (Individual) ((Resource) dm.get(j))
1417: .as(Individual.class);
1418: translateDifferentIndividuals(expr, ind0, ind1);
1419: }
1420: }
1421: }
1422:
1423: /**
1424: * <p>Answer true if the given RDF datatype represents an integer value</p>
1425: */
1426: private boolean isIntegerType(RDFDatatype type) {
1427: String typeURI = (type != null) ? type.getURI() : null;
1428: return typeURI != null && XSD_INT_TYPES.contains(typeURI);
1429: }
1430:
1431: /** Answer a skolem constant, using the given name as a root */
1432: private String getSkolemName(String root) {
1433: return "skolem(" + root + "," + m_skolemCounter++ + ")";
1435: }
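
    /* Example (added): assuming the counter starts at zero, successive calls such
     * as getSkolemName("hasChild") yield "skolem(hasChild,0)", "skolem(hasChild,1)",
     * and so on; the incrementing counter keeps the generated names unique within
     * this adapter instance.
     */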
1436:
1437: /**
1438: * <p>Answer the set of individual names known to the DIG reasoner, from the cache if possible.</p>
1439: * @return A set of the known individual names
1440: */
1441: protected Set getKnownIndividuals() {
1442: if (!m_indNamesAsked) {
1443: m_indNames.addAll(collectNamedTerms(
1444: DIGProfile.ALL_INDIVIDUALS, new String[] {
1445: DIGProfile.INDIVIDUAL_SET,
1446: DIGProfile.INDIVIDUAL }));
1447: m_indNamesAsked = true;
1448: }
1449:
1450: return m_indNames;
1451: }
1452:
1453: /**
1454: * <p>Answer the set of concept names known to the DIG reasoner, from the cache if possible.</p>
1455: * @return A set of the known concept names
1456: */
1457: protected Set getKnownConcepts() {
1458: if (!m_conceptNamesAsked) {
1459: m_conceptNames.addAll(collectNamedTerms(
1460: DIGProfile.ALL_CONCEPT_NAMES, new String[] {
1461: DIGProfile.CONCEPT_SET,
1462: DIGProfile.SYNONYMS, DIGProfile.CATOM }));
1463: m_conceptNamesAsked = true;
1464: }
1465:
1466: return m_conceptNames;
1467: }
1468:
1469: /**
1470: * <p>Answer the set of role names known to the DIG reasoner, from the cache if possible.</p>
1471: * @return A set of the known role names
1472: */
1473: protected Set getKnownRoles() {
1474: if (!m_roleNamesAsked) {
1475: m_roleNames.addAll(collectNamedTerms(
1476: DIGProfile.ALL_ROLE_NAMES, new String[] {
1477: DIGProfile.ROLE_SET, DIGProfile.SYNONYMS,
1478: DIGProfile.RATOM }));
1479: m_roleNamesAsked = true;
1480: }
1481:
1482: return m_roleNames;
1483: }
1484:
1485: /**
1486: * <p>Answer the set of named terms of a given type known to the DIG reasoner.</p>
1487: * @param queryType The query verb for the ask
1488: * @param path A list of element names identifying where the term names appear in the returned document
1489: * @return A set of the known names of the requested type
1490: */
1491: protected Set collectNamedTerms(String queryType, String[] path) {
1492: Set names = new HashSet();
1493:
1494: // ask the DIG knowledge base for the currently known terms of the requested type
1495: Document query = getConnection().createDigVerb(DIGProfile.ASKS,
1496: getProfile());
1497: createQueryElement(query, queryType);
1498: Document response = getConnection().sendDigVerb(query,
1499: getProfile());
1500:
1501: // build the path to extract the names
1502: SimpleXMLPath p = new SimpleXMLPath(true);
1503: for (int j = 0; j < path.length; j++) {
1504: p.appendElementPath(path[j]);
1505: }
1506: p.appendAttrPath(DIGProfile.NAME);
1507:
1508: // collect them into a cached set
1509: addAll(p.getAll(response), names);
1510:
1511: return names;
1512: }
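
    /* Illustrative note (added; not part of the original source): for example,
     * collecting the known concept names sends an ask document whose body is a
     * single query element (built by createQueryElement), roughly
     *
     *   <asks ...><allConceptNames/></asks>
     *
     * and, assuming the usual DIG 1.1 response shape, the answer nests the names
     * as <conceptSet><synonyms><catom name="..."/>...</synonyms></conceptSet>.
     * The SimpleXMLPath constructed above simply walks the element names given in
     * the path argument and then reads the "name" attribute off each match.
     */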
1513:
1514: /** Check whether the given node represents a class in the premises */
1515: private boolean isPremisesClass(com.hp.hpl.jena.graph.Node node,
1516: Model premises) {
1517: RDFNode rdfNode = premises.getRDFNode(node);
1518: Profile oProf = getOntLanguage();
1519:
1520: if (rdfNode instanceof Resource) {
1521: Resource r = (Resource) rdfNode;
1522: Resource any = null;
1523:
1524: return ((oProf.CLASS() != null) && premises.contains(r,
1525: RDF.type, oProf.CLASS()))
1526: || ((oProf.RESTRICTION() != null) && premises
1527: .contains(r, RDF.type, oProf.RESTRICTION()))
1528: || ((oProf.SUB_CLASS_OF() != null) && premises
1529: .contains(r, oProf.SUB_CLASS_OF(), any))
1530: || ((oProf.SUB_CLASS_OF() != null) && premises
1531: .contains(any, oProf.SUB_CLASS_OF(), r))
1532: || ((oProf.UNION_OF() != null) && premises
1533: .contains(r, oProf.SUB_CLASS_OF(), any))
1534: || ((oProf.INTERSECTION_OF() != null) && premises
1535: .contains(r, oProf.SUB_CLASS_OF(), any))
1536: || ((oProf.COMPLEMENT_OF() != null) && premises
1537: .contains(r, oProf.SUB_CLASS_OF(), any))
1538: || ((oProf.DISJOINT_WITH() != null) && premises
1539: .contains(r, oProf.DISJOINT_WITH(), any))
1540: || ((oProf.EQUIVALENT_CLASS() != null) && premises
1541: .contains(r, oProf.EQUIVALENT_CLASS(), any));
1542: }
1543:
1544: // by default it is not a class
1545: return false;
1546: }
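
    /* Note (added): the test above treats a premises node as a class if it is
     * explicitly typed as a class or restriction in the ontology language
     * profile, if it is the subject of a characteristic class axiom
     * (subClassOf, unionOf, intersectionOf, complementOf, disjointWith or
     * equivalentClass), or if it appears as the object of a subClassOf
     * statement.
     */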
1547:
1548: /** Check whether the given node represents a property (role) in the premises */
1549: private boolean isPremisesRole(com.hp.hpl.jena.graph.Node node,
1550: Model premises) {
1551: RDFNode rdfNode = premises.getRDFNode(node);
1552: Profile oProf = getOntLanguage();
1553:
1554: if (rdfNode instanceof Resource) {
1555: Resource r = (Resource) rdfNode;
1556: Resource any = null;
1557:
1558: return ((oProf.PROPERTY() != null) && premises.contains(r,
1559: RDF.type, oProf.PROPERTY()))
1560: || ((oProf.OBJECT_PROPERTY() != null) && premises
1561: .contains(r, RDF.type, oProf
1562: .OBJECT_PROPERTY()))
1563: || ((oProf.DATATYPE_PROPERTY() != null) && premises
1564: .contains(r, RDF.type, oProf
1565: .DATATYPE_PROPERTY()))
1566: || ((oProf.TRANSITIVE_PROPERTY() != null) && premises
1567: .contains(r, RDF.type, oProf
1568: .TRANSITIVE_PROPERTY()))
1569: || ((oProf.FUNCTIONAL_PROPERTY() != null) && premises
1570: .contains(r, RDF.type, oProf
1571: .FUNCTIONAL_PROPERTY()))
1572: || ((oProf.INVERSE_FUNCTIONAL_PROPERTY() != null) && premises
1573: .contains(r, RDF.type, oProf
1574: .INVERSE_FUNCTIONAL_PROPERTY()))
1575: || ((oProf.SYMMETRIC_PROPERTY() != null) && premises
1576: .contains(r, RDF.type, oProf
1577: .SYMMETRIC_PROPERTY()))
1578: || ((oProf.SUB_PROPERTY_OF() != null) && premises
1579: .contains(r, oProf.SUB_PROPERTY_OF(), any))
1580: || ((oProf.SUB_PROPERTY_OF() != null) && premises
1581: .contains(any, oProf.SUB_PROPERTY_OF(), r))
1582: || ((oProf.INVERSE_OF() != null) && premises
1583: .contains(r, oProf.INVERSE_OF(), any))
1584: || ((oProf.INVERSE_OF() != null) && premises
1585: .contains(any, oProf.INVERSE_OF(), r));
1586: }
1587:
1588: // by default it is not a role
1589: return false;
1590: }
1591:
1592: /** Normalise any variables to Node.ANY */
1593: private com.hp.hpl.jena.graph.Node normaliseNode(
1594: com.hp.hpl.jena.graph.Node n) {
1595: return n.isConcrete() ? n : com.hp.hpl.jena.graph.Node.ANY;
1596: }
1597:
1598: //==============================================================================
1599: // Inner class definitions
1600: //==============================================================================
1601:
1602: /** Encapsulates the identification information from a DIG reasoner */
1603: private class DigIdentifierImpl implements DIGIdentifier {
1604: private Document m_id;
1605:
1606: private DigIdentifierImpl(Document id) {
1607: m_id = id;
1608: }
1609:
1610: public String getName() {
1611: return m_id.getDocumentElement().getAttribute(
1612: DIGProfile.NAME);
1613: }
1614:
1615: public String getVersion() {
1616: return m_id.getDocumentElement().getAttribute(
1617: DIGProfile.VERSION);
1618: }
1619:
1620: public String getMessage() {
1621: return m_id.getDocumentElement().getAttribute(
1622: DIGProfile.MESSAGE);
1623: }
1624:
1625: public Iterator supportsLanguage() {
1626: return supports(DIGProfile.LANGUAGE);
1627: }
1628:
1629: public Iterator supportsTell() {
1630: return supports(DIGProfile.TELL);
1631: }
1632:
1633: public Iterator supportsAsk() {
1634: return supports(DIGProfile.ASK);
1635: }
1636:
1637: private Iterator supports(String support) {
1638: Element supports = getChild(m_id.getDocumentElement(),
1639: DIGProfile.SUPPORTS);
1640: return childElementNames(getChild(supports, support));
1641: }
1642:
1643: /** Answer an iterator of the child node names for a given node */
1644: private Iterator childElementNames(Element node) {
1645: ArrayList l = new ArrayList();
1646: NodeList nl = node.getChildNodes();
1647:
1648: for (int i = 0; i < nl.getLength(); i++) {
1649: org.w3c.dom.Node n = nl.item(i);
1650:
1651: // ignore whitespace text etc
1652: if (n instanceof Element) {
1653: l.add(n.getNodeName());
1654: }
1655: }
1656:
1657: return l.iterator();
1658: }
1659:
1660: /** Answer the first child element of the given node with the given tag name */
1661: private Element getChild(Element node, String name) {
1662: return (Element) node.getElementsByTagName(name).item(0);
1663: }
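
        /* Illustrative note (added; not part of the original source): this class
         * reads the response to the DIG identifier request. Assuming the standard
         * DIG 1.1 layout, the document element carries name, version and message
         * attributes and a <supports> child grouping <language>, <tell> and <ask>
         * sections, roughly:
         *
         *   <identifier name="..." version="..." message="...">
         *     <supports>
         *       <language>...</language>
         *       <tell>...</tell>
         *       <ask>...</ask>
         *     </supports>
         *   </identifier>
         *
         * The supports(...) helper returns the names of the elements found under
         * the requested section.
         */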
1664: }
1665: }
1666:
1667: /*
1668: * (c) Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Hewlett-Packard Development Company, LP
1669: * All rights reserved.
1670: *
1671: * Redistribution and use in source and binary forms, with or without
1672: * modification, are permitted provided that the following conditions
1673: * are met:
1674: * 1. Redistributions of source code must retain the above copyright
1675: * notice, this list of conditions and the following disclaimer.
1676: * 2. Redistributions in binary form must reproduce the above copyright
1677: * notice, this list of conditions and the following disclaimer in the
1678: * documentation and/or other materials provided with the distribution.
1679: * 3. The name of the author may not be used to endorse or promote products
1680: * derived from this software without specific prior written permission.
1681: *
1682: * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
1683: * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
1684: * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
1685: * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
1686: * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
1687: * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
1688: * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
1689: * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
1690: * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
1691: * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
1692: */