如何理解reasoner ProgressMonitor输出



我是语义web领域的新手,我正在尝试比较更多的推理器。这是我的代码:

OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
File file = new File(args[0]);
OWLOntology ontology = manager.loadOntologyFromOntologyDocument(file);
Set<OWLClass> classes = ontology.getClassesInSignature();

String inferredFile = args[1];
//test for correctly uploading ontology
OWLDataFactory df = manager.getOWLDataFactory();     
Reasoner jfact = Reasoner.JFACT;
System.out.println(RunReasoner(jfact, df,ontology,manager,inferredFile));

}
// Enum of the reasoner back-ends that RunReasoner knows how to instantiate.
// The selected constant picks the corresponding branch in RunReasoner; note
// that FACT's branch is commented out there, so selecting FACT currently
// falls through to the invalid-reasoner path.
public enum Reasoner{
HERMIT, // HermiT, via org.semanticweb.HermiT ReasonerFactory
PELLET, // Openllet (Pellet fork), via OpenlletReasonerFactory
KONCLUDE, // Konclude, reached remotely over OWLlink HTTP (localhost:8080)
JFACT, // JFact, the Java port of FaCT++
FACT, // FaCT++ native — branch commented out in RunReasoner
ELK // ELK, via ElkReasonerFactory (EL profile only)
}   
/**
 * Runs the selected reasoner over {@code ontology}, checks consistency,
 * materializes a set of inferred axioms and saves them to {@code inferredFile}.
 *
 * @param reasoner     which reasoner back-end to instantiate
 * @param df           data factory used by the inferred-ontology generator
 * @param ontology     the loaded input ontology
 * @param manager      manager that owns {@code ontology}; also creates and
 *                     saves the inferred ontology
 * @param inferredFile path the inferred ontology is written to
 * @return a status message: "done <reasoner>", "Reasoner non valido", or an
 *         inconsistency warning
 * @throws OWLOntologyCreationException if the inferred ontology cannot be created
 * @throws IOException                  if the output file cannot be written
 * @throws OWLOntologyStorageException  if saving the inferred ontology fails
 */
public static String RunReasoner(Reasoner reasoner, OWLDataFactory df, OWLOntology ontology, OWLOntologyManager manager, String inferredFile) throws OWLOntologyCreationException, FileNotFoundException, IOException, OWLOntologyStorageException {
    String esito = "";
    OWLReasoner reasoner_object = null;
    if (reasoner == Reasoner.HERMIT) {
        /**************** HERMIT ****************/
        OWLReasonerFactory rf = new ReasonerFactory();
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        Configuration configuration = new Configuration();
        configuration.reasonerProgressMonitor = progressMonitor;
        // HermiT would otherwise refuse ontologies using datatypes it does not support.
        configuration.ignoreUnsupportedDatatypes = true;
        reasoner_object = rf.createReasoner(ontology, configuration);
    } else if (reasoner == Reasoner.KONCLUDE) {
        /**************** KONCLUDE (remote, via OWLlink HTTP) ****************/
        // NOTE(review): the plain createNonBufferingReasoner(ontology) call
        // attaches no configuration, so Konclude shows no progress output and
        // talks to the factory's default end-point. To target a specific
        // server, pass an OWLlinkReasonerConfiguration built from the URL
        // (e.g. new URL("http://localhost:8080")) to the factory.
        OWLlinkHTTPXMLReasonerFactory factory = new OWLlinkHTTPXMLReasonerFactory();
        reasoner_object = factory.createNonBufferingReasoner(ontology);
    } else if (reasoner == Reasoner.JFACT) {
        /**************** JFACT ****************/
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
        JFactFactory factory = new JFactFactory();
        reasoner_object = factory.createNonBufferingReasoner(ontology, conf);
    }
    // FaCT++ (Reasoner.FACT) needs the native FaCTPlusPlusReasonerFactory and
    // its JNI library; the branch is intentionally disabled:
    //   TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
    //   OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
    //   FaCTPlusPlusReasonerFactory factory = new FaCTPlusPlusReasonerFactory();
    //   reasoner_object = factory.createNonBufferingReasoner(ontology, conf);
    else if (reasoner == Reasoner.ELK) {
        /**************** ELK ****************/
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
        ElkReasonerFactory factory = new ElkReasonerFactory();
        reasoner_object = factory.createNonBufferingReasoner(ontology, conf);
    } else if (reasoner == Reasoner.PELLET) {
        /**************** PELLET (Openllet) ****************/
        TimedConsoleProgressMonitor progressMonitor = new TimedConsoleProgressMonitor();
        OWLReasonerConfiguration conf = new SimpleConfiguration(progressMonitor);
        reasoner_object = OpenlletReasonerFactory.getInstance().createReasoner(ontology, conf);
    } else {
        // BUG FIX: the original fell through with reasoner_object == null and
        // threw a NullPointerException at isConsistent(). Return immediately
        // for any reasoner that has no active branch (e.g. FACT).
        return "Reasoner non valido";
    }

    boolean consistencyCheck = reasoner_object.isConsistent();
    if (consistencyCheck) {
        // Force the inferences we are about to materialize to be computed now.
        reasoner_object.precomputeInferences(InferenceType.CLASS_HIERARCHY,
                InferenceType.CLASS_ASSERTIONS, InferenceType.OBJECT_PROPERTY_HIERARCHY,
                InferenceType.DATA_PROPERTY_HIERARCHY, InferenceType.OBJECT_PROPERTY_ASSERTIONS);

        List<InferredAxiomGenerator<? extends OWLAxiom>> generators = new ArrayList<>();
        generators.add(new InferredSubClassAxiomGenerator());
        generators.add(new InferredClassAssertionAxiomGenerator());
        generators.add(new InferredDataPropertyCharacteristicAxiomGenerator());
        generators.add(new InferredEquivalentClassAxiomGenerator());
        generators.add(new InferredEquivalentDataPropertiesAxiomGenerator());
        generators.add(new InferredEquivalentObjectPropertyAxiomGenerator());
        generators.add(new InferredInverseObjectPropertiesAxiomGenerator());
        generators.add(new InferredObjectPropertyCharacteristicAxiomGenerator());
        // NOTE: InferredPropertyAssertionGenerator significantly slows down
        // inference computation.
        generators.add(new org.semanticweb.owlapi.util.InferredPropertyAssertionGenerator());
        // FIX: the original added InferredSubClassAxiomGenerator a second time
        // here, duplicating work; it also addAll'd an always-empty
        // individualAxioms list (a no-op) — both removed.
        generators.add(new InferredSubDataPropertyAxiomGenerator());
        generators.add(new InferredSubObjectPropertyAxiomGenerator());
        generators.add(new InferredDisjointClassesAxiomGenerator());

        // Generates an ontology whose axioms are the inferences supplied by the reasoner.
        InferredOntologyGenerator iog = new InferredOntologyGenerator(reasoner_object, generators);
        OWLOntology inferredAxiomsOntology = manager.createOntology();
        iog.fillOntology(df, inferredAxiomsOntology);
        // Materialized axiom count — this answers "how many inferred axioms?".
        System.out.println(inferredAxiomsOntology.getAxiomCount());

        File inferredOntologyFile = new File(inferredFile);
        // Stream so the ontology manager can write directly to the file;
        // try-with-resources guarantees the stream is closed.
        try (OutputStream outputStream = new FileOutputStream(inferredOntologyFile)) {
            // We use the same format as for the input ontology.
            manager.saveOntology(inferredAxiomsOntology, outputStream);
        }
        esito = "done " + reasoner.toString();
        reasoner_object.dispose();
    } // End if consistencyCheck
    else {
        esito = reasoner.toString() + " -- Inconsistent input Ontology, Please check the OWL File";
    }
    return esito;
}

我的输出是:

Loading ...
busy ...
... finished in 3484.5453
Classifying ...
1%  73
2%  56...

有人能解释一下这是什么意思吗?是否有一些关于进度监视器输出的文档?第二个问题:如何获得推断类/公理的数量?谢谢你的帮助,Rita

该类的 Javadoc 可在线获取(它也是类源代码的一部分):http://owlcs.github.io/owlapi/apidocs_5/org/semanticweb/owlapi/reasoner/TimedConsoleProgressMonitor.html

百分比增量取决于推理器的实现(可能存在不可靠性,因为推理器只能对已完成的工作量和剩余的工作量做出有根据的猜测)。百分比后面的数字是自上一次百分比增量以来经过的毫秒数。

推断出的公理并不会全部预先计算出来,而是在需要时被惰性地求值,所以它们不容易直接计数。如果您希望物化(materialize)推断结果,请查看 InferredAxiomGenerator 的各个实现,可用选项见:http://owlcs.github.io/owlapi/apidocs_5/index.html?org/semanticweb/owlapi/reasoner/package-summary.html

相关内容

  • 没有找到相关文章

最新更新