Skip to content
Snippets Groups Projects
Commit 343abb32 authored by William CHARLES's avatar William CHARLES
Browse files

Initial commit

parents
No related branches found
No related tags found
No related merge requests found
Showing
with 863 additions and 0 deletions
import os
from SPARQLWrapper import SPARQLWrapper, JSON
import itertools
from rdflib import Graph
import jpype
import jpype.imports
from threading import Thread

# Endpoints of the GraphDB repository: one for read queries, one for updates.
sparqlQuery="http://c2200024:7200/repositories/dhfc"
sparqlUpdate="http://localhost:7200/repositories/dhfc/statements"
# Base IRI used to mint resource names, and the DHFC vocabulary namespace.
baseNameSpace="https://cluedo4kg.irit.fr/repositories/sparqluedo"
dhfc="https://w3id.org/DHFC#"

# Build the JVM classpath from every jar in ./lib, then boot the JVM.
# The org.semanticweb / Pellet imports below only resolve once the JVM runs.
# NOTE(review): r"lib\\" contains two literal backslashes (raw string);
# Windows tolerates it, but os.path.join("lib", jar) would be portable -- TODO confirm.
classpath = []
for jar in os.listdir("lib"):
    classpath.append(r"lib\\"+jar)
jpype.startJVM(classpath=classpath)

from java.util import ArrayList
from java.io import ByteArrayInputStream
from org.semanticweb.owlapi.rdf.model import RDFTranslator
from org.semanticweb.owlapi.apibinding import OWLManager
from java.util.concurrent.atomic import AtomicInteger
from java.util import IdentityHashMap, HashSet
from org.semanticweb.owlapi.util import AlwaysOutputId
from org.semanticweb.owlapi.formats import TurtleDocumentFormat
from com.clarkparsia.pellet.owlapiv3 import PelletReasonerFactory
from org.semanticweb.owlapi.reasoner import InferenceType
from com.clarkparsia.owlapi.explanation import PelletExplanation
from org.semanticweb.owlapi.util import InferredPropertyAssertionGenerator, InferredClassAssertionAxiomGenerator, InferredOntologyGenerator
def updateSPARQL(endpoint, query, step=0):
    """Send a SPARQL UPDATE to `endpoint` via POST.

    If the update fails (e.g. the payload is too large for the server), the
    query is split roughly in half on its "GRAPH " clauses and each half is
    retried, recursing at most 5 levels deep before re-raising.

    Fix: the bare ``except:`` (which also swallowed KeyboardInterrupt /
    SystemExit) is narrowed to ``except Exception``.

    :param endpoint: SPARQL update endpoint URL
    :param query: full SPARQL update string
    :param step: current recursion depth (internal)
    """
    print(query)
    sparql = SPARQLWrapper(endpoint)
    sparql.setMethod("POST")
    sparql.setQuery(query)
    try:
        ret = sparql.query()
        ret.response.read()
    except Exception:
        # Split-and-retry: first half = prologue + first len(sp)//2 GRAPH clauses.
        sp = query.split("GRAPH ")
        qr = query.split("{")
        q1 = ""
        for x in range(len(sp) // 2):
            if x > 0:
                q1 += "GRAPH "
            q1 += sp[x]
        q1 += "}"
        # Second half: same prologue (text before the first '{') + remaining clauses.
        q2 = qr[0] + "{"
        for x in range(len(sp) // 2, len(sp)):
            q2 += "GRAPH "
            q2 += sp[x]
        if step < 5:
            updateSPARQL(endpoint, q1, step=step + 1)
            updateSPARQL(endpoint, q2, step=step + 1)
        else:
            print("Step: " + str(step))
            raise
class ReturnableThread(Thread):
    """A Thread whose target's return value is captured in ``self.result``."""

    def __init__(self, target):
        super().__init__()
        self.target = target
        # Filled in by run(); stays None until the thread has finished.
        self.result = None

    def run(self) -> None:
        self.result = self.target()
def querySPARQL(endpoint, query):
    """Run a SELECT query against `endpoint` and return the JSON bindings list."""
    print(endpoint)
    print(query)
    client = SPARQLWrapper(endpoint)
    client.setReturnFormat(JSON)
    client.setQuery(query)
    return client.queryAndConvert()["results"]["bindings"]
def toList(dic):
    """Flatten SPARQL JSON bindings into a list of rows of plain string values.

    Each binding dict becomes a list of its entries' "value" fields, in
    insertion order.
    """
    return [[cell["value"] for cell in row.values()] for row in dic]
def interpretationCount(sourceName):
    """Return the number of dhfc:hasInterpretation links on the given source."""
    query = ("PREFIX dhfc: <" + dhfc + "> SELECT (COUNT(?i) AS ?count) {<"
             + baseNameSpace + "/" + sourceName + "> dhfc:hasInterpretation ?i} ")
    # COUNT yields a single binding; return its first value as an int.
    for row in querySPARQL(sparqlQuery, query):
        for cell in row.values():
            return int(cell["value"])
def assertionCount(sourceName, author):
    """Count the assertion sets `author` has made over the given source."""
    query = ("PREFIX dhfc: <" + dhfc + "> SELECT (COUNT(?o) AS ?count) {?o dhfc:assertedFrom <"
             + baseNameSpace + "/" + sourceName + ">. <" + baseNameSpace + "/" + author
             + "> dhfc:assertsThrough ?a. ?a dhfc:assertsOver ?o} ")
    # COUNT yields a single binding; return its first value as an int.
    for row in querySPARQL(sparqlQuery, query):
        for cell in row.values():
            return int(cell["value"])
def blockPrint():
    """Silence all subsequent print() output by redirecting stdout to os.devnull.

    Bug fix: the original referenced ``sys`` without importing it anywhere in
    the module, so the first call raised NameError. The import is kept local
    to avoid touching the module's import block.
    """
    import sys
    sys.stdout = open(os.devnull, 'w')
def newSource(sourceName):
    """Declare <baseNameSpace/sourceName> as a dhfc:Source in the store."""
    query = (f"PREFIX dhfc: <{dhfc}>INSERT DATA "
             f"{{<{baseNameSpace}/{sourceName}> a dhfc:Source}} ")
    updateSPARQL(sparqlUpdate, query)
def exists(eName):
    """ASK whether <baseNameSpace/eName> occurs as a subject in the store."""
    ask = """
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
ASK WHERE {
<""" + baseNameSpace + "/" + eName + """> ?p ?o.
}
"""
    client = SPARQLWrapper(sparqlQuery)
    client.setQuery(ask)
    client.setReturnFormat(JSON)
    return client.query().convert()["boolean"]
def normalize():
    """Deduplicate reified rdf:Statement nodes in the store.

    For each (subject, predicate, object) combination, keeps one
    representative statement node (SAMPLE) and rewires every triple that
    referenced a duplicate node onto the representative, deleting the
    duplicates' triples.
    """
    # NOTE(review): the inner SELECT groups by ?l but never binds ?l in its
    # WHERE clause, so the named-graph dimension is effectively ignored
    # there -- TODO confirm intent. Currently only called from commented-out
    # code in addInferenceResults.
    query="""PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX dhfc: <http://dhfc/>
DELETE {?bNode a rdf:Statement.?bNode rdf:subject ?s.
?bNode rdf:predicate ?p. ?bNode rdf:object ?o.
?bNode ?p2 ?o2.
?s2 ?p3 ?bNode.
?bNode dhfc:inNamedGraph ?l.
}
INSERT {?b a rdf:Statement.?b rdf:subject ?s.
?b rdf:predicate ?p. ?b rdf:object ?o.
?b ?p2 ?o2.
?s2 ?p3 ?b.
?b dhfc:inNamedGraph ?l.
}
WHERE {
{SELECT ?s ?p ?o ?l (SAMPLE(?bNode) AS ?b) WHERE {
?bNode a rdf:Statement.?bNode rdf:subject ?s.
?bNode rdf:predicate ?p. ?bNode rdf:object ?o.
} GROUP BY ?s ?p ?o ?l
}
?bNode a rdf:Statement.?bNode rdf:subject ?s.
?bNode rdf:predicate ?p.
?bNode rdf:object ?o.
OPTIONAL{?bNode dhfc:inNamedGraph ?l}
?bNode ?p2 ?o2.
?s2 ?p3 ?bNode.
}"""
    updateSPARQL(sparqlUpdate, query)
def newInterpretation(sourceName):
    """Create interpretation #<count> for a source, creating the source first
    if it does not exist yet.

    Returns the IRI of the newly created interpretation universe.
    """
    if not exists(sourceName):
        newSource(sourceName)
    number = interpretationCount(sourceName)
    iri = baseNameSpace + "/" + sourceName + "#interpretation" + str(number)
    query = ("""PREFIX dhfc: <""" + dhfc + """> INSERT DATA {<""" + iri
             + """> a dhfc:InterpretationUniverse. <""" + baseNameSpace + "/" + sourceName
             + """> dhfc:hasInterpretation <""" + iri + """>} """)
    updateSPARQL(sparqlUpdate, query)
    return iri
def assertionProtocol(sourceName, assertion, triplesSerialization, new, author=None):
    """Insert an assertion named graph, then recompute interpretation membership.

    Writes `triplesSerialization` into GRAPH <assertion> (plus provenance
    metadata when `new` and an author are given), computes a consistency
    verdict per interpretation in parallel threads, and applies each verdict
    with editGraph (also threaded).

    Bug fix: the worker lambda captured the loop variable `interp` by
    reference, so concurrently started threads could all check the last
    interpretation of the loop; `interp` is now bound as a default argument
    at lambda-creation time.
    """
    print(triplesSerialization)
    query = """PREFIX dhfc: <""" + dhfc + """> INSERT DATA {
GRAPH <""" + assertion + """> {""" + triplesSerialization + """}."""
    if new and author is not None:
        query += """<""" + baseNameSpace + "/" + author + """> dhfc:assertsThrough [dhfc:assertsOver [dhfc:assertedFrom <""" + baseNameSpace + "/" + sourceName + """>; dhfc:assertsOver <""" + baseNameSpace + "/" + sourceName + "#interpretation" + str(assertionCount(sourceName, author)) + "_" + author + """>]]"""
    query += """}"""
    updateSPARQL(sparqlUpdate, query)
    if new:
        pattern = checkInterpretations(sourceName, toAdd=[assertion])
    else:
        pattern = {}
        # NOTE(review): getListInterpretation returns rows (lists), which are
        # unhashable dict keys -- confirm this edit path against callers.
        interpretationsToCheck = getListInterpretation(assertion)
        threads = {}
        for interp in interpretationsToCheck:
            t = ReturnableThread(target=lambda interp=interp: checkInterpretationConsistency(getListAssertion(interp), toAdd=[]))
            t.start()
            threads[interp] = t
        for interp in threads:
            threads[interp].join()
            pattern[interp] = threads[interp].result
    l = []
    for interpretation in pattern:
        t = Thread(target=editGraph, args=(pattern, interpretation, sourceName, assertion, new))
        t.start()
        l.append(t)
    for t in l:
        t.join()
def editGraph(pattern, interpretation, sourceName, assertion, new):
    """Apply one interpretation's consistency verdict.

    If the interpretation is consistent with a NEW assertion, link the
    assertion to it and regenerate its inferences. Otherwise (inconsistent,
    or an edited assertion), detach the assertion when editing and fragment
    the interpretation into maximal consistent subsets.
    """
    if pattern[interpretation] and new:
        query = """PREFIX dhfc: <""" + dhfc + """>INSERT DATA {
<""" + interpretation + """> dhfc:hasAssertion <""" + assertion + """>.
}
"""
        updateSPARQL(sparqlUpdate, query)
        addInferenceResults(interpretation)
    else:
        if not new:
            # Bug fix: the original used a quadruple quote (`""""`), producing
            # `<IRI"> dhfc:hasAssertion ...` -- a stray double quote inside the
            # IRI -- so this DELETE could never match the intended triple.
            query = """PREFIX dhfc: <""" + dhfc + """>DELETE DATA {
<""" + interpretation + """> dhfc:hasAssertion <""" + assertion + """>.
}"""
            updateSPARQL(sparqlUpdate, query)
        fragmentInterpretation(sourceName, interpretation, assertion)
def newAssertionSet(sourceName, author, triplesSerialization):
    """Create a new assertion set for `author` over `sourceName`.

    The triples are only added when they are self-consistent under the
    reasoner; otherwise a message is printed and nothing is written.
    """
    if not reasonerConsistency(triplesSerialization):
        print("Inconsistent triples not added !")
        return
    # First assertion ever on this source: bootstrap interpretation #0.
    if interpretationCount(sourceName) == 0:
        newInterpretation(sourceName)
    n = assertionCount(sourceName, author)
    assertion = baseNameSpace + "/" + sourceName + "#interpretation" + str(n) + "_" + author
    assertionProtocol(sourceName, assertion, triplesSerialization, True, author=author)
def editAssertionSet(sourceName, assertion, triplesSerialization):
    """Re-run the assertion protocol on an existing assertion graph, but only
    when the new triples are self-consistent under the reasoner."""
    if not reasonerConsistency(triplesSerialization):
        return
    assertionProtocol(sourceName, assertion, triplesSerialization, False)
def checkInterpretations(sourceName, toAdd=[]):
    """Check every interpretation of `sourceName` for consistency, in parallel.

    Returns a dict {interpretation IRI: bool}. `toAdd` is an extra assertion
    row tentatively included in every check.

    Bug fix: the worker lambda captured the loop variable `e` by reference,
    so all threads could end up checking the interpretation from the last
    loop iteration; the IRI is now bound as a default argument when the
    lambda is created. (The mutable default `toAdd=[]` is kept for interface
    compatibility; it is never mutated here.)
    """
    query = "PREFIX dhfc: <" + dhfc + "> SELECT ?i {<" + baseNameSpace + "/" + sourceName + "> dhfc:hasInterpretation ?i} "
    ret = toList(querySPARQL(sparqlQuery, query))
    dic = {}
    threads = {}
    for e in ret:
        t = ReturnableThread(target=lambda iri=e[0]: checkInterpretationConsistency(getListAssertion(iri), toAdd=[toAdd]))
        t.start()
        threads[e[0]] = t
    for key in threads:
        threads[key].join()
        dic[key] = threads[key].result
    return dic
def getCoreNamedGraph(graphName):
    """Build a CONSTRUCT query selecting the non-reification triples of a graph.

    NOTE(review): this function only builds the query string -- it never
    executes it and never returns it, so calling it currently has no effect
    (returns None). The query also uses the dhfc: prefix without declaring a
    PREFIX for it. TODO: confirm intended behavior (likely `return query` or
    an execution call is missing).
    """
    query="""PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
CONSTRUCT {
?s ?p ?o
}
WHERE {
GRAPH <"""+graphName+"""> {?s ?p ?o.
FILTER(?p!= rdf:subject && ?p!= rdf:predicate && ?p!= rdf:object && ?p != dhfc:hasRDFPart && ?p != dhfc:inNamedGraph)}
FILTER(?o!= rdf:Statement && ?p!= dhfc:owlAxiom)}
}
"""
def flatten(l):
    """Return a shallow copy of `l`.

    Despite the name, no actual flattening happens: each element is appended
    as-is, so this is equivalent to ``list(l)``.
    """
    return list(l)
def ontologyContent():
    """Return the serialized union of all graphs typed dhfc:OntologyGraph."""
    client = SPARQLWrapper(sparqlQuery)
    construct = """
PREFIX dhfc: <""" + dhfc + """>
CONSTRUCT {
?s ?p ?o
}
WHERE {
GRAPH ?g {?s ?p ?o}
?g a dhfc:OntologyGraph
}
"""
    client.setQuery(construct)
    return client.queryAndConvert().serialize()
def getListAssertion(interpretation):
    """Return the assertion graphs attached to an interpretation, as rows
    (one-element lists of IRIs)."""
    query = (f"PREFIX dhfc: <{dhfc}> SELECT ?a "
             f"{{<{interpretation}> dhfc:hasAssertion ?a}}\n")
    return flatten(toList(querySPARQL(sparqlQuery, query)))
def getListInterpretation(assertion):
    """Return the interpretations containing `assertion`, as rows
    (one-element lists of IRIs)."""
    query = (f"PREFIX dhfc: <{dhfc}> SELECT ?a "
             f"{{?a dhfc:hasAssertion <{assertion}> }}\n")
    return flatten(toList(querySPARQL(sparqlQuery, query)))
def checkInterpretationConsistency( assertions, toAdd=[]) :
    """Check whether a set of assertion graphs (plus optional extras) is
    consistent under the Pellet reasoner.

    Concatenates the Turtle content of the ontology graph(s) and of every
    assertion graph in `assertions + toAdd`, then feeds the whole text to
    reasonerConsistency.

    :param assertions: rows ([IRI] lists) of assertion graphs, or None
    :param toAdd: extra rows tentatively included in the check
    :returns: True when consistent, or when `assertions` is None
    """
    # NOTE(review): mutable default `toAdd=[]` -- never mutated here, but a
    # None default would be the safer idiom.
    if assertions!=None :
        ret=assertions+toAdd
        construct=ontologyContent()
        for asert in ret :
            # asert is a row; asert[0] is the named-graph IRI.
            sparql = SPARQLWrapper(sparqlQuery)
            sparql.setQuery("""
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
CONSTRUCT {
?s ?p ?o
}
WHERE {
GRAPH <"""+asert[0]+"""> {?s ?p ?o.}
}
""")
            construct+= str(sparql.queryAndConvert().serialize())
        return reasonerConsistency(construct)
    else :
        return True
def hashEncoding(triple):
    """Concatenate the hex-encoded Python hashes of the triple's three parts.

    Used to mint deterministic (per-process) node names for reified triples.
    """
    s, p, o = triple[0], triple[1], triple[2]
    return "".join(hex(hash(part)) for part in (s, p, o))
def reification(t, graphLocation=None):
    """Reify `t`: a single triple when len(t) == 1, otherwise an OWL axiom
    spanning several triples.

    Returns (sparql_fragment, reified_node_iri); when `graphLocation` is
    given the fragment is wrapped in a GRAPH clause.
    """
    if len(t) == 1:
        result = reifiedVersion(t[0], graphLocation)
    else:
        result = reifiedOWLAxiom(t, graphLocation)
    if graphLocation is not None:
        result = ("GRAPH <" + graphLocation + "> {" + result[0] + "}", result[1])
    return result
def reifiedOWLAxiom(triples, graphLocation=None):
    """Reify a multi-triple OWL axiom.

    The axiom node's name is the base namespace plus the hash encodings of
    all its triples; each triple is reified individually and linked to the
    axiom node via dhfc:hasRDFPart.

    Returns (sparql_fragment, axiom_node_iri).
    """
    axiomName = baseNameSpace + "".join(hashEncoding(t) for t in triples)
    sparql = """<""" + axiomName + """> a dhfc:owlAxiom. """
    for t in triples:
        part, partNode = reifiedVersion(t, graphLocation=graphLocation)
        sparql += part + """ <""" + axiomName + """> dhfc:hasRDFPart <""" + partNode + ">. "
    return (sparql, axiomName)
def reifiedVersion(triple, graphLocation=None):
    """Reify one (s, p, o) triple as an rdf:Statement node.

    The node name is the base namespace plus the triple's hash encoding;
    when `graphLocation` is given, a dhfc:inNamedGraph link is appended.

    Returns (sparql_fragment, statement_node_iri).
    """
    node = baseNameSpace + hashEncoding(triple)
    s, p, o = triple[0], triple[1], triple[2]
    sparql = (f"<{node}> a rdf:Statement. <{node}> rdf:subject <{s}>.\n"
              f"<{node}> rdf:predicate <{p}>.\n"
              f"<{node}> rdf:object <{o}>. ")
    if graphLocation is not None:
        sparql += f"<{node}> dhfc:inNamedGraph <{graphLocation}>. "
    return (sparql, node)
def clearInferenceGraph(interpretation):
    """Delete the inference graph entailed by `interpretation`, including its
    reified support statements, then DROP the graph itself.

    Bug fixes: the SELECT query was missing its closing '}', so it always
    errored and the function unconditionally reported that no inference graph
    exists; the bare ``except:`` is narrowed to ``except Exception`` (an
    IndexError is still the expected "no graph yet" case).
    """
    print("Suppression")
    query = """PREFIX dhfc: <""" + dhfc + """> SELECT ?g WHERE {<""" + interpretation + """> dhfc:entails ?g.}"""
    try:
        res = toList(querySPARQL(sparqlQuery, query))[0][0]
        clearSuppports = """ PREFIX dhfc: <""" + dhfc + """> DELETE {?s ?p ?o. ?o2 ?p ?s.} WHERE {{?s dhfc:inNamedGraph <""" + res + """>.}UNION {?s dhfc:hasRDFPart ?s1. ?s1 dhfc:inNamedGraph <""" + res + """>.} UNION {?s dhfc:isSupportFor ?s2. ?s2 dhfc:hasRDFPart ?s1. ?s1 dhfc:inNamedGraph <""" + res + """>.} UNION {?s dhfc:isSupportFor ?s2. ?s2 dhfc:inNamedGraph <""" + res + """>.} ?s ?p ?o. ?o2 ?p2 ?s.}"""
        updateSPARQL(sparqlUpdate, clearSuppports)
        updateSPARQL(sparqlUpdate, """DROP GRAPH <""" + res + """>""")
    except Exception:
        print("No Inference Graph Found for : " + interpretation)
def addInferenceResults(interpretation) :
    """Recompute and store the inference graph of an interpretation.

    Steps: drop the previous inference graph; gather the Turtle content of
    every assertion graph (remembering, per triple, which assertion graph(s)
    it came from); append the ontology content; run Pellet to get inferred
    axioms with explanations; write the inferred triples into a new
    `<interpretation>___inferences` named graph, with each inference's
    explanation reified as support statements.
    """
    # Clear previous inference
    clearInferenceGraph(interpretation)
    # Collect and map data from the interpretation:
    # locationsDic maps each (s, p, o) string triple to the assertion rows
    # that contain it, so explanations can point back at their origin graph.
    locationsDic={}
    construct=""
    assertions=getListAssertion(interpretation)
    for asert in assertions :
        sparql = SPARQLWrapper(sparqlQuery)
        sparql.setQuery("""
CONSTRUCT {
?s ?p ?o
}
WHERE {
GRAPH <"""+asert[0]+"""> {?s ?p ?o}
}
""")
        res = str(sparql.queryAndConvert().serialize())
        g=Graph()
        g.parse(data=res)
        for s, p, o in g :
            triple=(str(s), str(p), str(o))
            if triple not in locationsDic :
                locationsDic[triple]=[]
            locationsDic[triple].append(asert)
        construct+=res
    # Get ontology content :
    construct+=ontologyContent()
    # Get reasoning results :
    inferences=getInferedAxiomWithExplanation(construct)
    # Act upon reasoning results
    # Edit for correct representation (OWL Axiom)
    inferGraph=interpretation+"___"+"inferences"
    query="""PREFIX dhfc: <"""+dhfc+"> INSERT DATA {<"""+interpretation+"""> dhfc:entails <"""+inferGraph+">.<"+inferGraph+"> a dhfc:InferenceGraph. GRAPH <"+inferGraph+"> {"
    supportQuery= """PREFIX dhfc: <"""+dhfc+"""> PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> INSERT DATA {"""
    count=0
    expCount=0
    for infered in inferences :
        # Each inferred axiom may serialize to several RDF triples.
        iTrip=splitTurtle(infered)
        for inf in iTrip :
            query+="<"+inf[0]+"> <" +inf[1]+"> <"+ inf[2]+">. "
        for explanation in inferences[infered] :
            # Add the support (explanation) to the KG: reify the inferred
            # axiom inside the inference graph, anchor a blank explanation
            # node on it, then reify every explaining triple, located in its
            # origin assertion graph when known.
            reif=reification(iTrip,inferGraph )
            supportQuery += reif[0] +"_:explanation"+str(expCount)+" dhfc:isSupportFor <"+reif[1]+">. "
            for triples in explanation :
                rTrip=splitTurtle(triples)
                aT=rTrip[0]
                if aT in locationsDic :
                    loc=locationsDic[aT][0][0]
                else :
                    loc=None
                reif=reification(rTrip,loc)
                supportQuery+= reif[0]+ "<"+reif[1]+"> dhfc:isPartOfSupport _:explanation"+str(expCount)+". "
                count+=1
            #TODO Reasoning pattern ?
            expCount+=1
    supportQuery+="}"
    updateSPARQL(sparqlUpdate, supportQuery)
    query+="}}"
    updateSPARQL(sparqlUpdate, query)
    #normalize()
def reasonerConsistency(turtleContent: str) -> bool:
    """Load a Turtle document into an OWL API ontology and ask Pellet whether
    it is consistent.

    :param turtleContent: a full Turtle serialization (ontology + facts)
    :returns: True when the ontology is consistent
    """
    input_stream = ByteArrayInputStream(turtleContent.encode())
    manager = OWLManager.createOWLOntologyManager()
    ontology = manager.loadOntologyFromOntologyDocument(input_stream)
    reasoner = PelletReasonerFactory().getInstance().createNonBufferingReasoner(ontology)
    manager.addOntologyChangeListener(reasoner)
    l = reasoner.isConsistent()
    if not l:
        # Explanation is computed but currently unused.
        expGen = PelletExplanation(reasoner)
        explanations = expGen.getInconsistencyExplanation()
        # TODO (later): print the inconsistency explanation.
    return l
def RDFserialization(axiom) -> str:
    """Translate a single OWL axiom into its RDF triple serialization.

    Creates an empty throwaway ontology, adds the axiom, runs the OWL API
    RDFTranslator on it, and concatenates the string form of every resulting
    triple.

    :param axiom: an OWL API OWLAxiom (Java object)
    :returns: the concatenated triples, or whatever accumulated before an error
    """
    input_stream = ByteArrayInputStream("".encode())
    manager = OWLManager.createOWLOntologyManager()
    ontology = manager.loadOntologyFromOntologyDocument(input_stream)
    ontology.addAxiom(axiom)
    translator = RDFTranslator(manager, ontology, TurtleDocumentFormat(), False, AlwaysOutputId(), AlwaysOutputId(), AtomicInteger(1), IdentityHashMap(), HashSet())
    translator.translate(axiom)
    triples = ArrayList(translator.getGraph().getAllTriples())
    l = ""
    try:
        for el in triples:
            l += str(el)
    except Exception as err:
        # Best-effort: keep the partial serialization, just report the error.
        print(err)
    return l
def splitTurtle(l):
    """Parse a Turtle string into a list of (subject, predicate, object)
    string triples."""
    parsed = Graph()
    parsed.parse(data=l)
    return [(str(s), str(p), str(o)) for s, p, o in parsed]
# Should return a dictionary with structure as in the example:
# {<Inferred triple> : [[<Explanation1p1>, <Explanation1p2>], [<Explanation2p1>, <Explanation2p2>, <Explanation2p3>]]}
def getInferedAxiomWithExplanation(turtleContent) :
    """Run Pellet on a Turtle document and return the newly inferred axioms
    (class/property assertions) with their explanations.

    :param turtleContent: full Turtle serialization (ontology + facts)
    :returns: dict mapping each inferred axiom's RDF serialization to a list
        of explanations, each explanation being a list of serialized axioms
    """
    PelletExplanation.setup()
    input_stream = ByteArrayInputStream(turtleContent.encode())
    man = OWLManager.createOWLOntologyManager()
    ont = man.loadOntologyFromOntologyDocument(input_stream)
    axioms=ont.getAxioms()
    reasoner = PelletReasonerFactory().getInstance().createNonBufferingReasoner(ont)
    reasoner.precomputeInferences([InferenceType.CLASS_ASSERTIONS, InferenceType.OBJECT_PROPERTY_ASSERTIONS, InferenceType.DATA_PROPERTY_ASSERTIONS])
    gens = ArrayList()
    gens.add(InferredPropertyAssertionGenerator())
    gens.add(InferredClassAssertionAxiomGenerator())
    infOnt = man.createOntology()
    iog = InferredOntologyGenerator(reasoner, gens)
    iog.fillOntology(man.getOWLDataFactory(), infOnt)
    infAxioms = infOnt.getAxioms()
    # Keep only axioms that were not already stated in the input ontology.
    finals=ArrayList()
    for el in infAxioms :
        if el not in axioms :
            finals.add(el)
    expGen = PelletExplanation(reasoner)
    explanations = {}
    for ax in finals :
        rdf=RDFserialization(ax)
        try :
            exp=expGen.getEntailmentExplanations(ax)
            explain=[]
            for el in exp :
                subExp=[]
                for axiom in el :
                    subExp.append(RDFserialization(axiom))
                explain.append(subExp)
            # exp=expGen.getEntailmentExplanation(ax)
            # explain=[]
            # subExp=[]
            # for axiom in exp :
            #     subExp.append(RDFserialization(axiom))
            # explain.append(subExp)
            explanations[rdf]=(explain)
        except :
            # NOTE(review): bare except silently drops axioms whose
            # explanation computation fails -- consider logging the error.
            None
    return explanations
def fragmentInterpretation(sourceName, interpretation, newAssertion):
    """Fragment an interpretation that conflicts with `newAssertion` into
    consistent sub-interpretations (recursive subset search)."""
    seen = []  # shared de-duplication list across the recursive search
    fragmentInterpretation_rec(sourceName, getListAssertion(interpretation), newAssertion, seen)
def fragmentInterpretation_rec(sourceName, assertions, newAssertion, l):
    """Try every (n-1)-sized subset of `assertions` in parallel worker threads;
    each subset is handled by fragment(), which recurses further if needed."""
    print(assertions)
    if not assertions:
        return
    workers = []
    for combo in itertools.combinations(assertions, len(assertions) - 1):
        th = Thread(target=fragment, args=(sourceName, combo, newAssertion, l))
        th.start()
        workers.append(th)
    for th in workers:
        th.join()
def fragment(sourceName, e, newAssertion, l):
    """Handle one candidate assertion subset during interpretation fragmentation.

    If the subset `e` plus `newAssertion` is consistent, materialize it as a
    new interpretation and compute its inferences; otherwise recurse on
    smaller subsets. `l` is the shared list of already-tried subsets.

    Fix: removed the redundant double assignment (`query=query=...`) of the
    original; behavior is unchanged.
    """
    if e not in l:
        l.append(e)
        if checkInterpretationConsistency(list(e), toAdd=[[newAssertion]]):
            i = newInterpretation(sourceName)
            query = """PREFIX dhfc: <""" + dhfc + """>INSERT DATA {"""
            eliste = list(e)
            eliste.append([newAssertion])
            for asert in eliste:
                query += """ <""" + i + """> dhfc:hasAssertion <""" + asert[0] + """>."""
            query += """}"""
            updateSPARQL(sparqlUpdate, query)
            addInferenceResults(i)
        else:
            fragmentInterpretation_rec(sourceName, list(e), newAssertion, l)
\ No newline at end of file
import DHFCMonoBuildTools as dhfc
import pygraft as gen
from rdflib import Graph
import json
import time
from random import randint
def clearAndAddOntologyGraph(graph):
    """Wipe the entire store, then load `graph` (Turtle/N-Triples text) into
    the dedicated ontology named graph."""
    dhfc.updateSPARQL(dhfc.sparqlUpdate, """CLEAR ALL""")
    insert = """ INSERT DATA {<https://example.org/ontologyGraph>
a <http://dhfc/OntologyGraph> .
GRAPH <https://example.org/ontologyGraph> {""" + graph + """
}
}
"""
    dhfc.updateSPARQL(dhfc.sparqlUpdate, insert)
def reset():
    """Clear the store and load the hard-coded family-relations test ontology
    (frereDe symmetric, pereDe asymmetric, oncleDe = frereDe o pereDe chain,
    all sub-properties of relationDe over Person)."""
    clearAndAddOntologyGraph("""<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80>
a <http://www.w3.org/2002/07/owl#Ontology> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> , <http://www.w3.org/2002/07/owl#SymmetricProperty> ;
<http://www.w3.org/2000/01/rdf-schema#subPropertyOf>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> ;
<http://www.w3.org/2000/01/rdf-schema#domain>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#Person> ;
<http://www.w3.org/2000/01/rdf-schema#range>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#Person> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#oncleDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> ;
<http://www.w3.org/2000/01/rdf-schema#subPropertyOf>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe> ;
<http://www.w3.org/2002/07/owl#propertyChainAxiom>
( <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> ) .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> , <http://www.w3.org/2002/07/owl#AsymmetricProperty> ;
<http://www.w3.org/2000/01/rdf-schema#subPropertyOf>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#Person>
a <http://www.w3.org/2002/07/owl#Class> .""")

# Reset the store once at import time so every run starts from the ontology only.
reset()
def testSimple():
    """Three historians assert kinship facts; Sebastien's and Francois's
    pereDe assertions contradict each other (asymmetric property)."""
    reset()
    dhfc.newAssertionSet("source1", "Sebastien", """ <https://example.org/B> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/A>. """)
    dhfc.newAssertionSet("source1", "Francois", """ <https://example.org/A> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/B>. """)
    dhfc.newAssertionSet("source1", "Didier", """ <https://example.org/C> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe> <https://example.org/A>. """)
def testDoubleAssertion():
    """Like testSimple, but the two contradictory pereDe assertions come from
    the same author (Sebastien)."""
    reset()
    dhfc.newAssertionSet("source1", "Sebastien", """ <https://example.org/B> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/A>. """)
    dhfc.newAssertionSet("source1", "Sebastien", """ <https://example.org/A> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/B>. """)
    dhfc.newAssertionSet("source1", "Didier", """ <https://example.org/C> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe> <https://example.org/A>. """)
#testDoubleAssertion()
def simpleTurtle(file):
    """Parse an RDF file and re-serialize every triple as '<s> <p> <o>. ' text.

    NOTE: every term is wrapped in angle brackets, including literals --
    presumably the test data contains only IRIs; verify against the datasets.
    """
    g = Graph()
    g.parse(file)
    return "".join("<" + str(s) + "> <" + str(p) + "> <" + str(o) + ">. "
                   for s, p, o in g)
def bigTest(historians, nTriples, nEntities):
    """Benchmark: load one pre-generated dataset per historian and time the
    newAssertionSet calls; returns the list of per-step durations."""
    file=open('template.json', "r")
    dic=json.load(file)
    file.close()
    dic["num_triples"]=nTriples
    file=open('template.json', "w")
    json.dump(dic, file)
    file.close()
    # NOTE(review): num_entities is assigned AFTER template.json has been
    # written, so it is never persisted -- confirm intent.
    dic["num_entities"]=nEntities
    timeTable=[]
    t=time.time()
    clearAndAddOntologyGraph(simpleTurtle("output\\template\\schema.rdf"))
    if historians==2 :
        for e in range(historians) :
            # NOTE(review): `n` is the driver loop's global variable, not a
            # parameter -- this only works when called from the loop below.
            content=simpleTurtle("TestData\\test_"+str(nEntities)+"_"+str(n)+"_"+str(1+e))
            dhfc.newAssertionSet("source1", "Historien"+str(e),content)
    else :
        for e in range(historians) :
            i=randint(1,9)
            j=randint(1,2)
            content=simpleTurtle("TestData\\test_"+str(nEntities)+"_"+str(i)+"_"+str(j))
            dhfc.newAssertionSet("source1", "Historien"+str(e),content)
            # Record the elapsed time of this historian's insertion.
            newT=time.time()
            timeTable.append(newT-t)
            t=newT
    return timeTable
# Driver: average, over 9 generated datasets (n = 1..9), the per-historian
# insertion timings of bigTest with 10 historians and 50 entities.
times=[0,0,0,0,0,0, 0, 0, 0 ,0]
timer=0
for n in range (1,10) :
    timer=bigTest(10, n, 50)
    for i in range (len(times)):
        times[i]+=timer[i]
for i in range (len(times)):
    times[i]=times[i]/9
print(times)
import DHFCMonoBuildTools as dhfc
import pygraft as gen
from rdflib import Graph
import json
import time
def clearAndAddOntologyGraph(graph):
    """Wipe the entire store, then load `graph` into the ontology named graph."""
    query= """CLEAR ALL"""
    dhfc.updateSPARQL(dhfc.sparqlUpdate, query)
    query= """ INSERT DATA {<https://example.org/ontologyGraph>
a <http://dhfc/OntologyGraph> .
GRAPH <https://example.org/ontologyGraph> {"""+graph+"""
}
}
"""
    dhfc.updateSPARQL(dhfc.sparqlUpdate, query)
def reset():
    """Clear the store and load the hard-coded family-relations test ontology
    (frereDe symmetric, pereDe asymmetric, oncleDe = frereDe o pereDe chain)."""
    clearAndAddOntologyGraph("""<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80>
a <http://www.w3.org/2002/07/owl#Ontology> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> , <http://www.w3.org/2002/07/owl#SymmetricProperty> ;
<http://www.w3.org/2000/01/rdf-schema#subPropertyOf>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> ;
<http://www.w3.org/2000/01/rdf-schema#domain>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#Person> ;
<http://www.w3.org/2000/01/rdf-schema#range>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#Person> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#oncleDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> ;
<http://www.w3.org/2000/01/rdf-schema#subPropertyOf>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe> ;
<http://www.w3.org/2002/07/owl#propertyChainAxiom>
( <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> ) .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> , <http://www.w3.org/2002/07/owl#AsymmetricProperty> ;
<http://www.w3.org/2000/01/rdf-schema#subPropertyOf>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#Person>
a <http://www.w3.org/2002/07/owl#Class> .""")

# Reset the store once at import time so every run starts from the ontology only.
reset()
def testSimple():
    """Three historians assert kinship facts; the two pereDe assertions
    contradict each other (asymmetric property)."""
    reset()
    dhfc.newAssertionSet("source1", "Sebastien", """ <https://example.org/B> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/A>. """)
    dhfc.newAssertionSet("source1", "Francois", """ <https://example.org/A> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/B>. """)
    dhfc.newAssertionSet("source1", "Didier", """ <https://example.org/C> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe> <https://example.org/A>. """)
def testDoubleAssertion():
    """Like testSimple, but both contradictory pereDe assertions come from the
    same author (Sebastien)."""
    reset()
    dhfc.newAssertionSet("source1", "Sebastien", """ <https://example.org/B> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/A>. """)
    dhfc.newAssertionSet("source1", "Sebastien", """ <https://example.org/A> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/B>. """)
    dhfc.newAssertionSet("source1", "Didier", """ <https://example.org/C> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe> <https://example.org/A>. """)
#testDoubleAssertion()
def simpleTurtle(file):
    """Parse an RDF file and re-serialize every triple as '<s> <p> <o>. ' text.

    NOTE: all terms are wrapped in angle brackets, including literals --
    presumably the test data contains only IRIs; verify against the datasets.
    """
    g=Graph()
    g.parse(file)
    content=""
    for s, p,o in g :
        content+="<"+str(s)+"> <"+str(p)+"> <"+str(o)+">. "
    return content
def bigTest(historians, nTriples, nEntities):
    """Benchmark: load one pre-generated dataset per historian and return the
    total elapsed time in seconds."""
    file=open('template.json', "r")
    dic=json.load(file)
    file.close()
    dic["num_triples"]=nTriples
    file=open('template.json', "w")
    json.dump(dic, file)
    file.close()
    # NOTE(review): num_entities is assigned AFTER template.json has been
    # written, so it is never persisted -- confirm intent.
    dic["num_entities"]=nEntities
    t=time.time()
    clearAndAddOntologyGraph(simpleTurtle("output\\template\\schema.rdf"))
    for e in range(historians) :
        # NOTE(review): `n` is the driver loop's global variable, not a
        # parameter -- this only works when called from the loop below.
        content=simpleTurtle("TestData\\test_"+str(nEntities)+"_"+str(n)+"_"+str(1+e))
        dhfc.newAssertionSet("source1", "Historien"+str(e),content)
    return time.time()-t
# Driver: for each entity count (10..90), average bigTest's total runtime
# over 9 generated datasets (n = 1..9) with 2 historians.
times=[]
for nEntities in range(10, 100, 10):
    timer=0
    for n in range (1,10) :
        timer+=bigTest(2, n, nEntities)
    times.append(timer/9)
print(times)
import DHFCMonoBuildTools as dhfc
import pygraft as gen
from rdflib import Graph
import json
import time
def clearAndAddOntologyGraph(graph):
    """Wipe the entire store, then load `graph` into the ontology named graph."""
    query= """CLEAR ALL"""
    dhfc.updateSPARQL(dhfc.sparqlUpdate, query)
    query= """ INSERT DATA {<https://example.org/ontologyGraph>
a <http://dhfc/OntologyGraph> .
GRAPH <https://example.org/ontologyGraph> {"""+graph+"""
}
}
"""
    dhfc.updateSPARQL(dhfc.sparqlUpdate, query)
def reset():
    """Clear the store and load the hard-coded family-relations test ontology
    (frereDe symmetric, pereDe asymmetric, oncleDe = frereDe o pereDe chain)."""
    clearAndAddOntologyGraph("""<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80>
a <http://www.w3.org/2002/07/owl#Ontology> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> , <http://www.w3.org/2002/07/owl#SymmetricProperty> ;
<http://www.w3.org/2000/01/rdf-schema#subPropertyOf>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> ;
<http://www.w3.org/2000/01/rdf-schema#domain>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#Person> ;
<http://www.w3.org/2000/01/rdf-schema#range>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#Person> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#oncleDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> ;
<http://www.w3.org/2000/01/rdf-schema#subPropertyOf>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe> ;
<http://www.w3.org/2002/07/owl#propertyChainAxiom>
( <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> ) .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe>
a <http://www.w3.org/2002/07/owl#ObjectProperty> , <http://www.w3.org/2002/07/owl#AsymmetricProperty> ;
<http://www.w3.org/2000/01/rdf-schema#subPropertyOf>
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#relationDe> .
<http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#Person>
a <http://www.w3.org/2002/07/owl#Class> .""")

# Reset the store once at import time so every run starts from the ontology only.
reset()
def testSimple():
    """Three historians assert kinship facts; the two pereDe assertions
    contradict each other (asymmetric property)."""
    reset()
    dhfc.newAssertionSet("source1", "Sebastien", """ <https://example.org/B> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/A>. """)
    dhfc.newAssertionSet("source1", "Francois", """ <https://example.org/A> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/B>. """)
    dhfc.newAssertionSet("source1", "Didier", """ <https://example.org/C> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe> <https://example.org/A>. """)
def testDoubleAssertion():
    """Like testSimple, but both contradictory pereDe assertions come from the
    same author (Sebastien)."""
    reset()
    dhfc.newAssertionSet("source1", "Sebastien", """ <https://example.org/B> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/A>. """)
    dhfc.newAssertionSet("source1", "Sebastien", """ <https://example.org/A> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#pereDe> <https://example.org/B>. """)
    dhfc.newAssertionSet("source1", "Didier", """ <https://example.org/C> <http://www.semanticweb.org/wcharles/ontologies/2024/9/untitled-ontology-80#frereDe> <https://example.org/A>. """)
#testDoubleAssertion()
def simpleTurtle(file):
    """Parse an RDF file and re-serialize every triple as '<s> <p> <o>. ' text.

    NOTE: all terms are wrapped in angle brackets, including literals --
    presumably the test data contains only IRIs; verify against the datasets.
    """
    g=Graph()
    g.parse(file)
    content=""
    for s, p,o in g :
        content+="<"+str(s)+"> <"+str(p)+"> <"+str(o)+">. "
    return content
def generate(nTriples, nEntities):
    """Generate a synthetic KG with pygraft and return its serialized triples.

    Rewrites template.json with num_triples, runs pygraft on it, and
    serializes output/template/full_graph.rdf with simpleTurtle.
    """
    file=open('template.json', "r")
    dic=json.load(file)
    file.close()
    dic["num_triples"]=nTriples
    file=open('template.json', "w")
    json.dump(dic, file)
    file.close()
    # NOTE(review): num_entities is assigned AFTER template.json has been
    # written, so generate_kg never sees this value -- confirm intent.
    dic["num_entities"]=nEntities
    print(str(nTriples)+"_"+str(nEntities))
    gen.generate_kg("template.json")
    return simpleTurtle("output\\template\\full_graph.rdf")
# Resume-capable dataset generation: log.json stores the last completed
# (nEntities, n) pair so an interrupted run can pick up where it stopped.
f=open("log.json")
log=json.load(f)
f.close()
counter=log["number"]
ent=log["nEntities"]
for nEntities in range(100, 1000, 100):
    for n in range (10) :
        # Skip pairs already generated according to the log.
        if nEntities>=ent and n>counter :
            c=generate(nEntities*4, nEntities)
            # NOTE(review): these output files are opened for writing but
            # never closed -- data may be lost if the process dies; consider
            # `with open(...)`.
            file=open("TestData\\test_"+str(nEntities)+"_"+str(n)+"_1", "w")
            file.write(c)
            c=generate(nEntities*4, nEntities)
            file=open("TestData\\test_"+str(nEntities)+"_"+str(n)+"_2", "w")
            file.write(c)
            # Persist progress after each completed pair.
            f=open("log.json", "w")
            json.dump({"number" : n, "nEntities" : nEntities}, f)
            f.close()
    # Once an entity size is finished, restart the inner counter from zero.
    counter=0
\ No newline at end of file
# Before running
Put all dependencies in a folder named `lib` in the same directory as the script:
- aterm-1.6.jar
- caffeine-2.9.3.jar
- commons-io-2.6.jar
- eclipse-rdf4j-3.7.7.jar
- guava-30.1.1-jre.jar
- hppcrt-0.7.5.jar
- javax.inject-1.jar
- jgrapht-jdk1.5-0.7.3.jar
- owlapi-distribution-4.5.29.jar
- pellet-core-2.3.1.jar
- pellet-datatypes-2.2.2.jar
- pellet-el-2.2.2.jar
- pellet-explanation-2.5.2-dllearner.jar
- pellet-modularity-2.3.2-dllearner.jar
- pellet-owlapi-2.5.2-dllearner.jar
- pellet-rules-2.2.2.jar
- slf4j-api-1.7.36.jar
- xz-1.8.jar
(optional)
- slf4j-simple-1.7.36.jar
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment