Commit 92968860 authored by Thodoris Nestoridis's avatar Thodoris Nestoridis

Add Select Ontology from Fuseki + minor changes

parent 930410c0
......@@ -20,6 +20,15 @@
<textarea type="text" class="form-control" id="description" required [(ngModel)]="groupboilerplate.description"
name="description"></textarea>
</div>
<h4>Select Ontology</h4>
<mat-form-field appearance="fill">
  <mat-label>Ontology File</mat-label>
  <!-- Bound to groupboilerplate.ontology_file; options are loaded from the
       Fuseki server via getOntologies() (each entry is a [name, name] pair). -->
  <mat-select [(ngModel)]="groupboilerplate.ontology_file" name="file" required>
    <!-- Loop variable renamed from "food" (leftover from the Angular Material
         docs example) to "ontology" for clarity. -->
    <mat-option *ngFor="let ontology of ontologyList" [value]="ontology[0]">
      {{ontology[0]}}
    </mat-option>
  </mat-select>
</mat-form-field>
</form>
<button (click)="saveBoilerplate()" class="btn btn-success">Submit</button>
</div>
\ No newline at end of file
......@@ -10,10 +10,12 @@ import { NotifierService } from "angular-notifier";
styleUrls: ['./add-groupboil.component.scss']
})
export class AddGroupboilComponent implements OnInit {
ontologyList=[]
groupboilerplate: GroupBoilerplate = {
title_bgroup: "",
description: "",
ontology_file: "/Default",
};
submitted = false;
private readonly notifier: NotifierService;
......@@ -23,12 +25,15 @@ export class AddGroupboilComponent implements OnInit {
}
ngOnInit(): void {
  // Populate ontologyList from the backend as soon as the component loads,
  // so the "Select Ontology" dropdown has its options before first use.
  this.getOntologies();
}
saveBoilerplate(): void {
const data = {
title_bgroup: this.groupboilerplate.title_bgroup,
description: this.groupboilerplate.description ? this.groupboilerplate.description : this.groupboilerplate.title_bgroup,
ontology_file: '../../Ontologies' + this.groupboilerplate.ontology_file ,
};
this.submitted = true;
this.boilerplatelService.creategroupboil(data)
.subscribe(
......@@ -53,4 +58,16 @@ export class AddGroupboilComponent implements OnInit {
};
}
getOntologies(): void {
  // Fetch the ontology datasets known to the backend and cache them for the
  // template's <mat-select>; failures are only logged (best-effort load).
  this.boilerplatelService.getOntolgies().subscribe(
    (data) => {
      this.ontologyList = data;
      console.log(data);
    },
    (error) => {
      console.log(error);
    }
  );
}
}
<div fxLayoutAlign="end">
<!-- NOTE(review): this first badge has no *ngIf, so it is rendered on every
     check, in addition to exactly one of the danger/success badges below —
     the "MAIN TEMPLATE" heading therefore appears twice. Confirm whether the
     plain badge-secondary line should be removed or given its own condition. -->
<h2><span class="badge badge-secondary"> MAIN TEMPLATE : {{this.main}}</span></h2>
<h2 *ngIf="this.color_badge=='danger'" ><span class="badge badge-danger" > MAIN TEMPLATE : {{this.main}}</span></h2>
<h2 *ngIf="this.color_badge=='success'" ><span class="badge badge-success" > MAIN TEMPLATE : {{this.main}}</span></h2>
</div>
<div class="form-row">
<div class="col">
......
......@@ -89,6 +89,7 @@ export class MainDetailsComponent implements OnInit {
};
message = '';
main = '';
color_badge='danger'
constructor(
private mainService: MainService,
......@@ -141,6 +142,7 @@ export class MainDetailsComponent implements OnInit {
}
ngDoCheck() {
this.color_badge='danger'
this.dis();
}
......@@ -173,6 +175,7 @@ export class MainDetailsComponent implements OnInit {
//add the connection data
}
this.main = this.mainsyntax[x].Id
this.color_badge='success'
return 1;
}
}
......
......@@ -20,6 +20,7 @@ export class GroupBoilerplate {
id?: any;
title_bgroup?: string;
description?: string;
ontology_file?: string;
}
export class InferenceResults {
......
......@@ -9,6 +9,7 @@ import { environment } from '../../environments/environment';
const baseUrl = '/boilerplates/';
const inferUrl = '/infer/';
const baseUrlgb = environment.apiroot+'api-auth/groupboilerplates/';
const genericbaseUrl = environment.apiroot+'api-auth';
@Injectable({
providedIn: 'root'
......@@ -70,4 +71,8 @@ export class BoilerplateService {
return this.http.get(`${baseUrlgb}${gb}`+`/search`+`/${instance}`);
}
getOntolgies(): Observable<any> {
  // GET <apiroot>api-auth/ontologies/ — list of Fuseki ontology datasets.
  // NOTE(review): method name has a typo ("Ontolgies") but is kept unchanged
  // because existing callers depend on it.
  return this.http.get(`${genericbaseUrl}/ontologies/`);
}
}
......@@ -95,7 +95,7 @@ SHALL_CHOICES = ( ("shall", "shall"), ("shall not", "shall not"))
#
#VERB_CHOICES = get_verb_instances("LO#Verb")
VERB_CHOICES = get_verb_file_and_instances("LO#Verb")
#VERB_CHOICES = get_verb_file_and_instances("LO#Verb")
#test to add M
CLASS_CHOICES = (("SYSTEM","SYSTEM"), ("FUNCTION","FUNCTION"), ("SHALL","SHALL/SHALL NOT"),
......
......@@ -11,6 +11,8 @@ from reqman.apps.reqtool.rest_api.services.parse_ontologies import *
from pygments.formatters.html import HtmlFormatter
from pygments import highlight
from subprocess import call
#get prefix syntax from the Ontology#
#print(get_prefix_new_syntax())
......@@ -83,14 +85,20 @@ def create_infer_result(instance, destination):
def initialize_instances(instance):
try :
g = Graph()
print(instance.ontology_file)
file_name = instance.title_bgroup.replace('/', '')
save_path = "../../Ontologies/"+file_name+"/"
if (os.path.exists("../../Ontologies/"+file_name+"/") == False):
os.mkdir("../../Ontologies/"+file_name)
completeName = os.path.join(save_path, file_name)
g.load(instance.ontology_file, format="turtle")
destination=save_path+file_name+'.ttl'
g.serialize(destination, format='turtle')
if (instance.ontology_file != '../../Ontologies/Default'):
ont = instance.ontology_file.replace('../../Ontologies/', '')
call("./../../Ontologies/s-get http://155.207.131.19:3030/"+ont+" default >> ../../Ontologies/"+file_name+'/'+file_name+'.ttl', shell=True)
destination=save_path+file_name+'.ttl'
else :
g.load('../../Ontologies/2022_AOCS.ttl', format="turtle")
destination=save_path+file_name+'.ttl'
g.serialize(destination, format='turtle')
except:
print('Cannot load Ontology')
classes_inctances = BoilerplateGroupClassesInstances ( classes_instances_owner= instance.boilerplate_owner, classes_instances_group_of_boilerplate = instance,
......@@ -113,6 +121,7 @@ def update_instances(instance):
list_of_DSO.append("http://delab.csd.auth.gr/ontologies/2018/RDO-instances#")
#PREFIX_SYNTAX = get_prefix_syntax(g)
VERB_CHOICES_PREFIX = get_prefix_verbs(g)
VERB_CHOICES = get_verb_file_and_instances("LO#Verb", g)
SYSTEM_CHOICES = get_instances_g("SAO#System",g, list_of_DSO)
FUNCTION_CHOICES = get_instances_g("SAO#Function",g, list_of_DSO)
ITEM_CHOICES = get_instances_g("SAO#Item",g, list_of_DSO)
......@@ -147,7 +156,8 @@ def update_instances(instance):
class BoilerplateGroup(models.Model):
boilerplate_owner = models.ForeignKey(User, related_name='boilerplate_group_owner', on_delete=models.CASCADE)
#add Boilerplate path, so each Boilerplate Group can have its own Ontology file
ontology_file = models.CharField(default="../../Ontologies/2022_AOCS.ttl", max_length=100, blank=True)
# ontology_file = models.CharField(default="../../Ontologies/2022_AOCS.ttl", max_length=100, blank=True)
ontology_file = models.CharField(max_length=100, blank=True)
title_bgroup = models.CharField(max_length=100, unique=True)
description = models.CharField(max_length=500)
......
......@@ -35,7 +35,7 @@ class BoilerplateGroupSerializer(serializers.ModelSerializer):
boilerplate_owner = serializers.ReadOnlyField(source='owner.username')
class Meta:
model = BoilerplateGroup
fields = ('id', 'boilerplate_owner', 'title_bgroup', 'description')
fields = ('id', 'boilerplate_owner', 'ontology_file', 'title_bgroup', 'description')
class BoilerplateGroupClassesInstancesSerializer(serializers.ModelSerializer):
classes_instances_owner = serializers.ReadOnlyField(source='owner.username')
......
class FusekiActions():
    '''Minimal Fuseki action handler; write() is currently a logging stub.'''

    def write(self, data):
        # Stub implementation: echo the payload that would be written.
        print("i am writing form here", data)
\ No newline at end of file
print("i am writing form here",data)
......@@ -14,15 +14,11 @@ list_of_inference_metrics = ["IncompleteRequirement", "AmbiguousRequirement",
info_metric = ["isMissingConcept", "isMissingInstanceOf", "isMissingProducer", "isMissingState", "hasAmbiguousConcept", "hasAmbiguousConcept2",
"hasNoisyConcept", "hasOpaqueConcept", "isInconsistentTo"]
prefix_syntax = get_prefix_syntax(g)

# NOTE(review): duplicate, conflicting definitions of getclassofprefix and
# getclassofmain appear below — this looks like unresolved diff residue (the
# old no-ontology_file versions alongside the new two-argument versions).
# Python keeps whichever `def` executes last, so as written the 1-argument
# getclassofprefix shadows the 2-argument one, while the 2-argument
# getclassofmain shadows the 1-argument one. Confirm the intended versions
# and delete the stale copies.
def getclassofprefix(prefix, ontology_file):
    return(findclassofprefix(prefix, ontology_file))

main_syntax = get_main_sytax_inference(g)

def getclassofprefix(prefix):
    return(findclassofprefix(prefix))

def getclassofmain(main):
    return(findclassofmain(main))

def getclassofmain(main, ontology_file):
    return(findclassofmain(main, ontology_file))
def getinstancefile(data, g):
datam = data.split(":")
......@@ -257,7 +253,7 @@ def shacl(ontotlogy_file):
print(i+1, 'Cycle of inferencing')
#result = subprocess.check_output(["./reqman/apps/reqtool/rest_api/services/shacl-1.3.2/bin/shaclinfer.sh", "-datafile", target])
try:
result = subprocess.check_output("./reqman/apps/reqtool/rest_api/services/shacl-1.4.2/bin/shaclinfer.sh -datafile "+target1+" | grep -v -e 'WARN OntDocumentManager' -e 'at org.' -e 'org.apache.' -e '@'",shell=True)
result = subprocess.check_output("./reqman/apps/reqtool/rest_api/services/shacl-1.4.2/bin/shaclinfer.sh -datafile "+target1+" | grep -v -e 'WARN OntDocumentManager' -e 'at org.' -e 'org.apache.' -e '@'",shell=True, timeout=360)
except:
raise APIException("Problem during the Inferencing")
if(result==final_result):
......@@ -320,7 +316,7 @@ def shacl(ontotlogy_file):
from rest_framework.exceptions import APIException
'''Find if the Prefix exists in syntax'''
def findclassofprefix(listofprefix):
def findclassofprefix(listofprefix, Ontology_file):
classlist=[]
currentverb = listofprefix['state_or_verb'].rsplit(' : ', 1)
currentstate = listofprefix['state_or_verb'].rsplit(' : ')
......@@ -335,6 +331,9 @@ def findclassofprefix(listofprefix):
if (currentsubjectspl[0] == "System"):
raise APIException("The Subject must be an Item")
#
g = Graph()
g.load(Ontology_file, format="turtle")
prefix_syntax = get_prefix_syntax(g)
for prefix in prefix_syntax:
for inprefix in prefix_syntax[prefix]:
if (currentverb[1] in prefix_syntax[prefix][inprefix.split("_")[0]+"_Verbs"]) or (currentstate[0] in prefix_syntax[prefix][inprefix.split("_")[0]+"_Verbs"] or currentverb[1] =='is' ):
......@@ -357,7 +356,10 @@ def findclassofprefix(listofprefix):
'''Find if the Main exists in syntax'''
def findclassofmain(listofmain):
def findclassofmain(listofmain, Ontology_file):
g = Graph()
g.load(Ontology_file, format="turtle")
main_syntax = get_main_sytax_inference(g)
classlist = []
for i in range(len(main_syntax)):
verbsmain = main_syntax[('M'+str(i+1))]['Verbs']
......
......@@ -6,13 +6,6 @@ from rdflib import ConjunctiveGraph, URIRef, RDFS, RDF, Namespace, Literal
#call("./../../Ontologies/s-get http://155.207.131.19:3030/Mokos_18_1_7_47/data default >> ../../Ontologies/data.ttl", shell=True)
#Ontology_file = "../../Ontologies/Mokos_18_1_7_47.ttl"
#Ontology_file = "../../Ontologies/Autonomy_v1.ttl"
Ontology_file = "../../Ontologies/2022_AOCS.ttl"
#Ontology_file = "../../Ontologies/Autonomy_v1.ttl"
subClass_instances = "../../Ontologies/instances_subclass.txt"
#HERE
#list_of_DSO = ["http://delab.csd.auth.gr/ontologies/2018/SAO#","http://delab.csd.auth.gr/ontologies/2018/DSO#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive-AVP#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive-AVP-instances#","http://delab.csd.auth.gr/ontologies/2018/RDO-instances#"]
list_of_DSO = []
......@@ -25,8 +18,8 @@ DSO = ["http://delab.csd.auth.gr/ontologies/2018/DSO#"]
Dictionary_Data = {}
Dictionary_Data_list = []
g = Graph()
g.load(Ontology_file, format="turtle")
#g = Graph()
#g.load(Ontology_file, format="turtle")
'''def find_subclass_domain_range():
file2 = open(r"../../Ontologies/instances.txt","w+")
......@@ -79,7 +72,7 @@ def get_verb_instances(keyword):
return tuple((str(n), str(n)) for n in (out_list))
'''Get Verbs - return tuple'''
def get_verb_file_and_instances(keyword):
def get_verb_file_and_instances(keyword, g):
get_file_and_instances(keyword, g)
keywordspl = keyword.split("#")
in_list = []
......@@ -292,7 +285,7 @@ def get_dmo_classes_and_comment(keyword, g, list_of_DSO):
sub_ins.append(sub_ins_dict)
###############################
#get Instance of all subclasses
subsubclass_list= get_Classes_Dictionary(subclas_name, list_of_DSO)
subsubclass_list= get_Classes_Dictionary(subclas_name, list_of_DSO, g)
suclass_dict = {'SubClass_name': subclas_name, 'SubclassComment':find_comment, 'InstanceOfSubclass': sub_ins, 'SubSubClass' : subsubclass_list}
subclass_list.append(suclass_dict)
############################################
......@@ -303,7 +296,7 @@ def get_dmo_classes_and_comment(keyword, g, list_of_DSO):
return Dictionary_Data
def get_Classes_Dictionary(name, list_of_DSO):
def get_Classes_Dictionary(name, list_of_DSO, g):
#subsubsub_ins_dict = {}
subsubsubsuclass_dict = {}
subsubsubsubclass_list = []
......@@ -330,7 +323,7 @@ def get_Classes_Dictionary(name, list_of_DSO):
subsubsubsub_ins_dict = {"Instance": spl2[len(spl2) - 1], "Comment": subsubsubfind_comment_instance}
subsubsubsub_ins.append(subsubsubsub_ins_dict)
#get current Subclass -Subclasses / Instances
newsubclass_list = get_Classes_Dictionary(subsubsubsubclas_name, list_of_DSO)
newsubclass_list = get_Classes_Dictionary(subsubsubsubclas_name, list_of_DSO, g)
subsubsubsuclass_dict = {'SubSubClass_name': subsubsubsubclas_name, 'SubSubclassComment':subsubsubfind_comment, 'SubInstanceOfSubclass': subsubsubsub_ins, 'SubSubClass': newsubclass_list}
subsubsubsubclass_list.append(subsubsubsuclass_dict)
return subsubsubsubclass_list
......@@ -428,7 +421,7 @@ def get_related_to_subject(bnodes_uriref):
'''Find Boilerplate Verb - return list'''
def get_verb(bnodes_uriref):
def get_verb(bnodes_uriref, g):
verb_list = []
out_list =[]
bnodes_uriref = [str(i) for i in bnodes_uriref]
......@@ -664,7 +657,7 @@ def get_prefix_syntax(g):
#prefix_dict[prefix]["Verbs"] = []
#prefix_dict[prefix]["Attribute"] = []
#prefix_dict[prefix]["Subject"] = []
bnodes_uriref = find_triples(prefix)
bnodes_uriref = find_triples(prefix, g)
subclass_data = []
for per in bnodes_uriref:
tmp = get_instances_list(str(per), g)
......@@ -744,7 +737,7 @@ def get_prefix_verbs(g):
for prefix in get_prefix:
prefix_list.extend(get_instances_list(prefix, g))
for prefix in prefix_list:
bnodes_uriref = find_triples(prefix)
bnodes_uriref = find_triples(prefix, g)
subclass_data = []
for per in bnodes_uriref:
tmp = get_instances_list(str(per), g)
......@@ -752,11 +745,11 @@ def get_prefix_verbs(g):
for i in tmp:
subclass_data.append(i)
for pre in subclass_data:
tmp_list2 = find_triples(pre)
tmp_list2 = find_triples(pre, g)
tmp_list2 = [str(i) for i in tmp_list2]
#add verbs
if tmp_list2:
ver = get_verb(tmp_list2)
ver = get_verb(tmp_list2, g)
if (len(ver)==0):
out_list.append('Is : is')
verblist.append('Is : is')
......@@ -782,7 +775,7 @@ def get_prefix_new_syntax(g):
for prefix in prefix_list:
per = URIRef("http://delab.csd.auth.gr/ontologies/2018/RBO#" + prefix)
#per2 = URIRef("http://www.w3.org/2002/07/owl#allValuesFrom")
bnodes_uriref = find_triples(prefix)
bnodes_uriref = find_triples(prefix, g)
subclass_data = []
#print(bnodes_uriref)
for per in bnodes_uriref:
......@@ -797,7 +790,7 @@ def get_prefix_new_syntax(g):
'''Find the triples of the prefix - return list.type(URIRef)'''
def find_triples(prefix):
def find_triples(prefix, g):
bnodes = []
URIRef_list = []
per = URIRef("http://delab.csd.auth.gr/ontologies/2018/RBO#" + prefix)
......
......@@ -7,6 +7,7 @@ from .views import user_views, requirements_views, main_req_views, suffix_req_vi
urlpatterns = [
path('users/', user_views.UserList.as_view(), name='api-user-list'),
path('users/<uuid:pk>/', user_views.UserDetail.as_view(), name='api-post-details'),
path('ontologies/', requirements_views.OntologiesChoicesViewSet.as_view(), name='api-get-ontologies'),
path('groupboilerplates/', requirements_views.BoilerplateGroupCreateAPIView.as_view(), name='api-groupboilerplates-create'),
path('groupboilerplates/<int:pk>/', requirements_views.BoilerplateGroupDetailsAPIView.as_view(), name='api-groupboilerplates-list'),
......
......@@ -12,7 +12,7 @@ from reqman.apps.reqtool.rest_api.serializers.main_req import MainSerializer
from reqman.apps.permissions import IsOwnerOrReadOnly
from reqman.apps.reqtool.rest_api.services import fuseki, inference
from reqman.apps.reqtool.rest_api.services.parse_ontologies import Dictionary_Data_list, Ontology_file, get_main_sytax, get_main_sytax_inference
from reqman.apps.reqtool.rest_api.services.parse_ontologies import get_main_sytax, get_main_sytax_inference
from rest_framework.response import Response
from rest_framework.views import APIView
......@@ -83,7 +83,6 @@ class MainDetailsAPIView(RetrieveUpdateDestroyAPIView):
numinstance= instance['numerical']
mn = (es_instance[len(es_instance)-1] + ' ' + stateitem_instance[len(stateitem_instance)-1] + ' ' + statevalue_instance[len(statevalue_instance)-1] + ' ' + instance['shall'] + ' ' + ev_instance[len(ev_instance)-1] + ' ' + quantifier_instance[len(quantifier_instance)-1] +' '+numinstance +' ' + numunit_instance[len(numunit_instance)-1] + ' ' + eo_instance[len(eo_instance)-1] + ' ' + stsysconset_instance[len(stsysconset_instance)-1])
boildata = BoilerplateData.objects.filter(owner_data=instance['boilerplate_of_main'].owner, boilerplate_data_id = instance['boilerplate_of_main'])
boildata.update(main_data = mn )
#get instances of the ontology that this boilerplate belongs
data = BoilerplateGroupClassesInstances.objects.filter(classes_instances_owner= instance['boilerplate_of_main'].owner, classes_instances_group_of_boilerplate = boildata.values('group_of_boilerplate_data')[0]['group_of_boilerplate_data'])
sub_data = data.values('system_choices')[0]['system_choices'] + data.values('function_choices')[0]['function_choices'] + data.values('inteface_choices')[0]['inteface_choices']
......@@ -110,12 +109,14 @@ class MainDetailsAPIView(RetrieveUpdateDestroyAPIView):
numunit_data = data.values('number_unit_choices')[0]['number_unit_choices']
if( instance['mumerical_units'] not in numunit_data):
raise APIException("MAIN : Instance of Number Unit does not exist")
curmain_choices = inference.getclassofmain(instance)
ontology_file = data.values('ontology_file')[0]['ontology_file']
curmain_choices = inference.getclassofmain(instance, ontology_file)
if(curmain_choices==None):
raise APIException("Main choices not in [M1-M16]")
if (curmain_choices[0] == 'ERROR'):
raise APIException("Main choices not in [M1-M16]")
else:
boildata.update(main_data = mn )
instance = serializer.save(main_choices = curmain_choices[0])
#instance = serializer.save()
......
......@@ -22,6 +22,29 @@ from rest_framework.views import APIView
from rest_framework.exceptions import APIException
from django.shortcuts import get_object_or_404
import requests, json
from requests.auth import HTTPBasicAuth
from reqman.settings.environment import env
class OntologiesChoicesViewSet(APIView):
    '''List the ontology datasets currently hosted on the Fuseki server.

    GET returns a tuple of (name, name) choice pairs. "Default" is always
    the first entry; if Fuseki cannot be reached, only "Default" is returned
    (best-effort, the request never fails outright).
    '''

    def get(self, request):
        # Fallback payload: an empty dataset list makes the loop below a no-op,
        # leaving just the "Default" choice.
        data = {'datasets': []}
        try:
            # SECURITY(review): admin credentials are hardcoded here; they
            # should be moved into environment settings next to FUSEKI_URL.
            url = requests.get(env("FUSEKI_URL") + "/$/datasets",
                               auth=HTTPBasicAuth('admin', 'pw!!!'))
            data = json.loads(url.text)
        except Exception as exc:
            # Was a bare `except: pass`, which silently hid connection and
            # parsing errors (and even SystemExit). Keep the best-effort
            # fallback but log what went wrong.
            print('Cannot fetch ontologies from Fuseki:', exc)
        list_ontology = ['Default']
        # Each Fuseki dataset entry exposes its name under the 'ds.name' key.
        for i in data.get('datasets', []):
            list_ontology.append(i['ds.name'])
        ONTOLOGIES_CHOICES = tuple((str(n), str(n)) for n in list_ontology)
        return Response(ONTOLOGIES_CHOICES)
class BoilerplateGroupCreateAPIView(ListCreateAPIView):
......@@ -174,7 +197,6 @@ class PrefixDetailsAPIView(RetrieveUpdateDestroyAPIView):
ei_instance = instance['item_function_flow_statevalue'].rsplit(':', 1)
pr = (instance['prefix'] + ' ' + es_instance[len(es_instance)-1] + ' ' + ev_instance[len(ev_instance)-1] + ' ' + ei_instance[len(ei_instance)-1])
boildata = BoilerplateData.objects.filter(owner_data=instance['prefix_boilerplate'].owner, boilerplate_data_id = instance['prefix_boilerplate'])
boildata.update(prefix_data = pr )
#get instances of the ontology that this boilerplate belongs
data = BoilerplateGroupClassesInstances.objects.filter(classes_instances_owner= instance['prefix_boilerplate'].owner, classes_instances_group_of_boilerplate = boildata.values('group_of_boilerplate_data')[0]['group_of_boilerplate_data'])
es_data = data.values('system_choices')[0]['system_choices'] + data.values('function_choices')[0]['function_choices'] + data.values('item_choices')[0]['item_choices']
......@@ -187,10 +209,12 @@ class PrefixDetailsAPIView(RetrieveUpdateDestroyAPIView):
if( instance['item_function_flow_statevalue'] not in ei_data):
raise APIException("PREFIX : Instance of Object does not exist")
#find the class of the prefix
prefix_choices = inference.getclassofprefix(instance)
ontology_file = data.values('ontology_file')[0]['ontology_file']
prefix_choices = inference.getclassofprefix(instance, ontology_file)
if prefix_choices[0] == 'ERROR':
raise APIException("Prefix choices not in [P1-P3]")
else:
boildata.update(prefix_data = pr )
serializer.save(simple_prefix = prefix_choices)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment