Commit 92968860 authored by Thodoris Nestoridis's avatar Thodoris Nestoridis

Add Select Ontology from Fuseki + minor changes

parent 930410c0
...@@ -20,6 +20,15 @@ ...@@ -20,6 +20,15 @@
<textarea type="text" class="form-control" id="description" required [(ngModel)]="groupboilerplate.description" <textarea type="text" class="form-control" id="description" required [(ngModel)]="groupboilerplate.description"
name="description"></textarea> name="description"></textarea>
</div> </div>
<h4>Select Ontology</h4>
<mat-form-field appearance="fill">
  <mat-label>Ontology File</mat-label>
  <mat-select [(ngModel)]="groupboilerplate.ontology_file" name="file" required>
    <!-- ontologyList entries are (name, name) tuples from the backend; show/bind the first element.
         Renamed loop variable from "food" (leftover of the Angular Material docs example). -->
    <mat-option *ngFor="let ontology of ontologyList" [value]="ontology[0]">
      {{ontology[0]}}
    </mat-option>
  </mat-select>
</mat-form-field>
</form> </form>
<button (click)="saveBoilerplate()" class="btn btn-success">Submit</button> <button (click)="saveBoilerplate()" class="btn btn-success">Submit</button>
</div> </div>
\ No newline at end of file
...@@ -10,10 +10,12 @@ import { NotifierService } from "angular-notifier"; ...@@ -10,10 +10,12 @@ import { NotifierService } from "angular-notifier";
styleUrls: ['./add-groupboil.component.scss'] styleUrls: ['./add-groupboil.component.scss']
}) })
export class AddGroupboilComponent implements OnInit { export class AddGroupboilComponent implements OnInit {
ontologyList=[]
groupboilerplate: GroupBoilerplate = { groupboilerplate: GroupBoilerplate = {
title_bgroup: "", title_bgroup: "",
description: "", description: "",
ontology_file: "/Default",
}; };
submitted = false; submitted = false;
private readonly notifier: NotifierService; private readonly notifier: NotifierService;
...@@ -23,12 +25,15 @@ export class AddGroupboilComponent implements OnInit { ...@@ -23,12 +25,15 @@ export class AddGroupboilComponent implements OnInit {
} }
ngOnInit(): void { ngOnInit(): void {
this.getOntologies();
} }
saveBoilerplate(): void { saveBoilerplate(): void {
const data = { const data = {
title_bgroup: this.groupboilerplate.title_bgroup, title_bgroup: this.groupboilerplate.title_bgroup,
description: this.groupboilerplate.description ? this.groupboilerplate.description : this.groupboilerplate.title_bgroup, description: this.groupboilerplate.description ? this.groupboilerplate.description : this.groupboilerplate.title_bgroup,
ontology_file: '../../Ontologies' + this.groupboilerplate.ontology_file ,
}; };
this.submitted = true; this.submitted = true;
this.boilerplatelService.creategroupboil(data) this.boilerplatelService.creategroupboil(data)
.subscribe( .subscribe(
...@@ -53,4 +58,16 @@ export class AddGroupboilComponent implements OnInit { ...@@ -53,4 +58,16 @@ export class AddGroupboilComponent implements OnInit {
}; };
} }
/**
 * Load the list of available ontology files from the backend into `ontologyList`.
 * Errors are logged to the console only; the list is left unchanged on failure.
 */
getOntologies(): void {
  this.boilerplatelService.getOntolgies()
    .subscribe({
      // Observer-object form; the separate (next, error) callback overloads
      // of subscribe are deprecated in recent RxJS versions.
      next: data => {
        this.ontologyList = data;
        console.log(data);
      },
      error: error => {
        console.log(error);
      }
    });
}
} }
<div fxLayoutAlign="end"> <div fxLayoutAlign="end">
<h2><span class="badge badge-secondary"> MAIN TEMPLATE : {{this.main}}</span></h2> <h2 *ngIf="this.color_badge=='danger'" ><span class="badge badge-danger" > MAIN TEMPLATE : {{this.main}}</span></h2>
<h2 *ngIf="this.color_badge=='success'" ><span class="badge badge-success" > MAIN TEMPLATE : {{this.main}}</span></h2>
</div> </div>
<div class="form-row"> <div class="form-row">
<div class="col"> <div class="col">
......
...@@ -89,6 +89,7 @@ export class MainDetailsComponent implements OnInit { ...@@ -89,6 +89,7 @@ export class MainDetailsComponent implements OnInit {
}; };
message = ''; message = '';
main = ''; main = '';
color_badge='danger'
constructor( constructor(
private mainService: MainService, private mainService: MainService,
...@@ -141,6 +142,7 @@ export class MainDetailsComponent implements OnInit { ...@@ -141,6 +142,7 @@ export class MainDetailsComponent implements OnInit {
} }
ngDoCheck() { ngDoCheck() {
this.color_badge='danger'
this.dis(); this.dis();
} }
...@@ -173,6 +175,7 @@ export class MainDetailsComponent implements OnInit { ...@@ -173,6 +175,7 @@ export class MainDetailsComponent implements OnInit {
//add the connection data //add the connection data
} }
this.main = this.mainsyntax[x].Id this.main = this.mainsyntax[x].Id
this.color_badge='success'
return 1; return 1;
} }
} }
......
...@@ -20,6 +20,7 @@ export class GroupBoilerplate { ...@@ -20,6 +20,7 @@ export class GroupBoilerplate {
id?: any; id?: any;
title_bgroup?: string; title_bgroup?: string;
description?: string; description?: string;
ontology_file?: string;
} }
export class InferenceResults { export class InferenceResults {
......
...@@ -9,6 +9,7 @@ import { environment } from '../../environments/environment'; ...@@ -9,6 +9,7 @@ import { environment } from '../../environments/environment';
const baseUrl = '/boilerplates/'; const baseUrl = '/boilerplates/';
const inferUrl = '/infer/'; const inferUrl = '/infer/';
const baseUrlgb = environment.apiroot+'api-auth/groupboilerplates/'; const baseUrlgb = environment.apiroot+'api-auth/groupboilerplates/';
const genericbaseUrl = environment.apiroot+'api-auth';
@Injectable({ @Injectable({
providedIn: 'root' providedIn: 'root'
...@@ -70,4 +71,8 @@ export class BoilerplateService { ...@@ -70,4 +71,8 @@ export class BoilerplateService {
return this.http.get(`${baseUrlgb}${gb}`+`/search`+`/${instance}`); return this.http.get(`${baseUrlgb}${gb}`+`/search`+`/${instance}`);
} }
/**
 * Fetch the ontology datasets exposed by the backend's /ontologies/ endpoint.
 * NOTE(review): method name carries a typo ("getOntolgies"); kept as-is for
 * compatibility with existing callers.
 */
getOntolgies(): Observable<any> {
  const ontologiesUrl = `${genericbaseUrl}/ontologies/`;
  return this.http.get(ontologiesUrl);
}
} }
...@@ -95,7 +95,7 @@ SHALL_CHOICES = ( ("shall", "shall"), ("shall not", "shall not")) ...@@ -95,7 +95,7 @@ SHALL_CHOICES = ( ("shall", "shall"), ("shall not", "shall not"))
# #
#VERB_CHOICES = get_verb_instances("LO#Verb") #VERB_CHOICES = get_verb_instances("LO#Verb")
VERB_CHOICES = get_verb_file_and_instances("LO#Verb") #VERB_CHOICES = get_verb_file_and_instances("LO#Verb")
#test to add M #test to add M
CLASS_CHOICES = (("SYSTEM","SYSTEM"), ("FUNCTION","FUNCTION"), ("SHALL","SHALL/SHALL NOT"), CLASS_CHOICES = (("SYSTEM","SYSTEM"), ("FUNCTION","FUNCTION"), ("SHALL","SHALL/SHALL NOT"),
......
...@@ -11,6 +11,8 @@ from reqman.apps.reqtool.rest_api.services.parse_ontologies import * ...@@ -11,6 +11,8 @@ from reqman.apps.reqtool.rest_api.services.parse_ontologies import *
from pygments.formatters.html import HtmlFormatter from pygments.formatters.html import HtmlFormatter
from pygments import highlight from pygments import highlight
from subprocess import call
#get prefix syntax from the Onotlogy# #get prefix syntax from the Onotlogy#
#print(get_prefix_new_syntax()) #print(get_prefix_new_syntax())
...@@ -83,14 +85,20 @@ def create_infer_result(instance, destination): ...@@ -83,14 +85,20 @@ def create_infer_result(instance, destination):
def initialize_instances(instance): def initialize_instances(instance):
try : try :
g = Graph() g = Graph()
print(instance.ontology_file)
file_name = instance.title_bgroup.replace('/', '') file_name = instance.title_bgroup.replace('/', '')
save_path = "../../Ontologies/"+file_name+"/" save_path = "../../Ontologies/"+file_name+"/"
if (os.path.exists("../../Ontologies/"+file_name+"/") == False): if (os.path.exists("../../Ontologies/"+file_name+"/") == False):
os.mkdir("../../Ontologies/"+file_name) os.mkdir("../../Ontologies/"+file_name)
completeName = os.path.join(save_path, file_name) completeName = os.path.join(save_path, file_name)
g.load(instance.ontology_file, format="turtle") if (instance.ontology_file != '../../Ontologies/Default'):
destination=save_path+file_name+'.ttl' ont = instance.ontology_file.replace('../../Ontologies/', '')
g.serialize(destination, format='turtle') call("./../../Ontologies/s-get http://155.207.131.19:3030/"+ont+" default >> ../../Ontologies/"+file_name+'/'+file_name+'.ttl', shell=True)
destination=save_path+file_name+'.ttl'
else :
g.load('../../Ontologies/2022_AOCS.ttl', format="turtle")
destination=save_path+file_name+'.ttl'
g.serialize(destination, format='turtle')
except: except:
print('Cannot load Ontology') print('Cannot load Ontology')
classes_inctances = BoilerplateGroupClassesInstances ( classes_instances_owner= instance.boilerplate_owner, classes_instances_group_of_boilerplate = instance, classes_inctances = BoilerplateGroupClassesInstances ( classes_instances_owner= instance.boilerplate_owner, classes_instances_group_of_boilerplate = instance,
...@@ -113,6 +121,7 @@ def update_instances(instance): ...@@ -113,6 +121,7 @@ def update_instances(instance):
list_of_DSO.append("http://delab.csd.auth.gr/ontologies/2018/RDO-instances#") list_of_DSO.append("http://delab.csd.auth.gr/ontologies/2018/RDO-instances#")
#PREFIX_SYNTAX = get_prefix_syntax(g) #PREFIX_SYNTAX = get_prefix_syntax(g)
VERB_CHOICES_PREFIX = get_prefix_verbs(g) VERB_CHOICES_PREFIX = get_prefix_verbs(g)
VERB_CHOICES = get_verb_file_and_instances("LO#Verb", g)
SYSTEM_CHOICES = get_instances_g("SAO#System",g, list_of_DSO) SYSTEM_CHOICES = get_instances_g("SAO#System",g, list_of_DSO)
FUNCTION_CHOICES = get_instances_g("SAO#Function",g, list_of_DSO) FUNCTION_CHOICES = get_instances_g("SAO#Function",g, list_of_DSO)
ITEM_CHOICES = get_instances_g("SAO#Item",g, list_of_DSO) ITEM_CHOICES = get_instances_g("SAO#Item",g, list_of_DSO)
...@@ -147,7 +156,8 @@ def update_instances(instance): ...@@ -147,7 +156,8 @@ def update_instances(instance):
class BoilerplateGroup(models.Model): class BoilerplateGroup(models.Model):
boilerplate_owner = models.ForeignKey(User, related_name='boilerplate_group_owner', on_delete=models.CASCADE) boilerplate_owner = models.ForeignKey(User, related_name='boilerplate_group_owner', on_delete=models.CASCADE)
#add Boilerplate path, so each Boilerplate Group can hace it's own Ontology file #add Boilerplate path, so each Boilerplate Group can hace it's own Ontology file
ontology_file = models.CharField(default="../../Ontologies/2022_AOCS.ttl", max_length=100, blank=True) # ontology_file = models.CharField(default="../../Ontologies/2022_AOCS.ttl", max_length=100, blank=True)
ontology_file = models.CharField(max_length=100, blank=True)
title_bgroup = models.CharField(max_length=100, unique=True) title_bgroup = models.CharField(max_length=100, unique=True)
description = models.CharField(max_length=500) description = models.CharField(max_length=500)
......
...@@ -35,7 +35,7 @@ class BoilerplateGroupSerializer(serializers.ModelSerializer): ...@@ -35,7 +35,7 @@ class BoilerplateGroupSerializer(serializers.ModelSerializer):
boilerplate_owner = serializers.ReadOnlyField(source='owner.username') boilerplate_owner = serializers.ReadOnlyField(source='owner.username')
class Meta: class Meta:
model = BoilerplateGroup model = BoilerplateGroup
fields = ('id', 'boilerplate_owner', 'title_bgroup', 'description') fields = ('id', 'boilerplate_owner', 'ontology_file', 'title_bgroup', 'description')
class BoilerplateGroupClassesInstancesSerializer(serializers.ModelSerializer): class BoilerplateGroupClassesInstancesSerializer(serializers.ModelSerializer):
classes_instances_owner = serializers.ReadOnlyField(source='owner.username') classes_instances_owner = serializers.ReadOnlyField(source='owner.username')
......
class FusekiActions(): class FusekiActions():
def write(self, data): def write(self, data):
print("i am writing form here",data) print("i am writing form here",data)
\ No newline at end of file
...@@ -14,15 +14,11 @@ list_of_inference_metrics = ["IncompleteRequirement", "AmbiguousRequirement", ...@@ -14,15 +14,11 @@ list_of_inference_metrics = ["IncompleteRequirement", "AmbiguousRequirement",
info_metric = ["isMissingConcept", "isMissingInstanceOf", "isMissingProducer", "isMissingState", "hasAmbiguousConcept", "hasAmbiguousConcept2", info_metric = ["isMissingConcept", "isMissingInstanceOf", "isMissingProducer", "isMissingState", "hasAmbiguousConcept", "hasAmbiguousConcept2",
"hasNoisyConcept", "hasOpaqueConcept", "isInconsistentTo"] "hasNoisyConcept", "hasOpaqueConcept", "isInconsistentTo"]
prefix_syntax = get_prefix_syntax(g) def getclassofprefix(prefix, ontology_file):
return(findclassofprefix(prefix, ontology_file))
main_syntax = get_main_sytax_inference(g) def getclassofmain(main, ontology_file):
return(findclassofmain(main, ontology_file))
def getclassofprefix(prefix):
return(findclassofprefix(prefix))
def getclassofmain(main):
return(findclassofmain(main))
def getinstancefile(data, g): def getinstancefile(data, g):
datam = data.split(":") datam = data.split(":")
...@@ -257,7 +253,7 @@ def shacl(ontotlogy_file): ...@@ -257,7 +253,7 @@ def shacl(ontotlogy_file):
print(i+1, 'Cycle of inferencing') print(i+1, 'Cycle of inferencing')
#result = subprocess.check_output(["./reqman/apps/reqtool/rest_api/services/shacl-1.3.2/bin/shaclinfer.sh", "-datafile", target]) #result = subprocess.check_output(["./reqman/apps/reqtool/rest_api/services/shacl-1.3.2/bin/shaclinfer.sh", "-datafile", target])
try: try:
result = subprocess.check_output("./reqman/apps/reqtool/rest_api/services/shacl-1.4.2/bin/shaclinfer.sh -datafile "+target1+" | grep -v -e 'WARN OntDocumentManager' -e 'at org.' -e 'org.apache.' -e '@'",shell=True) result = subprocess.check_output("./reqman/apps/reqtool/rest_api/services/shacl-1.4.2/bin/shaclinfer.sh -datafile "+target1+" | grep -v -e 'WARN OntDocumentManager' -e 'at org.' -e 'org.apache.' -e '@'",shell=True, timeout=360)
except: except:
raise APIException("Problem during the Inferencing") raise APIException("Problem during the Inferencing")
if(result==final_result): if(result==final_result):
...@@ -320,7 +316,7 @@ def shacl(ontotlogy_file): ...@@ -320,7 +316,7 @@ def shacl(ontotlogy_file):
from rest_framework.exceptions import APIException from rest_framework.exceptions import APIException
'''Find if the Prefix exists in syntax''' '''Find if the Prefix exists in syntax'''
def findclassofprefix(listofprefix): def findclassofprefix(listofprefix, Ontology_file):
classlist=[] classlist=[]
currentverb = listofprefix['state_or_verb'].rsplit(' : ', 1) currentverb = listofprefix['state_or_verb'].rsplit(' : ', 1)
currentstate = listofprefix['state_or_verb'].rsplit(' : ') currentstate = listofprefix['state_or_verb'].rsplit(' : ')
...@@ -335,6 +331,9 @@ def findclassofprefix(listofprefix): ...@@ -335,6 +331,9 @@ def findclassofprefix(listofprefix):
if (currentsubjectspl[0] == "System"): if (currentsubjectspl[0] == "System"):
raise APIException("The Subject must be an Item") raise APIException("The Subject must be an Item")
# #
g = Graph()
g.load(Ontology_file, format="turtle")
prefix_syntax = get_prefix_syntax(g)
for prefix in prefix_syntax: for prefix in prefix_syntax:
for inprefix in prefix_syntax[prefix]: for inprefix in prefix_syntax[prefix]:
if (currentverb[1] in prefix_syntax[prefix][inprefix.split("_")[0]+"_Verbs"]) or (currentstate[0] in prefix_syntax[prefix][inprefix.split("_")[0]+"_Verbs"] or currentverb[1] =='is' ): if (currentverb[1] in prefix_syntax[prefix][inprefix.split("_")[0]+"_Verbs"]) or (currentstate[0] in prefix_syntax[prefix][inprefix.split("_")[0]+"_Verbs"] or currentverb[1] =='is' ):
...@@ -357,7 +356,10 @@ def findclassofprefix(listofprefix): ...@@ -357,7 +356,10 @@ def findclassofprefix(listofprefix):
'''Find if the Main exists in syntax''' '''Find if the Main exists in syntax'''
def findclassofmain(listofmain): def findclassofmain(listofmain, Ontology_file):
g = Graph()
g.load(Ontology_file, format="turtle")
main_syntax = get_main_sytax_inference(g)
classlist = [] classlist = []
for i in range(len(main_syntax)): for i in range(len(main_syntax)):
verbsmain = main_syntax[('M'+str(i+1))]['Verbs'] verbsmain = main_syntax[('M'+str(i+1))]['Verbs']
......
...@@ -6,13 +6,6 @@ from rdflib import ConjunctiveGraph, URIRef, RDFS, RDF, Namespace, Literal ...@@ -6,13 +6,6 @@ from rdflib import ConjunctiveGraph, URIRef, RDFS, RDF, Namespace, Literal
#call("./../../Ontologies/s-get http://155.207.131.19:3030/Mokos_18_1_7_47/data default >> ../../Ontologies/data.ttl", shell=True) #call("./../../Ontologies/s-get http://155.207.131.19:3030/Mokos_18_1_7_47/data default >> ../../Ontologies/data.ttl", shell=True)
#Ontology_file = "../../Ontologies/Mokos_18_1_7_47.ttl"
#Ontology_file = "../../Ontologies/Autonomy_v1.ttl"
Ontology_file = "../../Ontologies/2022_AOCS.ttl"
#Ontology_file = "../../Ontologies/Autonomy_v1.ttl"
subClass_instances = "../../Ontologies/instances_subclass.txt"
#HERE #HERE
#list_of_DSO = ["http://delab.csd.auth.gr/ontologies/2018/SAO#","http://delab.csd.auth.gr/ontologies/2018/DSO#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive-AVP#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive-AVP-instances#","http://delab.csd.auth.gr/ontologies/2018/RDO-instances#"] #list_of_DSO = ["http://delab.csd.auth.gr/ontologies/2018/SAO#","http://delab.csd.auth.gr/ontologies/2018/DSO#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive-AVP#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive-AVP-instances#","http://delab.csd.auth.gr/ontologies/2018/RDO-instances#"]
list_of_DSO = [] list_of_DSO = []
...@@ -25,8 +18,8 @@ DSO = ["http://delab.csd.auth.gr/ontologies/2018/DSO#"] ...@@ -25,8 +18,8 @@ DSO = ["http://delab.csd.auth.gr/ontologies/2018/DSO#"]
Dictionary_Data = {} Dictionary_Data = {}
Dictionary_Data_list = [] Dictionary_Data_list = []
g = Graph() #g = Graph()
g.load(Ontology_file, format="turtle") #g.load(Ontology_file, format="turtle")
'''def find_subclass_domain_range(): '''def find_subclass_domain_range():
file2 = open(r"../../Ontologies/instances.txt","w+") file2 = open(r"../../Ontologies/instances.txt","w+")
...@@ -79,7 +72,7 @@ def get_verb_instances(keyword): ...@@ -79,7 +72,7 @@ def get_verb_instances(keyword):
return tuple((str(n), str(n)) for n in (out_list)) return tuple((str(n), str(n)) for n in (out_list))
'''Get Verbs - return tuple''' '''Get Verbs - return tuple'''
def get_verb_file_and_instances(keyword): def get_verb_file_and_instances(keyword, g):
get_file_and_instances(keyword, g) get_file_and_instances(keyword, g)
keywordspl = keyword.split("#") keywordspl = keyword.split("#")
in_list = [] in_list = []
...@@ -292,7 +285,7 @@ def get_dmo_classes_and_comment(keyword, g, list_of_DSO): ...@@ -292,7 +285,7 @@ def get_dmo_classes_and_comment(keyword, g, list_of_DSO):
sub_ins.append(sub_ins_dict) sub_ins.append(sub_ins_dict)
############################### ###############################
#get Instance of all subclasses #get Instance of all subclasses
subsubclass_list= get_Classes_Dictionary(subclas_name, list_of_DSO) subsubclass_list= get_Classes_Dictionary(subclas_name, list_of_DSO, g)
suclass_dict = {'SubClass_name': subclas_name, 'SubclassComment':find_comment, 'InstanceOfSubclass': sub_ins, 'SubSubClass' : subsubclass_list} suclass_dict = {'SubClass_name': subclas_name, 'SubclassComment':find_comment, 'InstanceOfSubclass': sub_ins, 'SubSubClass' : subsubclass_list}
subclass_list.append(suclass_dict) subclass_list.append(suclass_dict)
############################################ ############################################
...@@ -303,7 +296,7 @@ def get_dmo_classes_and_comment(keyword, g, list_of_DSO): ...@@ -303,7 +296,7 @@ def get_dmo_classes_and_comment(keyword, g, list_of_DSO):
return Dictionary_Data return Dictionary_Data
def get_Classes_Dictionary(name, list_of_DSO): def get_Classes_Dictionary(name, list_of_DSO, g):
#subsubsub_ins_dict = {} #subsubsub_ins_dict = {}
subsubsubsuclass_dict = {} subsubsubsuclass_dict = {}
subsubsubsubclass_list = [] subsubsubsubclass_list = []
...@@ -330,7 +323,7 @@ def get_Classes_Dictionary(name, list_of_DSO): ...@@ -330,7 +323,7 @@ def get_Classes_Dictionary(name, list_of_DSO):
subsubsubsub_ins_dict = {"Instance": spl2[len(spl2) - 1], "Comment": subsubsubfind_comment_instance} subsubsubsub_ins_dict = {"Instance": spl2[len(spl2) - 1], "Comment": subsubsubfind_comment_instance}
subsubsubsub_ins.append(subsubsubsub_ins_dict) subsubsubsub_ins.append(subsubsubsub_ins_dict)
#get current Subclass -Subclasses / Instances #get current Subclass -Subclasses / Instances
newsubclass_list = get_Classes_Dictionary(subsubsubsubclas_name, list_of_DSO) newsubclass_list = get_Classes_Dictionary(subsubsubsubclas_name, list_of_DSO, g)
subsubsubsuclass_dict = {'SubSubClass_name': subsubsubsubclas_name, 'SubSubclassComment':subsubsubfind_comment, 'SubInstanceOfSubclass': subsubsubsub_ins, 'SubSubClass': newsubclass_list} subsubsubsuclass_dict = {'SubSubClass_name': subsubsubsubclas_name, 'SubSubclassComment':subsubsubfind_comment, 'SubInstanceOfSubclass': subsubsubsub_ins, 'SubSubClass': newsubclass_list}
subsubsubsubclass_list.append(subsubsubsuclass_dict) subsubsubsubclass_list.append(subsubsubsuclass_dict)
return subsubsubsubclass_list return subsubsubsubclass_list
...@@ -428,7 +421,7 @@ def get_related_to_subject(bnodes_uriref): ...@@ -428,7 +421,7 @@ def get_related_to_subject(bnodes_uriref):
'''Find Boilerplate Verb - return list''' '''Find Boilerplate Verb - return list'''
def get_verb(bnodes_uriref): def get_verb(bnodes_uriref, g):
verb_list = [] verb_list = []
out_list =[] out_list =[]
bnodes_uriref = [str(i) for i in bnodes_uriref] bnodes_uriref = [str(i) for i in bnodes_uriref]
...@@ -664,7 +657,7 @@ def get_prefix_syntax(g): ...@@ -664,7 +657,7 @@ def get_prefix_syntax(g):
#prefix_dict[prefix]["Verbs"] = [] #prefix_dict[prefix]["Verbs"] = []
#prefix_dict[prefix]["Attribute"] = [] #prefix_dict[prefix]["Attribute"] = []
#prefix_dict[prefix]["Subject"] = [] #prefix_dict[prefix]["Subject"] = []
bnodes_uriref = find_triples(prefix) bnodes_uriref = find_triples(prefix, g)
subclass_data = [] subclass_data = []
for per in bnodes_uriref: for per in bnodes_uriref:
tmp = get_instances_list(str(per), g) tmp = get_instances_list(str(per), g)
...@@ -744,7 +737,7 @@ def get_prefix_verbs(g): ...@@ -744,7 +737,7 @@ def get_prefix_verbs(g):
for prefix in get_prefix: for prefix in get_prefix:
prefix_list.extend(get_instances_list(prefix, g)) prefix_list.extend(get_instances_list(prefix, g))
for prefix in prefix_list: for prefix in prefix_list:
bnodes_uriref = find_triples(prefix) bnodes_uriref = find_triples(prefix, g)
subclass_data = [] subclass_data = []
for per in bnodes_uriref: for per in bnodes_uriref:
tmp = get_instances_list(str(per), g) tmp = get_instances_list(str(per), g)
...@@ -752,11 +745,11 @@ def get_prefix_verbs(g): ...@@ -752,11 +745,11 @@ def get_prefix_verbs(g):
for i in tmp: for i in tmp:
subclass_data.append(i) subclass_data.append(i)
for pre in subclass_data: for pre in subclass_data:
tmp_list2 = find_triples(pre) tmp_list2 = find_triples(pre, g)
tmp_list2 = [str(i) for i in tmp_list2] tmp_list2 = [str(i) for i in tmp_list2]
#add verbs #add verbs
if tmp_list2: if tmp_list2:
ver = get_verb(tmp_list2) ver = get_verb(tmp_list2, g)
if (len(ver)==0): if (len(ver)==0):
out_list.append('Is : is') out_list.append('Is : is')
verblist.append('Is : is') verblist.append('Is : is')
...@@ -782,7 +775,7 @@ def get_prefix_new_syntax(g): ...@@ -782,7 +775,7 @@ def get_prefix_new_syntax(g):
for prefix in prefix_list: for prefix in prefix_list:
per = URIRef("http://delab.csd.auth.gr/ontologies/2018/RBO#" + prefix) per = URIRef("http://delab.csd.auth.gr/ontologies/2018/RBO#" + prefix)
#per2 = URIRef("http://www.w3.org/2002/07/owl#allValuesFrom") #per2 = URIRef("http://www.w3.org/2002/07/owl#allValuesFrom")
bnodes_uriref = find_triples(prefix) bnodes_uriref = find_triples(prefix, g)
subclass_data = [] subclass_data = []
#print(bnodes_uriref) #print(bnodes_uriref)
for per in bnodes_uriref: for per in bnodes_uriref:
...@@ -797,7 +790,7 @@ def get_prefix_new_syntax(g): ...@@ -797,7 +790,7 @@ def get_prefix_new_syntax(g):
'''Find the triples of the prefix - return list.type(URIRef)''' '''Find the triples of the prefix - return list.type(URIRef)'''
def find_triples(prefix): def find_triples(prefix, g):
bnodes = [] bnodes = []
URIRef_list = [] URIRef_list = []
per = URIRef("http://delab.csd.auth.gr/ontologies/2018/RBO#" + prefix) per = URIRef("http://delab.csd.auth.gr/ontologies/2018/RBO#" + prefix)
......
...@@ -7,6 +7,7 @@ from .views import user_views, requirements_views, main_req_views, suffix_req_vi ...@@ -7,6 +7,7 @@ from .views import user_views, requirements_views, main_req_views, suffix_req_vi
urlpatterns = [ urlpatterns = [
path('users/', user_views.UserList.as_view(), name='api-user-list'), path('users/', user_views.UserList.as_view(), name='api-user-list'),
path('users/<uuid:pk>/', user_views.UserDetail.as_view(), name='api-post-details'), path('users/<uuid:pk>/', user_views.UserDetail.as_view(), name='api-post-details'),
path('ontologies/', requirements_views.OntologiesChoicesViewSet.as_view(), name='api-get-ontologies'),
path('groupboilerplates/', requirements_views.BoilerplateGroupCreateAPIView.as_view(), name='api-groupboilerplates-create'), path('groupboilerplates/', requirements_views.BoilerplateGroupCreateAPIView.as_view(), name='api-groupboilerplates-create'),
path('groupboilerplates/<int:pk>/', requirements_views.BoilerplateGroupDetailsAPIView.as_view(), name='api-groupboilerplates-list'), path('groupboilerplates/<int:pk>/', requirements_views.BoilerplateGroupDetailsAPIView.as_view(), name='api-groupboilerplates-list'),
......
...@@ -12,7 +12,7 @@ from reqman.apps.reqtool.rest_api.serializers.main_req import MainSerializer ...@@ -12,7 +12,7 @@ from reqman.apps.reqtool.rest_api.serializers.main_req import MainSerializer
from reqman.apps.permissions import IsOwnerOrReadOnly from reqman.apps.permissions import IsOwnerOrReadOnly
from reqman.apps.reqtool.rest_api.services import fuseki, inference from reqman.apps.reqtool.rest_api.services import fuseki, inference
from reqman.apps.reqtool.rest_api.services.parse_ontologies import Dictionary_Data_list, Ontology_file, get_main_sytax, get_main_sytax_inference from reqman.apps.reqtool.rest_api.services.parse_ontologies import get_main_sytax, get_main_sytax_inference
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework.views import APIView from rest_framework.views import APIView
...@@ -83,7 +83,6 @@ class MainDetailsAPIView(RetrieveUpdateDestroyAPIView): ...@@ -83,7 +83,6 @@ class MainDetailsAPIView(RetrieveUpdateDestroyAPIView):
numinstance= instance['numerical'] numinstance= instance['numerical']
mn = (es_instance[len(es_instance)-1] + ' ' + stateitem_instance[len(stateitem_instance)-1] + ' ' + statevalue_instance[len(statevalue_instance)-1] + ' ' + instance['shall'] + ' ' + ev_instance[len(ev_instance)-1] + ' ' + quantifier_instance[len(quantifier_instance)-1] +' '+numinstance +' ' + numunit_instance[len(numunit_instance)-1] + ' ' + eo_instance[len(eo_instance)-1] + ' ' + stsysconset_instance[len(stsysconset_instance)-1]) mn = (es_instance[len(es_instance)-1] + ' ' + stateitem_instance[len(stateitem_instance)-1] + ' ' + statevalue_instance[len(statevalue_instance)-1] + ' ' + instance['shall'] + ' ' + ev_instance[len(ev_instance)-1] + ' ' + quantifier_instance[len(quantifier_instance)-1] +' '+numinstance +' ' + numunit_instance[len(numunit_instance)-1] + ' ' + eo_instance[len(eo_instance)-1] + ' ' + stsysconset_instance[len(stsysconset_instance)-1])
boildata = BoilerplateData.objects.filter(owner_data=instance['boilerplate_of_main'].owner, boilerplate_data_id = instance['boilerplate_of_main']) boildata = BoilerplateData.objects.filter(owner_data=instance['boilerplate_of_main'].owner, boilerplate_data_id = instance['boilerplate_of_main'])
boildata.update(main_data = mn )
#get instances of the ontology that this boilerplate belongs #get instances of the ontology that this boilerplate belongs
data = BoilerplateGroupClassesInstances.objects.filter(classes_instances_owner= instance['boilerplate_of_main'].owner, classes_instances_group_of_boilerplate = boildata.values('group_of_boilerplate_data')[0]['group_of_boilerplate_data']) data = BoilerplateGroupClassesInstances.objects.filter(classes_instances_owner= instance['boilerplate_of_main'].owner, classes_instances_group_of_boilerplate = boildata.values('group_of_boilerplate_data')[0]['group_of_boilerplate_data'])
sub_data = data.values('system_choices')[0]['system_choices'] + data.values('function_choices')[0]['function_choices'] + data.values('inteface_choices')[0]['inteface_choices'] sub_data = data.values('system_choices')[0]['system_choices'] + data.values('function_choices')[0]['function_choices'] + data.values('inteface_choices')[0]['inteface_choices']
...@@ -110,12 +109,14 @@ class MainDetailsAPIView(RetrieveUpdateDestroyAPIView): ...@@ -110,12 +109,14 @@ class MainDetailsAPIView(RetrieveUpdateDestroyAPIView):
numunit_data = data.values('number_unit_choices')[0]['number_unit_choices'] numunit_data = data.values('number_unit_choices')[0]['number_unit_choices']
if( instance['mumerical_units'] not in numunit_data): if( instance['mumerical_units'] not in numunit_data):
raise APIException("MAIN : Instance of Number Unit does not exist") raise APIException("MAIN : Instance of Number Unit does not exist")
curmain_choices = inference.getclassofmain(instance) ontology_file = data.values('ontology_file')[0]['ontology_file']
curmain_choices = inference.getclassofmain(instance, ontology_file)
if(curmain_choices==None): if(curmain_choices==None):
raise APIException("Main choices not in [M1-M16]") raise APIException("Main choices not in [M1-M16]")
if (curmain_choices[0] == 'ERROR'): if (curmain_choices[0] == 'ERROR'):
raise APIException("Main choices not in [M1-M16]") raise APIException("Main choices not in [M1-M16]")
else: else:
boildata.update(main_data = mn )
instance = serializer.save(main_choices = curmain_choices[0]) instance = serializer.save(main_choices = curmain_choices[0])
#instance = serializer.save() #instance = serializer.save()
......
...@@ -22,6 +22,29 @@ from rest_framework.views import APIView ...@@ -22,6 +22,29 @@ from rest_framework.views import APIView
from rest_framework.exceptions import APIException from rest_framework.exceptions import APIException
from django.shortcuts import get_object_or_404 from django.shortcuts import get_object_or_404
import requests, json
from requests.auth import HTTPBasicAuth
from reqman.settings.environment import env
class OntologiesChoicesViewSet(APIView):
    '''Return the datasets currently hosted on the Fuseki server as choice tuples.

    GET -> Response of ((name, name), ...) pairs. 'Default' is always included
    so the client can fall back to the bundled ontology when Fuseki is
    unreachable or returns a malformed payload.
    '''
    def get(self, request):
        # Fallback shape: iterating the empty string below yields nothing,
        # so only 'Default' is offered when the server cannot be reached.
        data = {'datasets': ''}
        try:
            # SECURITY(review): admin credentials are hard-coded here; they
            # should be moved to settings/environment alongside FUSEKI_URL.
            url = requests.get(env("FUSEKI_URL") + "/$/datasets",
                               auth=HTTPBasicAuth('admin', 'pw!!!'))
            data = json.loads(url.text)
        except Exception:
            # Best-effort: a down Fuseki or bad JSON must not break the form;
            # the 'Default' entry is still returned. (Narrowed from a bare
            # except so KeyboardInterrupt/SystemExit are not swallowed.)
            pass
        list_ontology = ['Default']
        for i in data['datasets']:
            # Fuseki's admin protocol reports each dataset under 'ds.name'.
            list_ontology.append(i['ds.name'])
        ONTOLOGIES_CHOICES = tuple((str(n), str(n)) for n in list_ontology)
        return Response(ONTOLOGIES_CHOICES)
class BoilerplateGroupCreateAPIView(ListCreateAPIView): class BoilerplateGroupCreateAPIView(ListCreateAPIView):
...@@ -174,7 +197,6 @@ class PrefixDetailsAPIView(RetrieveUpdateDestroyAPIView): ...@@ -174,7 +197,6 @@ class PrefixDetailsAPIView(RetrieveUpdateDestroyAPIView):
ei_instance = instance['item_function_flow_statevalue'].rsplit(':', 1) ei_instance = instance['item_function_flow_statevalue'].rsplit(':', 1)
pr = (instance['prefix'] + ' ' + es_instance[len(es_instance)-1] + ' ' + ev_instance[len(ev_instance)-1] + ' ' + ei_instance[len(ei_instance)-1]) pr = (instance['prefix'] + ' ' + es_instance[len(es_instance)-1] + ' ' + ev_instance[len(ev_instance)-1] + ' ' + ei_instance[len(ei_instance)-1])
boildata = BoilerplateData.objects.filter(owner_data=instance['prefix_boilerplate'].owner, boilerplate_data_id = instance['prefix_boilerplate']) boildata = BoilerplateData.objects.filter(owner_data=instance['prefix_boilerplate'].owner, boilerplate_data_id = instance['prefix_boilerplate'])
boildata.update(prefix_data = pr )
#get instances of the ontology that this boilerplate belongs #get instances of the ontology that this boilerplate belongs
data = BoilerplateGroupClassesInstances.objects.filter(classes_instances_owner= instance['prefix_boilerplate'].owner, classes_instances_group_of_boilerplate = boildata.values('group_of_boilerplate_data')[0]['group_of_boilerplate_data']) data = BoilerplateGroupClassesInstances.objects.filter(classes_instances_owner= instance['prefix_boilerplate'].owner, classes_instances_group_of_boilerplate = boildata.values('group_of_boilerplate_data')[0]['group_of_boilerplate_data'])
es_data = data.values('system_choices')[0]['system_choices'] + data.values('function_choices')[0]['function_choices'] + data.values('item_choices')[0]['item_choices'] es_data = data.values('system_choices')[0]['system_choices'] + data.values('function_choices')[0]['function_choices'] + data.values('item_choices')[0]['item_choices']
...@@ -187,10 +209,12 @@ class PrefixDetailsAPIView(RetrieveUpdateDestroyAPIView): ...@@ -187,10 +209,12 @@ class PrefixDetailsAPIView(RetrieveUpdateDestroyAPIView):
if( instance['item_function_flow_statevalue'] not in ei_data): if( instance['item_function_flow_statevalue'] not in ei_data):
raise APIException("PREFIX : Instance of Object does not exist") raise APIException("PREFIX : Instance of Object does not exist")
#find the class of the prefix #find the class of the prefix
prefix_choices = inference.getclassofprefix(instance) ontology_file = data.values('ontology_file')[0]['ontology_file']
prefix_choices = inference.getclassofprefix(instance, ontology_file)
if prefix_choices[0] == 'ERROR': if prefix_choices[0] == 'ERROR':
raise APIException("Prefix choices not in [P1-P3]") raise APIException("Prefix choices not in [P1-P3]")
else: else:
boildata.update(prefix_data = pr )
serializer.save(simple_prefix = prefix_choices) serializer.save(simple_prefix = prefix_choices)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment