Commit 930410c0 authored by Thodoris Nestoridis's avatar Thodoris Nestoridis

Production (Api-Docker) (http-server Angular)

parent 98d70a8b
For Production
`ng build --prod`
It creates the /dist folder in the project root.
------------------------------------------------
Install http-server (requires the npm package manager)
`npm install http-server@13.0.2`
Run the http-server
`npx http-server --cors -e`
# ReqmanAngular11
This project was generated with [Angular CLI](https://github.com/angular/angular-cli) version 11.1.4.
......
......@@ -54,8 +54,8 @@
"budgets": [
{
"type": "initial",
"maximumWarning": "500kb",
"maximumError": "1mb"
"maximumWarning": "2mb",
"maximumError": "5mb"
},
{
"type": "anyComponentStyle",
......
This diff is collapsed.
......@@ -27,9 +27,10 @@
"@ng-bootstrap/ng-bootstrap": "^9.0.2",
"angular-notifier": "^6.0.1",
"bootstrap": "^4.6.0",
"echarts": "^5.1.2",
"echarts": "^5.3.2",
"http-server": "^13.0.2",
"jwt-decode": "^3.1.2",
"moment": "^2.29.1",
"moment": "^2.29.3",
"ng2-search-filter": "^0.5.1",
"ngx-echarts": "^7.0.1",
"rxjs": "~6.6.0",
......
......@@ -37,6 +37,8 @@ import { Ng2SearchPipeModule } from 'ng2-search-filter';
import { NgxEchartsModule } from 'ngx-echarts';
import { CourseDialogComponentComponent } from './components/course-dialog-component/course-dialog-component.component';
import {APP_BASE_HREF} from '@angular/common';
@NgModule({
......@@ -85,6 +87,7 @@ import { CourseDialogComponentComponent } from './components/course-dialog-compo
useClass: AuthInterceptor,
multi: true,
},
{provide: APP_BASE_HREF, useValue: '/'},
],
bootstrap: [AppComponent],
})
......
......@@ -136,7 +136,7 @@ export class MainDetailsComponent implements OnInit {
ngOnInit(): void {
this.getAllMain(this.route.snapshot.params.id);
this.getchoices();
this.getchoices(this.route.snapshot.params.gb);
this.getmainchoices(this.route.snapshot.params.gb);
}
......@@ -377,12 +377,12 @@ export class MainDetailsComponent implements OnInit {
}
}
getchoices(): void {
this.mainService.getmainsyntax()
getchoices(id: string): void {
this.mainService.getmainsyntax(id)
.subscribe(
data => {
this.mainsyntax = data
//console.log(data);
//console.log(this.mainsyntax);
},
error => {
......
......@@ -162,8 +162,8 @@ export class MainService {
return this.http.get(`${baseUrl2}${gb}`+`/dictionary/`);
}
getmainsyntax(): Observable<any> {
return this.http.get(environment.apiroot +`api-auth/mainsyntax`);
getmainsyntax(gb : any): Observable<any> {
return this.http.get(`${baseUrl2}${gb}`+`/mainsyntax/`);
}
setinstances(gb : any, instance: any, comment:any): Observable<any> {
......
// Production environment configuration for the Angular app
// (selected by `ng build --prod` via fileReplacements).
export const environment = {
  // Base URL of the backend REST API; consumed by services as
  // `environment.apiroot + '...'` (see MainService.getmainsyntax).
  // NOTE(review): hard-coded IP — presumably the production Django host; confirm.
  apiroot : "http://155.207.131.19:8000/",
  // Enables Angular production mode (disables dev assertions/logging).
  production: true
};
......@@ -12,6 +12,9 @@
will run only the services needed for development, such as PostgreSQL, Redis, etc.
`docker-compose -f build/docker-compose-api.yml up`
`sudo docker-compose -f build/docker-compose-api.yml up`
will run all needed services and API (backend application)
then connect to the Docker container (currently named "build_api_reqman_1") to create the superuser
`sudo docker exec -it build_api_reqman_1 /bin/bash `
......@@ -9,7 +9,7 @@ from reqman.apps.reqtool.rest_api.services.parse_ontologies import *
#find_subclass_domain_range()
#MAIN SYNTAX
MAIN_SYNTAX = get_main_sytax()
import pprint
#pprint.pprint(get_main_sytax())
......
......@@ -36,9 +36,7 @@ LOGICAL_EXPRESSION = ( ("",""),
#Need fixing
#VERB_CHOICES_PREFIX = ( ("",""))
list_of_DSO = ["http://delab.csd.auth.gr/ontologies/2018/SAO#","http://delab.csd.auth.gr/ontologies/2018/DSO#", "http://delab.csd.auth.gr/ontologies/2018/DSO-AOCS#","http://delab.csd.auth.gr/ontologies/2018/RDO-instances#"]#,
list_of_RBO = ["http://delab.csd.auth.gr/ontologies/2018/LO#","http://delab.csd.auth.gr/ontologies/2018/RBO#","http://delab.csd.auth.gr/ontologies/2018/LO-instances#"]
#list_of_LO = ["http://delab.csd.auth.gr/ontologies/2018/LO#","http://delab.csd.auth.gr/ontologies/2018/RBO#","http://delab.csd.auth.gr/ontologies/2018/LO-instances#"]
#Must read this info from the Ontology
......@@ -105,8 +103,14 @@ def initialize_instances(instance):
'''Update instaces of the ontology after the inferencing'''
def update_instances(instance):
list_of_DSO=[]
g = Graph()
g.load(instance.ontology_file, format="turtle")
for ns_prefix, namespace in g.namespaces():
if ('http://delab.csd.auth.gr/ontologies/2018/DSO' in namespace):
list_of_DSO.append(str(namespace))
list_of_DSO.append("http://delab.csd.auth.gr/ontologies/2018/SAO#")
list_of_DSO.append("http://delab.csd.auth.gr/ontologies/2018/RDO-instances#")
#PREFIX_SYNTAX = get_prefix_syntax(g)
VERB_CHOICES_PREFIX = get_prefix_verbs(g)
SYSTEM_CHOICES = get_instances_g("SAO#System",g, list_of_DSO)
......@@ -133,7 +137,7 @@ def update_instances(instance):
Dict_list=[]
sao_names=['SAO#System', 'SAO#Function', 'SAO#Item', 'SAO#Interface', 'SAO#Connection', 'SAO#Flow', 'SAO#State', 'SAO#StateSet', 'SAO#StateValue']
for i in sao_names:
Dict_list.append(get_dmo_classes_and_comment(i, g))
Dict_list.append(get_dmo_classes_and_comment(i, g, list_of_DSO))
#Dictionary_Data_list.append
BoilerplateGroupDictionary.objects.filter(classes_instances_owner_dic= instance.owner_infer, classes_instances_group_of_boilerplate_dic = instance.infer_group_of_boilerplate).update(ontology_file_dic =instance.ontology_file, Dictionary_Data =Dict_list)
......
......@@ -6,7 +6,7 @@ from reqman.apps.reqtool.rest_api.services.parse_ontologies import *
#HERE
#Ontology_file = "../../Ontologies/Autonomy_v1.ttl"
Ontology_file = "../../Ontologies/2022_AOCS.ttl"
#Ontology_file = "../../Ontologies/2022_AOCS.ttl"
list_of_inference_metrics = ["IncompleteRequirement", "AmbiguousRequirement",
"InconsistentRequirement","NoisyRequirement","OpaqueRequirement","RedundantRequirement"]
......@@ -118,7 +118,7 @@ def exportboiltottl(ontfile,project, prefix, boilerplate, main, suffix):
g.add((per_instances+(prefix_verb_splitter[0]+"OccuringFunctionality"+title), rbo+"isRelatedToFunction", rbo + prefix_attr_splitter[len(prefix_attr_splitter)-1].strip()))
except:
raise APIException("The instances in Prefix of Boilerplate "+title+" are not exist in the Ontology")
try:
#try:
if (boilerplate[i][0]['has_main'] == True): #If the requirement has Main
g.add((per_instances + title, rdo + 'hasMain', per_instances+(title+"_"+ main[i][0]['main_choices'])))
g.add((per_instances+(title+"_"+ main[i][0]['main_choices']), RDF.type, rbo + main[i][0]['main_choices']))
......@@ -173,8 +173,8 @@ def exportboiltottl(ontfile,project, prefix, boilerplate, main, suffix):
#isRelatedToTimeUnit
if(main[i][0]["mumerical_units"]!=""):
g.add((per_instances+(title+ "_Quantifier_Numerical"), rbo+"isRelatedToNumberUnit", lo+numunit[len(numunit)-1].strip() ))
except:
raise APIException("The instances in MAIN of Boilerplate "+title+" are not exist in the Ontology")
#except:
# raise APIException("The instances in MAIN of Boilerplate "+title+" are not exist in the Ontology")
try:
if (boilerplate[i][0]['has_suffix'] == True):
typesuffix = suffix[i][0]['s_choices'].split(':')
......
......@@ -15,7 +15,7 @@ subClass_instances = "../../Ontologies/instances_subclass.txt"
#HERE
#list_of_DSO = ["http://delab.csd.auth.gr/ontologies/2018/SAO#","http://delab.csd.auth.gr/ontologies/2018/DSO#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive-AVP#", "http://delab.csd.auth.gr/ontologies/2018/DSO-Automotive-AVP-instances#","http://delab.csd.auth.gr/ontologies/2018/RDO-instances#"]
list_of_DSO = ["http://delab.csd.auth.gr/ontologies/2018/SAO#","http://delab.csd.auth.gr/ontologies/2018/DSO#", "http://delab.csd.auth.gr/ontologies/2018/DSO-AOCS#", "http://delab.csd.auth.gr/ontologies/2018/DSO-AOCS-instances","http://delab.csd.auth.gr/ontologies/2018/RDO-instances#"]#,
list_of_DSO = []
list_of_rbo = ["http://delab.csd.auth.gr/ontologies/2018/RBO#"]
list_of_lo = ["http://delab.csd.auth.gr/ontologies/2018/LO#"]
#list_of_RMO = ["RMO", "RMO-instances", "DSO-AOCS-instances", "<http://delab.csd.auth.gr/ontologies/2018/RDO-instances"]
......@@ -28,7 +28,7 @@ Dictionary_Data_list = []
g = Graph()
g.load(Ontology_file, format="turtle")
def find_subclass_domain_range():
'''def find_subclass_domain_range():
file2 = open(r"../../Ontologies/instances.txt","w+")
for subj, obj in g.subject_objects(predicate=RDFS.subClassOf):
......@@ -39,10 +39,10 @@ def find_subclass_domain_range():
file2.write(subClass)
for subj, obj in g.subject_objects(predicate=RDFS.range):
subClass = subj + "$"+ obj + "\n"
file2.write(subClass)
file2.write(subClass)'''
def findsubclass():
def findsubclass(g):
subclasslist = []
for subj, obj in g.subject_objects(predicate=RDFS.subClassOf):
subClass = subj + "$"+ obj + "\n"
......@@ -99,17 +99,10 @@ def get_verb_file_and_instances(keyword):
'''Get Instaces - return tuple'''
def get_instances(keyword):
#get_file_and_instances(keyword)
def get_instances(keyword, list_of_DSO):
keywordspl = keyword.split("#")
in_list = [keywordspl[len(keywordspl)-1]]
out_list = []
#with open('../../Ontologies/instances.txt') as f:
# for data in f:
# if (data.strip()).endswith(keyword):
# spliter_1 = data.split('$')
# spliter_2 = spliter_1[0].split("#")
# in_list.append(spliter_2[len(spliter_2) - 1])
for extra in in_list:
for uri in list_of_DSO:
per = URIRef(uri +extra)
......@@ -136,16 +129,9 @@ info_dict = {}
'''Get Instaces - return tuple'''
def get_instances_g(keyword, g, list_of_DSO):
#get_file_and_instances(keyword)
keywordspl = keyword.split("#")
in_list = [keywordspl[len(keywordspl)-1]]
out_list = []
#with open('../../Ontologies/instances.txt') as f:
# for data in f:
# if (data.strip()).endswith(keyword):
# spliter_1 = data.split('$')
# spliter_2 = spliter_1[0].split("#")
# in_list.append(spliter_2[len(spliter_2) - 1])
for extra in in_list:
for uri in list_of_DSO:
per = URIRef(uri +extra)
......@@ -171,6 +157,11 @@ out_list = []
info_dict = {}
def get_file_and_instances(keyword, g):
for ns_prefix, namespace in g.namespaces():
if ('http://delab.csd.auth.gr/ontologies/2018/DSO' in namespace):
list_of_DSO.append(str(namespace))
list_of_DSO.append("http://delab.csd.auth.gr/ontologies/2018/SAO#")
list_of_DSO.append("http://delab.csd.auth.gr/ontologies/2018/RDO-instances#")
out_list = []
keywordspl = keyword.split("#")
in_list = [keywordspl[len(keywordspl)-1]]
......@@ -195,7 +186,7 @@ def get_file_and_instances(keyword, g):
#Instances and Comments
def get_dmo_instance_and_comment(keyword):
'''def get_dmo_instance_and_comment(keyword):
dmo_dic = {}
dic_list = []
mid_list = []
......@@ -228,12 +219,12 @@ def get_dmo_instance_and_comment(keyword):
uri = spl[len(spl) - 2].split("/")
out_list_com.append(str(extra))
out_list_com.append((str(o).replace("\r","")).replace("\n",""))
return dic_list
return dic_list'''
#Classes and Comments
def get_dmo_classes_and_comment(keyword, g):
def get_dmo_classes_and_comment(keyword, g, list_of_DSO):
dic_list = []
dic_list_in = []
dmo_dic_backup = {}
......@@ -301,7 +292,7 @@ def get_dmo_classes_and_comment(keyword, g):
sub_ins.append(sub_ins_dict)
###############################
#get Instance of all subclasses
subsubclass_list= get_Classes_Dictionary(subclas_name)
subsubclass_list= get_Classes_Dictionary(subclas_name, list_of_DSO)
suclass_dict = {'SubClass_name': subclas_name, 'SubclassComment':find_comment, 'InstanceOfSubclass': sub_ins, 'SubSubClass' : subsubclass_list}
subclass_list.append(suclass_dict)
############################################
......@@ -312,7 +303,7 @@ def get_dmo_classes_and_comment(keyword, g):
return Dictionary_Data
def get_Classes_Dictionary(name):
def get_Classes_Dictionary(name, list_of_DSO):
#subsubsub_ins_dict = {}
subsubsubsuclass_dict = {}
subsubsubsubclass_list = []
......@@ -339,7 +330,7 @@ def get_Classes_Dictionary(name):
subsubsubsub_ins_dict = {"Instance": spl2[len(spl2) - 1], "Comment": subsubsubfind_comment_instance}
subsubsubsub_ins.append(subsubsubsub_ins_dict)
#get current Subclass -Subclasses / Instances
newsubclass_list = get_Classes_Dictionary(subsubsubsubclas_name)
newsubclass_list = get_Classes_Dictionary(subsubsubsubclas_name, list_of_DSO)
subsubsubsuclass_dict = {'SubSubClass_name': subsubsubsubclas_name, 'SubSubclassComment':subsubsubfind_comment, 'SubInstanceOfSubclass': subsubsubsub_ins, 'SubSubClass': newsubclass_list}
subsubsubsubclass_list.append(subsubsubsuclass_dict)
return subsubsubsubclass_list
......@@ -348,7 +339,7 @@ def get_Classes_Dictionary(name):
#Classes of classes and Comments
def get_dmo_classes_of_classes_and_comment(keyword):
def get_dmo_classes_of_classes_and_comment(keyword, list_of_DSO):
dmo_dic = {}
dic_list = []
out_list_instance = []
......@@ -381,8 +372,8 @@ def get_dmo_classes_of_classes_and_comment(keyword):
'''Get Instaces - return list'''
def get_instances_list(keyword):
f = findsubclass()
def get_instances_list(keyword, g):
f = findsubclass(g)
an_list=[]
for data in f:
if (data.strip()).endswith(keyword):
......@@ -516,7 +507,7 @@ def get_attribute(main, bnodes_uriref, subjects, related_subjects, verbs):
'''Find Main syntax - return a dict with the syntax'''
def get_main_sytax_inference(g):
get_main = get_instances_list("RBO#Main")
get_main = get_instances_list("RBO#Main", g)
main_dict = {}
for main in get_main:
main_dict[main] = {}
......@@ -587,8 +578,8 @@ def get_main_sytax_inference(g):
'''Find Main syntax - return a dict with the syntax'''
def get_main_sytax():
get_main = get_instances_list("RBO#Main")
def get_main_sytax(g):
get_main = get_instances_list("RBO#Main",g)
main_dict = {}
main_dict_list = []
for main in get_main:
......@@ -665,9 +656,9 @@ def get_main_sytax():
def get_prefix_syntax(g):
prefix_dict={}
prefix_list = []
get_prefix = get_instances_list("RBO#Prefix")
get_prefix = get_instances_list("RBO#Prefix",g)
for prefix in get_prefix:
prefix_list.extend(get_instances_list(prefix))
prefix_list.extend(get_instances_list(prefix, g))
for prefix in prefix_list:
prefix_dict[prefix] = {}
#prefix_dict[prefix]["Verbs"] = []
......@@ -676,7 +667,7 @@ def get_prefix_syntax(g):
bnodes_uriref = find_triples(prefix)
subclass_data = []
for per in bnodes_uriref:
tmp = get_instances_list(str(per))
tmp = get_instances_list(str(per), g)
bnodes_uriref.extend(tmp)
for i in tmp:
subclass_data.append(i)
......@@ -749,14 +740,14 @@ def get_prefix_verbs(g):
prefix_list = []
verblist =[]
out_list = []
get_prefix = get_instances_list("RBO#Prefix")
get_prefix = get_instances_list("RBO#Prefix", g)
for prefix in get_prefix:
prefix_list.extend(get_instances_list(prefix))
prefix_list.extend(get_instances_list(prefix, g))
for prefix in prefix_list:
bnodes_uriref = find_triples(prefix)
subclass_data = []
for per in bnodes_uriref:
tmp = get_instances_list(str(per))
tmp = get_instances_list(str(per), g)
bnodes_uriref.extend(tmp)
for i in tmp:
subclass_data.append(i)
......@@ -781,12 +772,12 @@ def get_prefix_verbs(g):
'''The new prefix syntaxt parser'''
def get_prefix_new_syntax():
def get_prefix_new_syntax(g):
bnodes = []
prefix_list = []
get_prefix = get_instances_list("RBO#Prefix")
get_prefix = get_instances_list("RBO#Prefix", g)
for prefix in get_prefix:
prefix_list.extend(get_instances_list(prefix))
prefix_list.extend(get_instances_list(prefix, g))
#owl:allValuesFrom
for prefix in prefix_list:
per = URIRef("http://delab.csd.auth.gr/ontologies/2018/RBO#" + prefix)
......@@ -795,7 +786,7 @@ def get_prefix_new_syntax():
subclass_data = []
#print(bnodes_uriref)
for per in bnodes_uriref:
tmp = get_instances_list(str(per))
tmp = get_instances_list(str(per), g)
bnodes_uriref.extend(tmp)
for i in tmp:
subclass_data.append(i)
......
......@@ -45,7 +45,7 @@ urlpatterns = [
#Dictionary
path('groupboilerplates/<int:groupboil>/dictionary/', main_req_views.DictionaryViewSet.as_view(), name='dictionary'),
path('mainsyntax/', main_req_views.MainSyntaxViewSet.as_view(), name='mainsyntax'),
path('groupboilerplates/<int:groupboil>/mainsyntax/', main_req_views.MainSyntaxViewSet.as_view(), name='mainsyntax'),
#Search In Ontology
path('groupboilerplates/<int:groupboil>/search/<str:instance>/', main_req_views.SearchinOntologyAPIView.as_view(), name='setmainchoices'),
......
......@@ -12,12 +12,12 @@ from reqman.apps.reqtool.rest_api.serializers.main_req import MainSerializer
from reqman.apps.permissions import IsOwnerOrReadOnly
from reqman.apps.reqtool.rest_api.services import fuseki, inference
from reqman.apps.reqtool.rest_api.services.parse_ontologies import Dictionary_Data_list, Ontology_file
from reqman.apps.reqtool.rest_api.services.parse_ontologies import Dictionary_Data_list, Ontology_file, get_main_sytax, get_main_sytax_inference
from rest_framework.response import Response
from rest_framework.views import APIView
from reqman.apps.reqtool.models.main_req import SHALL_CHOICES, MAIN_SYNTAX
from reqman.apps.reqtool.models.main_req import SHALL_CHOICES #, MAIN_SYNTAX
from reqman.apps.reqtool.models.requirements import BoilerplateData, BoilerplateGroupClassesInstances, BoilerplateGroupDictionary
from rest_framework.exceptions import APIException
......@@ -512,8 +512,13 @@ class DictionaryViewSet(ListAPIView):
return queryset_cl
class MainSyntaxViewSet(APIView):
serializer_class = BoilerplateGroupClassesInstances
def get(self, request):
def get(self, request, groupboil, format=None):
data = BoilerplateGroupClassesInstances.objects.filter(classes_instances_group_of_boilerplate = groupboil)
g = Graph()
g.load(data.values('ontology_file')[0]['ontology_file'], format="turtle")
MAIN_SYNTAX = get_main_sytax(g)
response = Response(MAIN_SYNTAX)
return response
......
......@@ -66,8 +66,9 @@ pillow==6.1.0
rdflib==5.0.0
sparqlwrapper==1.8.5
django-jsonfield==1.4.1
djangorestframework-jwt==1.5.2
djangorestframework-jwt==1.6 #djangorestframework-jwt==1.5.2
drf-jwt==1.17.3
django-cors-headers
# The following packages are considered to be unsafe in a requirements file:
# setuptools==41.2.0 # via ipdb, ipython
FROM python:latest
FROM python:3.7
# Install OpenJDK-11 for shacl inferencing
RUN apt-get update && \
apt-get install -y openjdk-11-jre-headless && \
apt-get clean;
RUN mkdir Ontologies
WORKDIR /Ontologies
COPY Ontologies/ /Ontologies/
WORKDIR /api
COPY api/ /api/
RUN chmod -R 777 reqman/apps/reqtool/rest_api/services/shacl-1.4.2/
COPY build/docker-entrypoint-api.sh /api/
RUN ["chmod", "+x", "/api/docker-entrypoint-api.sh"]
RUN ["pip", "install", "-r", "requirements/dev.txt"]
RUN ["pip3", "install", "-r", "requirements/dev.txt"]
ENTRYPOINT ["/api/docker-entrypoint-api.sh"]
......
......@@ -2,6 +2,14 @@
cd /api/
python manage.py migrate
#create static file for css django admin
mkdir reqman/static
python manage.py collectstatic
#make migrations of models
python manage.py makemigrations account
python manage.py makemigrations common
python manage.py makemigrations reqtool
python manage.py migrate --run-syncdb
exec "$@"
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment