Diffstat (limited to 'backend/microservice/api')
-rw-r--r--  backend/microservice/api/controller.py      18
-rw-r--r--  backend/microservice/api/newmlservice.py    10
2 files changed, 19 insertions(+), 9 deletions(-)
diff --git a/backend/microservice/api/controller.py b/backend/microservice/api/controller.py
index 437690ee..f0f36907 100644
--- a/backend/microservice/api/controller.py
+++ b/backend/microservice/api/controller.py
@@ -1,4 +1,6 @@
+from cmath import log
from dataclasses import dataclass
+from distutils.command.upload import upload
from gc import callbacks
from xmlrpc.client import DateTime
import flask
@@ -31,16 +33,24 @@ class Predictor:
class train_callback(tf.keras.callbacks.Callback):
- def __init__(self, x_test, y_test):
+ def __init__(self, x_test, y_test,modelId):
self.x_test = x_test
self.y_test = y_test
+ self.modelId=modelId
#
def on_epoch_end(self, epoch, logs=None):
- print(epoch)
+ #print('Evaluation: ', self.model.evaluate(self.x_test,self.y_test),"\n")
+
+ #print(epoch)
+
+ #print(logs)
+
#ml_socket.send(epoch)
#file = request.files.get("file")
url = config.api_url + "/Model/epoch"
- requests.post(url, epoch).text
+ r=requests.post(url, json={"Stat":str(logs),"ModelId":str(self.modelId),"EpochNum":epoch}).text
+
+ #print(r)
#print('Evaluation: ', self.model.evaluate(self.x_test,self.y_test),"\n") #the number of returned values depends on the chosen metrics; loss is the default
@app.route('/train', methods = ['POST'])
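
For reference, a minimal sketch of what the updated callback does after this change: at the end of every epoch it POSTs the metrics Keras collected (logs), the model id and the epoch number to the backend's /Model/epoch endpoint. config is the project's own config module already imported by controller.py; the payload fields are the ones shown in the hunk above.

import requests
import tensorflow as tf

import config  # project config module providing api_url, as used in controller.py

class train_callback(tf.keras.callbacks.Callback):
    # Reports per-epoch training stats back to the backend so progress can be shown live.
    def __init__(self, x_test, y_test, modelId):
        super().__init__()
        self.x_test = x_test
        self.y_test = y_test
        self.modelId = modelId

    def on_epoch_end(self, epoch, logs=None):
        # logs is a dict holding the loss and any metrics configured for the model.
        url = config.api_url + "/Model/epoch"
        requests.post(url, json={
            "Stat": str(logs),
            "ModelId": str(self.modelId),
            "EpochNum": epoch,
        })
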
@@ -63,7 +73,7 @@ def train():
url = config.api_url + "/file/h5"
files = {'file': open(filepath, 'rb')}
- r=requests.post(url, files=files)
+ r=requests.post(url, files=files,data={"uploaderId":paramsExperiment['uploaderId']})
fileId=r.text
predictor = Predictor(
_id = "",
diff --git a/backend/microservice/api/newmlservice.py b/backend/microservice/api/newmlservice.py
index 585db480..a9bce3bb 100644
--- a/backend/microservice/api/newmlservice.py
+++ b/backend/microservice/api/newmlservice.py
@@ -252,7 +252,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
opt=tf.keras.optimizers.RMSprop(learning_rate=params['learningRate'])
###REGULARIZATION
- #regularisation={'kernelType':'l1 or l2 or l1_l2','krenelRate':default=0.01 or one of the values (0.0001,0.001,0.1,1,2,3) or a value supplied by the user,'biasType':'','biasRate':'','activityType','activityRate'}
+ #regularisation={'kernelType':'l1 or l2 or l1_l2','kernelRate':default=0.01 or one of the values (0.0001,0.001,0.1,1,2,3) or a value supplied by the user,'biasType':'','biasRate':'','activityType','activityRate'}
reg=params['regularisation']
###Kernel
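
A minimal sketch of how kernel, bias and activity regularizers could be built from a params dictionary shaped like the comment above; the build_regularizer helper is hypothetical, only the tf.keras.regularizers calls mirror the (commented-out) code in this file.

import tensorflow as tf

def build_regularizer(reg_type, rate):
    # rate is a single float for l1/l2, or a pair [l1, l2] for l1_l2.
    if reg_type == 'l1':
        return tf.keras.regularizers.l1(rate)
    if reg_type == 'l2':
        return tf.keras.regularizers.l2(rate)
    if reg_type == 'l1l2':
        return tf.keras.regularizers.l1_l2(l1=rate[0], l2=rate[1])
    return None  # no regularisation selected

reg = params['regularisation']
kernelreg = build_regularizer(reg['kernelType'], reg['kernelRate'])
biasreg = build_regularizer(reg['biasType'], reg['biasRate'])
activityreg = build_regularizer(reg['activityType'], reg['activityRate'])
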
@@ -279,7 +279,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
elif(reg['kernelType']=='l1l2'):
activityreg=tf.keras.regularizers.l1_l2(l1=reg['activityRate'][0],l2=reg['activityRate'][1])
"""
- filepath=os.path.join("temp/",paramsExperiment['_id']+"_"+paramsModel['_id'])
+ filepath=os.path.join("temp/",paramsExperiment['_id']+"_"+paramsModel['_id']+".h5")
if(problem_type=='multi-klasifikacioni'):
#print('multi')
classifier=tf.keras.Sequential()
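
The .h5 suffix added to the checkpoint path matters because Keras chooses the save format from the file extension: a path ending in .h5 makes model.save write a single HDF5 file, which is what controller.py later opens and uploads to /file/h5. A minimal sketch, assuming the service saves the trained model to this path once fitting is done:

import os

# One checkpoint file per experiment/model pair, as built in the diff above.
filepath = os.path.join("temp/", paramsExperiment['_id'] + "_" + paramsModel['_id'] + ".h5")

# ... after fit() finishes ...
classifier.save(filepath)  # .h5 extension => single HDF5 file
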
@@ -294,7 +294,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
classifier.compile(loss =paramsModel["lossFunction"] , optimizer = paramsModel['optimizer'] , metrics =paramsModel['metrics'])
- history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=paramsModel['batchSize'])
+ history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=paramsModel['batchSize'],callbacks=callback(x_test, y_test,paramsModel['_id']))
hist=history.history
#plt.plot(hist['accuracy'])
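
The callback argument that train() receives appears to be the train_callback class from controller.py, so calling it here builds an instance bound to the test split and the model id, and Keras then runs its on_epoch_end hook after every epoch. A minimal sketch of the same wiring, passing the instance inside a list (the form the Keras docs give for the callbacks argument):

# callback is the train_callback class handed over from controller.py
epoch_reporter = callback(x_test, y_test, paramsModel['_id'])

history = classifier.fit(
    x_train, y_train,
    epochs=paramsModel['epochs'],
    batch_size=paramsModel['batchSize'],
    callbacks=[epoch_reporter],   # list of tf.keras.callbacks.Callback instances
)
hist = history.history            # per-epoch loss/metric values
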
@@ -326,7 +326,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
classifier.compile(loss =paramsModel["lossFunction"] , optimizer = paramsModel['optimizer'] , metrics =paramsModel['metrics'])
- history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=paramsModel['batchSize'])
+ history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=paramsModel['batchSize'],callbacks=callback(x_test, y_test,paramsModel['_id']))
hist=history.history
y_pred=classifier.predict(x_test)
y_pred=(y_pred>=0.5).astype('int')
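
In this branch (binary classification, judging by the 0.5 threshold) the raw sigmoid outputs from predict are turned into hard 0/1 labels before any report metrics are computed. A minimal sketch of that step:

import numpy as np

y_prob = classifier.predict(x_test)       # sigmoid outputs in [0, 1]
y_pred = (y_prob >= 0.5).astype('int')    # threshold at 0.5 -> class labels 0/1
accuracy = float(np.mean(y_pred.ravel() == np.asarray(y_test).ravel()))
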
@@ -352,7 +352,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
classifier.compile(loss =paramsModel["lossFunction"] , optimizer = paramsModel['optimizer'] , metrics =paramsModel['metrics'])
- history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=paramsModel['batchSize'])
+ history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=paramsModel['batchSize'],callbacks=callback(x_test, y_test,paramsModel['_id']))
hist=history.history
y_pred=classifier.predict(x_test)
#print(classifier.evaluate(x_test, y_test))