author    | Danijel Anđelković <adanijel99@gmail.com> | 2022-05-26 22:13:00 +0200
committer | Danijel Anđelković <adanijel99@gmail.com> | 2022-05-26 22:13:00 +0200
commit    | 71c87626ff13be56ef028e1011b75f65af9b9914 (patch)
tree      | e82673cd49d1dcc8825552c516c9873086b55514 /backend/microservice/api
parent    | 6060950ed1d6ba890ca44ae557aaa17632b9c350 (diff)
parent    | d9f2acad56bc42a05ae9c0b4fd25827fb6cea374 (diff)
Merge branch 'redesign' of http://gitlab.pmf.kg.ac.rs/igrannonica/neuronstellar into redesign
Diffstat (limited to 'backend/microservice/api')
-rw-r--r-- | backend/microservice/api/newmlservice.py | 13
1 file changed, 8 insertions, 5 deletions
diff --git a/backend/microservice/api/newmlservice.py b/backend/microservice/api/newmlservice.py
index 99e3cae5..943e18a1 100644
--- a/backend/microservice/api/newmlservice.py
+++ b/backend/microservice/api/newmlservice.py
@@ -1,3 +1,4 @@
+from cmath import nan
 from enum import unique
 from itertools import count
 import os
@@ -374,13 +375,15 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
-        classifier.compile(loss =paramsModel["lossFunction"] , optimizer =opt, metrics = ['accuracy','mae','mse'])
+        classifier.compile(loss =paramsModel["lossFunction"] , optimizer =opt, metrics = ['accuracy'])
 
         history=classifier.fit( x=x_train, y=y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']),validation_data=(x_val, y_val))
         hist=history.history
         #plt.plot(hist['accuracy'])
-        #plt.show()
+        plt.plot(history.history['loss'])
+        plt.plot(history.history['val_loss'])
+        plt.show()
 
         y_pred=classifier.predict(x_test)
         y_pred=np.argmax(y_pred,axis=1)
@@ -410,7 +413,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
         #from ann_visualizer.visualize import ann_viz;
         #ann_viz(classifier, title="My neural network")
 
-        return filepath,[hist['loss'],hist['val_loss'],hist['accuracy'],hist['val_accuracy'],hist['mae'],hist['val_mae'],hist['mse'],hist['val_mse']]
+        return filepath,[hist['loss'],hist['val_loss'],hist['accuracy'],hist['val_accuracy'],[],[],[],[]]
 
     elif(problem_type=='binarni-klasifikacioni'):
         #print('*************************************************************************binarni')
@@ -444,7 +447,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
         classifier.add(tf.keras.layers.Dense(units=1, activation=paramsModel['outputLayerActivationFunction']))#izlazni sloj
 
-        classifier.compile(loss =paramsModel["lossFunction"] , optimizer =opt , metrics = ['accuracy','mae','mse'])
+        classifier.compile(loss =paramsModel["lossFunction"] , optimizer =opt , metrics = ['accuracy'])
 
         history=classifier.fit( x=x_train, y=y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']),validation_data=(x_val, y_val))
         hist=history.history
@@ -468,7 +471,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
         logloss = float(sm.log_loss(y_test, y_pred))
         """
 
-        return filepath,[hist['loss'],hist['val_loss'],hist['accuracy'],hist['val_accuracy'],hist['mae'],hist['val_mae'],hist['mse'],hist['val_mse']]
+        return filepath,[hist['loss'],hist['val_loss'],hist['accuracy'],hist['val_accuracy'],[],[],[],[]]
 
     elif(problem_type=='regresioni'):
         reg=paramsModel['layers'][0]['regularisation']
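Editor's note on the metric change (a minimal sketch, not part of the commit): Keras records in `history.history` only the metrics passed to `compile()`, so once `'mae'` and `'mse'` are dropped the keys `hist['mae']`, `hist['val_mae']`, `hist['mse']` and `hist['val_mse']` no longer exist, which is why the patched `return` pads those positions with empty lists. The model, data, and hyperparameters below are made up purely to show the history keys:

```python
# Sketch: history keys mirror the metrics passed to compile().
import numpy as np
import tensorflow as tf

x = np.random.rand(32, 4).astype("float32")
y = np.random.randint(0, 3, size=(32,))

model = tf.keras.Sequential([
    tf.keras.layers.Dense(8, activation="relu", input_shape=(4,)),
    tf.keras.layers.Dense(3, activation="softmax"),
])
model.compile(loss="sparse_categorical_crossentropy",
              optimizer="adam",
              metrics=["accuracy"])   # no 'mae'/'mse' any more

history = model.fit(x, y, epochs=1, validation_split=0.25, verbose=0)
hist = history.history
print(sorted(hist))  # ['accuracy', 'loss', 'val_accuracy', 'val_loss']

# The same padding the patched train() performs for the missing metric curves:
result = [hist['loss'], hist['val_loss'],
          hist['accuracy'], hist['val_accuracy'],
          [], [], [], []]
```

The same reasoning covers the plotting change in the first hunk: `history.history['loss']` and `['val_loss']` are always present regardless of which metrics were compiled, so the new `plt.plot` calls are safe in both branches.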