author    Nevena Bojovic <nenabojov@gmail.com>  2022-05-10 23:19:30 +0200
committer Nevena Bojovic <nenabojov@gmail.com>  2022-05-10 23:19:30 +0200
commit    620288faaae5cbdd70c50bd07672ede1fdf2394e (patch)
tree      58c789f65bae6750fec004abd6002e5d6ac1c845
parent    c02a7f0793a7b290029ec81859cdea5724a3f7dc (diff)
parent    1690c70e86e5f79fa5708ea12ed034bf605cf259 (diff)
Merge branch 'redesign' of http://gitlab.pmf.kg.ac.rs/igrannonica/neuronstellar into redesign
-rw-r--r--  backend/microservice/api/newmlservice.py                                48
-rw-r--r--  frontend/src/app/_data/Model.ts                                         21
-rw-r--r--  frontend/src/app/_data/Predictor.ts                                      2
-rw-r--r--  frontend/src/app/_elements/_charts/line-chart/line-chart.component.ts    2
-rw-r--r--  frontend/src/app/_elements/form-model/form-model.component.html          4
-rw-r--r--  frontend/src/app/_elements/form-model/form-model.component.ts           19
6 files changed, 61 insertions(+), 35 deletions(-)
diff --git a/backend/microservice/api/newmlservice.py b/backend/microservice/api/newmlservice.py
index bca729e4..07735077 100644
--- a/backend/microservice/api/newmlservice.py
+++ b/backend/microservice/api/newmlservice.py
@@ -291,15 +291,16 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
random=123
else:
random=0
- x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=test, random_state=random)
+ #x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=test, random_state=random)
#print(x_train,x_test)
-
+ x, x_test, y, y_test = train_test_split(x, y, test_size=0.15, shuffle=True)
+ x_train, x_val, y_train, y_val = train_test_split(x, y, test_size=0.15, shuffle=True)
#
# Model training
#
#
###OPTIMIZERS
-
+ print(paramsModel['optimizer'])
if(paramsModel['optimizer']=='Adam'):
opt=tf.keras.optimizers.Adam(learning_rate=float(paramsModel['learningRate']))
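The new split logic above replaces the single train/test split with two chained train_test_split calls: 15% of the rows are held out as a test set, then 15% of the remainder becomes the validation set, i.e. roughly a 72/13/15 train/validation/test split. The diff reuses x and y for the remainder; this minimal standalone sketch of the same idea renames them for clarity (the array shapes, labels, and the lack of a fixed random_state are illustrative assumptions):

# Minimal sketch of the chained split used above; data is synthetic.
import numpy as np
from sklearn.model_selection import train_test_split

x = np.random.rand(1000, 4)        # hypothetical feature matrix
y = np.random.randint(0, 3, 1000)  # hypothetical labels

# 15% of all rows held out for the test set
x_rest, x_test, y_rest, y_test = train_test_split(x, y, test_size=0.15, shuffle=True)
# 15% of the remainder (~12.75% of the total) held out for validation
x_train, x_val, y_train, y_val = train_test_split(x_rest, y_rest, test_size=0.15, shuffle=True)

print(len(x_train), len(x_val), len(x_test))  # roughly 722, 128, 150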
@@ -315,13 +316,16 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
elif(paramsModel['optimizer']=='Nadam'):
opt=tf.keras.optimizers.Nadam(learning_rate=float(paramsModel['learningRate']))
- elif(paramsModel['optimizer']=='Sgd'):
+ elif(paramsModel['optimizer']=='SGD'):
+ opt=tf.keras.optimizers.SGD(learning_rate=float(paramsModel['learningRate']))
+
+ elif(paramsModel['optimizer']=='SGDMomentum'):
opt=tf.keras.optimizers.SGD(learning_rate=float(paramsModel['learningRate']))
elif(paramsModel['optimizer']=='Ftrl'):
opt=tf.keras.optimizers.Ftrl(learning_rate=float(paramsModel['learningRate']))
- elif(paramsModel['optimizer']=='Rmsprop'):
+ elif(paramsModel['optimizer']=='RMSprop'):
opt=tf.keras.optimizers.RMSprop(learning_rate=float(paramsModel['learningRate']))
###REGULARIZATION
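The optimizer selection above is a chain of string comparisons; note that the 'SGDMomentum' branch reuses the plain SGD constructor without a momentum argument, so it currently behaves identically to 'SGD'. A table-driven sketch of the same dispatch, covering only the branches visible in this hunk; momentum=0.9 is an assumed value, not taken from the original code:

# Table-driven optimizer lookup; 0.9 momentum is an assumption.
import tensorflow as tf

def build_optimizer(name, learning_rate):
    factories = {
        'Adam':        lambda: tf.keras.optimizers.Adam(learning_rate=learning_rate),
        'Nadam':       lambda: tf.keras.optimizers.Nadam(learning_rate=learning_rate),
        'SGD':         lambda: tf.keras.optimizers.SGD(learning_rate=learning_rate),
        'SGDMomentum': lambda: tf.keras.optimizers.SGD(learning_rate=learning_rate, momentum=0.9),
        'Ftrl':        lambda: tf.keras.optimizers.Ftrl(learning_rate=learning_rate),
        'RMSprop':     lambda: tf.keras.optimizers.RMSprop(learning_rate=learning_rate),
    }
    return factories[name]()

opt = build_optimizer('SGDMomentum', 0.01)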
@@ -331,7 +335,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
filepath=os.path.join("temp/",paramsExperiment['_id']+"_"+paramsModel['_id']+".h5")
if(problem_type=='multi-klasifikacioni'):
#print('multi')
- print(paramsModel)
+ #print(paramsModel)
reg=paramsModel['layers'][0]['regularisation']
regRate=float(paramsModel['layers'][0]['regularisationRate'])
if(reg=='l1'):
@@ -367,7 +371,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
classifier.compile(loss =paramsModel["lossFunction"] , optimizer =opt, metrics = ['accuracy','mae','mse'])
- history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']))
+ history=classifier.fit( x=x_train, y=y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']),validation_data=(x_val, y_val))
hist=history.history
#plt.plot(hist['accuracy'])
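The updated fit call passes the new validation split via validation_data, so Keras records val_* metrics in history.history alongside the training metrics. A self-contained sketch of that pattern (the architecture and data below are illustrative assumptions, not the app's real model):

# Sketch: fit with explicit validation data; synthetic 3-class problem.
import numpy as np
import tensorflow as tf

x_train, y_train = np.random.rand(720, 4), np.random.randint(0, 3, 720)
x_val, y_val = np.random.rand(130, 4), np.random.randint(0, 3, 130)

model = tf.keras.Sequential([
    tf.keras.layers.Dense(16, activation='relu', input_shape=(4,)),
    tf.keras.layers.Dense(3, activation='softmax'),
])
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])
history = model.fit(x=x_train, y=y_train, epochs=5, batch_size=32,
                    validation_data=(x_val, y_val))
print(history.history.keys())  # includes 'val_loss' and 'val_accuracy'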
@@ -380,12 +384,30 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
classifier.save(filepath, save_format='h5')
-
+
+ accuracy=metrics.accuracy_score(y_test, y_pred)
+ macro_averaged_precision=metrics.precision_score(y_test, y_pred, average = 'macro')
+ micro_averaged_precision=metrics.precision_score(y_test, y_pred, average = 'micro')
+ macro_averaged_recall=metrics.recall_score(y_test, y_pred, average = 'macro')
+ micro_averaged_recall=metrics.recall_score(y_test, y_pred, average = 'micro')
+ macro_averaged_f1=metrics.f1_score(y_test, y_pred, average = 'macro')
+ micro_averaged_f1=metrics.f1_score(y_test, y_pred, average = 'micro')
+
+ metrics= {
+ "accuracy" : float(accuracy),
+ "macro_averaged_precision" :float(macro_averaged_precision),
+ "micro_averaged_precision" : float(micro_averaged_precision),
+ "macro_averaged_recall" : float(macro_averaged_recall),
+ "micro_averaged_recall" : float(micro_averaged_recall),
+ "macro_averaged_f1" : float(macro_averaged_f1),
+ "micro_averaged_f1" : float(micro_averaged_f1)
+ }
+
#visualization in Python
#from ann_visualizer.visualize import ann_viz;
#ann_viz(classifier, title="My neural network")
- return filepath,hist,y_pred,y_test
+ return filepath,hist,metrics
elif(problem_type=='binarni-klasifikacioni'):
#print('*************************************************************************binarni')
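The new multi-class block computes its scores with sklearn and then binds the result dict to the name metrics, which shadows the imported sklearn module on this path; that is harmless only because the function returns immediately afterwards. A sketch of the same evaluation step under a non-shadowing name (the helper name and import alias are illustrative):

# Same metrics as above, without rebinding the sklearn module name.
from sklearn import metrics as skm

def classification_scores(y_true, y_pred):
    return {
        "accuracy": float(skm.accuracy_score(y_true, y_pred)),
        "macro_averaged_precision": float(skm.precision_score(y_true, y_pred, average='macro')),
        "micro_averaged_precision": float(skm.precision_score(y_true, y_pred, average='micro')),
        "macro_averaged_recall": float(skm.recall_score(y_true, y_pred, average='macro')),
        "micro_averaged_recall": float(skm.recall_score(y_true, y_pred, average='micro')),
        "macro_averaged_f1": float(skm.f1_score(y_true, y_pred, average='macro')),
        "micro_averaged_f1": float(skm.f1_score(y_true, y_pred, average='micro')),
    }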
@@ -421,7 +443,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
classifier.compile(loss =paramsModel["lossFunction"] , optimizer =opt , metrics = ['accuracy','mae','mse'])
- history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']))
+ history=classifier.fit( x=x_train, y=y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']),validation_data=(x_val, y_val))
hist=history.history
y_pred=classifier.predict(x_test)
y_pred=(y_pred>=0.5).astype('int')
@@ -491,7 +513,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
classifier.compile(loss =paramsModel["lossFunction"] , optimizer = opt , metrics = ['accuracy','mae','mse'])
- history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']))
+ history=classifier.fit( x=x_train, y=y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']),validation_data=(x_val, y_val))
hist=history.history
y_pred=classifier.predict(x_test)
#print(classifier.evaluate(x_test, y_test))
@@ -543,7 +565,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
#
# Metrics
#
-
+ '''
if(problem_type=="regresioni"):
# https://www.analyticsvidhya.com/blog/2021/05/know-the-best-evaluation-metrics-for-your-regression-model/
mse = float(sm.mean_squared_error(y_test,y_pred))
@@ -565,7 +587,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
"r2" : r2,
"adj_r2" : adj_r2
}
- '''
+
elif(problem_type=="multi-klasifikacioni"):
cr=sm.classification_report(y_test, y_pred)
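The hunk above comments out the regression-metrics branch. For reference, the adjusted R² it computed follows the standard formula adj_r2 = 1 - (1 - r2)(n - 1)/(n - p - 1) for n samples and p predictors. A compact sketch of that branch's surviving calculations (the helper name is illustrative, and p is assumed to come from the dataset's input-column count):

# Sketch of the commented-out regression metrics.
import sklearn.metrics as sm

def regression_scores(y_true, y_pred, p):
    n = len(y_true)
    mse = float(sm.mean_squared_error(y_true, y_pred))
    r2 = float(sm.r2_score(y_true, y_pred))
    adj_r2 = 1 - (1 - r2) * (n - 1) / (n - p - 1)
    return {"mse": mse, "r2": r2, "adj_r2": adj_r2}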
diff --git a/frontend/src/app/_data/Model.ts b/frontend/src/app/_data/Model.ts
index 526a8290..d1e89e84 100644
--- a/frontend/src/app/_data/Model.ts
+++ b/frontend/src/app/_data/Model.ts
@@ -13,7 +13,7 @@ export default class Model extends FolderFile {
// Neural net training settings
public type: ProblemType = ProblemType.Regression,
public optimizer: Optimizer = Optimizer.Adam,
- public lossFunction: LossFunction = LossFunction.MeanSquaredError,
+ public lossFunction: LossFunction = LossFunctionRegression[0],
public inputNeurons: number = 1,
public hiddenLayers: number = 1,
public batchSize: BatchSize = BatchSize.O3,
@@ -132,21 +132,10 @@ export enum LossFunction {
MeanSquaredLogarithmicError = 'mean_squared_logarithmic_error',
HuberLoss = 'Huber'
}
-export enum LossFunctionRegression {
- MeanAbsoluteError = 'mean_absolute_error',
- MeanSquaredError = 'mean_squared_error',
- MeanSquaredLogarithmicError = 'mean_squared_logarithmic_error',
-}
-export enum LossFunctionBinaryClassification {
- BinaryCrossEntropy = 'binary_crossentropy',
- SquaredHingeLoss = 'squared_hinge_loss',
- HingeLoss = 'hinge_loss',
-}
-export enum LossFunctionMultiClassification {
- CategoricalCrossEntropy = 'categorical_crossentropy',
- SparseCategoricalCrossEntropy = 'sparse_categorical_crossentropy',
- KLDivergence = 'kullback_leibler_divergence',
-}
+export const LossFunctionRegression = [LossFunction.MeanAbsoluteError, LossFunction.MeanSquaredError, LossFunction.MeanSquaredLogarithmicError]
+export const LossFunctionBinaryClassification = [LossFunction.BinaryCrossEntropy, LossFunction.SquaredHingeLoss, LossFunction.HingeLoss]
+
+export const LossFunctionMultiClassification = [LossFunction.CategoricalCrossEntropy, LossFunction.SparseCategoricalCrossEntropy, LossFunction.KLDivergence]
export enum Optimizer {
Adam = 'Adam',
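The Model.ts change above collapses three parallel enums into constant arrays that reference members of the single LossFunction enum, so each loss string is declared exactly once. The same single-source-of-truth pattern, sketched in Python for illustration (all names here are assumptions):

# Cross-language sketch of the refactor: one enum, plus per-problem-type
# groupings that reference its members instead of redeclaring the strings.
from enum import Enum

class LossFunction(Enum):
    MEAN_ABSOLUTE_ERROR = 'mean_absolute_error'
    MEAN_SQUARED_ERROR = 'mean_squared_error'
    BINARY_CROSSENTROPY = 'binary_crossentropy'
    CATEGORICAL_CROSSENTROPY = 'categorical_crossentropy'

LOSS_FUNCTIONS_REGRESSION = [LossFunction.MEAN_ABSOLUTE_ERROR,
                             LossFunction.MEAN_SQUARED_ERROR]
LOSS_FUNCTIONS_BINARY = [LossFunction.BINARY_CROSSENTROPY]
LOSS_FUNCTIONS_MULTI = [LossFunction.CATEGORICAL_CROSSENTROPY]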
diff --git a/frontend/src/app/_data/Predictor.ts b/frontend/src/app/_data/Predictor.ts
index c5cb2218..e15ae8f9 100644
--- a/frontend/src/app/_data/Predictor.ts
+++ b/frontend/src/app/_data/Predictor.ts
@@ -9,7 +9,7 @@ export default class Predictor {
public accessibleByLink: boolean = false,
public dateCreated: Date = new Date(),
public uploaderId: string = '',
- public finalMetrics: Metric[] = []
+ //public finalMetrics: Metric[] = []
) { }
}
diff --git a/frontend/src/app/_elements/_charts/line-chart/line-chart.component.ts b/frontend/src/app/_elements/_charts/line-chart/line-chart.component.ts
index 9ead9232..0924801e 100644
--- a/frontend/src/app/_elements/_charts/line-chart/line-chart.component.ts
+++ b/frontend/src/app/_elements/_charts/line-chart/line-chart.component.ts
@@ -87,7 +87,7 @@ export class LineChartComponent implements AfterViewInit {
// This more specific font property overrides the global property
color:'white',
font: {
- size: 11
+ size: 10
}
}
}
diff --git a/frontend/src/app/_elements/form-model/form-model.component.html b/frontend/src/app/_elements/form-model/form-model.component.html
index 4e0d1cfb..d13cb3aa 100644
--- a/frontend/src/app/_elements/form-model/form-model.component.html
+++ b/frontend/src/app/_elements/form-model/form-model.component.html
@@ -11,7 +11,7 @@
<div class="ns-col">
<mat-form-field appearance="fill" class="mat-fix">
<mat-label>Tip problema</mat-label>
- <mat-select [(ngModel)]="newModel.type">
+ <mat-select [(ngModel)]="newModel.type" (selectionChange)="filterLossFunction()" disabled="true">
<mat-option *ngFor="let option of Object.keys(ProblemType); let optionName of Object.values(ProblemType)" [value]="option">
{{ optionName }}
</mat-option>
@@ -36,7 +36,7 @@
<mat-form-field appearance="fill" class="mat-fix">
<mat-label>Funkcija troška</mat-label>
<mat-select [(ngModel)]="newModel.lossFunction">
- <mat-option *ngFor="let option of Object.keys(LossFunction); let optionName of Object.values(LossFunction)" [value]="option">
+ <mat-option *ngFor="let option of Object.keys(lossFunction); let optionName of Object.values(lossFunction)" [value]="option">
{{ optionName }}
</mat-option>
</mat-select>
diff --git a/frontend/src/app/_elements/form-model/form-model.component.ts b/frontend/src/app/_elements/form-model/form-model.component.ts
index b9976b84..a98ceaec 100644
--- a/frontend/src/app/_elements/form-model/form-model.component.ts
+++ b/frontend/src/app/_elements/form-model/form-model.component.ts
@@ -136,7 +136,22 @@ export class FormModelComponent implements AfterViewInit {
this.testSetDistribution = event.value!;
}
- getInputColumns() {
+  filterLossFunction() {
+    if (this.newModel.type == ProblemType.Regression) {
+      this.lossFunction = LossFunctionRegression;
+      this.newModel.lossFunction = LossFunction.MeanSquaredError;
+    }
+    else if (this.newModel.type == ProblemType.BinaryClassification) {
+      this.lossFunction = LossFunctionBinaryClassification;
+      this.newModel.lossFunction = LossFunction.BinaryCrossEntropy;
+    }
+    else if (this.newModel.type == ProblemType.MultiClassification) {
+      this.lossFunction = LossFunctionMultiClassification;
+      this.newModel.lossFunction = LossFunction.SparseCategoricalCrossEntropy;
+    }
+  }
+
+  getInputColumns() {
return this.forExperiment.inputColumns.filter(x => x != this.forExperiment.outputColumn);
- }
+  }
}