Diffstat (limited to 'backend/microservice')
 backend/microservice/api/newmlservice.py | 22 ++++++++++++++++++----
 1 file changed, 18 insertions(+), 4 deletions(-)
diff --git a/backend/microservice/api/newmlservice.py b/backend/microservice/api/newmlservice.py
index 604e4d3c..219f8a20 100644
--- a/backend/microservice/api/newmlservice.py
+++ b/backend/microservice/api/newmlservice.py
@@ -155,7 +155,19 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
data.pop(col)
#
### Encoding
+<<<<<<< HEAD
encodings=paramsExperiment["encodings"]
+=======
+
+ from sklearn.preprocessing import LabelEncoder
+ kategorijskekolone=data.select_dtypes(include=['object']).columns
+ encoder=LabelEncoder()
+ for kolona in data.columns:
+ if(kolona in kategorijskekolone):
+ data[kolona]=encoder.fit_transform(data[kolona])
+ '''
+ encoding=paramsExperiment["encoding"]
+>>>>>>> 7d57bb9 (Added components for the chart.)
datafront=dataset.copy()
svekolone=datafront.columns
kategorijskekolone=datafront.select_dtypes(include=['object']).columns
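In isolation, the label-encoding step added in the hunk above amounts to the sketch below. The DataFrame and column names are made up for illustration; the service applies the same loop to the experiment's own data frame:

    import pandas as pd
    from sklearn.preprocessing import LabelEncoder

    # Made-up example frame standing in for the experiment's data.
    df = pd.DataFrame({"city": ["Paris", "Oslo", "Paris"], "price": [1.0, 2.0, 3.0]})

    # Every object-dtype (categorical) column is replaced by integer codes,
    # refitting the same LabelEncoder once per column.
    categorical_columns = df.select_dtypes(include=["object"]).columns
    encoder = LabelEncoder()
    for column in categorical_columns:
        df[column] = encoder.fit_transform(df[column])

    print(df)  # "city" becomes integer codes, "price" is left untouched

As in the committed loop, one encoder instance is refit for every column, which is enough to produce the codes but means the fitted mapping afterwards only reflects the last column processed.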
@@ -207,6 +219,8 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
category_columns.append(col)
encoder=ce.BaseNEncoder(cols=category_columns, return_df=True, base=5)
encoder.fit_transform(data)
+
+ '''
#
# Input - output
#
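For comparison, the base-N encoding path that this commit wraps inside the ''' block corresponds to the sketch below, with invented example data. One detail worth noting: category_encoders returns a new encoded frame, so the result of fit_transform has to be assigned back for the encoding to take effect.

    import category_encoders as ce
    import pandas as pd

    # Invented example data; the service builds category_columns from the
    # experiment's object-dtype columns.
    data = pd.DataFrame({"city": ["Paris", "Oslo", "Rome"], "price": [1.0, 2.0, 3.0]})
    category_columns = ["city"]

    encoder = ce.BaseNEncoder(cols=category_columns, return_df=True, base=5)
    data = encoder.fit_transform(data)  # assign the returned frame back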
@@ -301,7 +315,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
- classifier.compile(loss =paramsModel["lossFunction"] , optimizer = paramsModel['optimizer'] , metrics =paramsModel['metrics'])
+ classifier.compile(loss =paramsModel["lossFunction"] , optimizer = paramsModel['optimizer'] , metrics =['accuracy','mae','mse'])
history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=paramsModel['batchSize'],callbacks=callback(x_test, y_test,paramsModel['_id']))
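This hunk, and the two identical ones below, hard-code the metrics list passed to compile() instead of reading it from paramsModel['metrics']. A minimal sketch of the resulting call, with an invented toy model and fixed hyperparameters in place of the values taken from paramsModel:

    import tensorflow as tf

    # Invented toy model; the service builds its layers from paramsModel.
    classifier = tf.keras.Sequential([
        tf.keras.layers.Dense(8, activation="relu", input_shape=(4,)),
        tf.keras.layers.Dense(1, activation="sigmoid"),
    ])

    # The change above replaces paramsModel['metrics'] with this fixed list;
    # 'mae' and 'mse' are accepted as string aliases by Keras.
    classifier.compile(loss="binary_crossentropy",
                       optimizer="adam",
                       metrics=["accuracy", "mae", "mse"])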
@@ -333,7 +347,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
classifier.add(tf.keras.layers.Dense(units=paramsModel['hiddenLayerNeurons'], activation=paramsModel['hiddenLayerActivationFunctions'][i+1]))# i-th hidden layer
classifier.add(tf.keras.layers.Dense(units=1, activation=paramsModel['outputLayerActivationFunction']))# output layer
- classifier.compile(loss =paramsModel["lossFunction"] , optimizer = paramsModel['optimizer'] , metrics =paramsModel['metrics'])
+ classifier.compile(loss =paramsModel["lossFunction"] , optimizer = paramsModel['optimizer'] , metrics =['accuracy','mae','mse'])
history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=paramsModel['batchSize'],callbacks=callback(x_test, y_test,paramsModel['_id']))
hist=history.history
@@ -359,7 +373,7 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
classifier.add(tf.keras.layers.Dense(units=paramsModel['hiddenLayerNeurons'], activation=paramsModel['hiddenLayerActivationFunctions'][i+1]))# i-th hidden layer
classifier.add(tf.keras.layers.Dense(units=1))
- classifier.compile(loss =paramsModel["lossFunction"] , optimizer = paramsModel['optimizer'] , metrics =paramsModel['metrics'])
+ classifier.compile(loss =paramsModel["lossFunction"] , optimizer = paramsModel['optimizer'] , metrics =['accuracy','mae','mse'])
history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=paramsModel['batchSize'],callbacks=callback(x_test, y_test,paramsModel['_id']))
hist=history.history
@@ -529,7 +543,7 @@ def manageH5(dataset,params,h5model):
h5model.summary()
#ann_viz(h5model, title="My neural network")
- h5model.compile(loss=params['lossFunction'], optimizer=params['optimizer'], metrics=params['metrics'])
+ h5model.compile(loss=params['lossFunction'], optimizer=params['optimizer'], metrics=['accuracy','mae','mse'])
history=h5model.fit(x2, y2, epochs = params['epochs'],batch_size=params['batchSize'])
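Standalone, the manageH5 flow above corresponds to the sketch below. The model file name, the synthetic x2/y2 data, and the hyperparameters are assumptions for illustration, not values taken from the repository:

    import numpy as np
    import tensorflow as tf

    # Load a previously saved Keras model (file name is an assumption).
    h5model = tf.keras.models.load_model("model.h5")
    h5model.summary()

    # Recompile with an explicit metrics list, mirroring the change above.
    h5model.compile(loss="mse", optimizer="adam", metrics=["accuracy", "mae", "mse"])

    # Synthetic data standing in for the prepared x2/y2 from the dataset.
    x2 = np.random.rand(100, h5model.input_shape[1])
    y2 = np.random.rand(100)
    history = h5model.fit(x2, y2, epochs=5, batch_size=16)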