Diffstat (limited to 'backend/microservice/api')
-rw-r--r--  backend/microservice/api/newmlservice.py  40
1 file changed, 18 insertions, 22 deletions
diff --git a/backend/microservice/api/newmlservice.py b/backend/microservice/api/newmlservice.py
index d84d9567..2f08d4b4 100644
--- a/backend/microservice/api/newmlservice.py
+++ b/backend/microservice/api/newmlservice.py
@@ -130,7 +130,7 @@ def returnColumnsInfo(dataset):
     #print(NullRows)
     #print(len(NullRows))
     allNullRows=len(NullRows)
-    print(cMatrix.to_json(orient='index'))
+    #print(cMatrix.to_json(orient='index'))
     #json.loads()['data']
 
     return {'columnInfo':dict,'allNullColl':int(allNullCols),'allNullRows':int(allNullRows),'rowCount':int(rowCount),'colCount':int(colCount),'cMatrix':json.loads(cMatrix.to_json(orient='split'))['data']}
@@ -180,17 +180,20 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
     kategorijskekolone=[]
     ###PRETVARANJE NUMERICKIH U KATREGORIJSKE AKO JE KORISNIK TAKO OZNACIO
     columnInfo=paramsDataset['columnInfo']
-    for col in columnInfo:
-        if(col['columnType']=="Kategorijski"):
+    columnTypes=paramsExperiment['columnTypes']
+    for i in range(len(columnInfo)):
+        col=columnInfo[i]
+        if(columnTypes[i]=="categorical" and col['columnName'] in paramsExperiment['inputColumns']):
             data[col['columnName']]=data[col['columnName']].apply(str)
-            kategorijskekolone.append(col['coumnName'])
-
+            kategorijskekolone.append(col['columnName'])
+    #kategorijskekolone=data.select_dtypes(include=['object']).columns
+    print(kategorijskekolone)
     ###NULL
     null_value_options = paramsExperiment["nullValues"]
     null_values_replacers = paramsExperiment["nullValuesReplacers"]
 
     if(null_value_options=='replace'):
-        #print("replace null") #
+        #print("replace null")
         dict=null_values_replacers
         while(len(dict)>0):
             replace=dict.pop()
@@ -362,9 +365,9 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
 
 
-            classifier.compile(loss =paramsModel["lossFunction"] , optimizer = opt, metrics =paramsModel['metrics'])
+            classifier.compile(loss =paramsModel["lossFunction"] , optimizer = opt, metrics = ['accuracy','mae','mse'])
 
-            history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=float(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']))
+            history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']))
             hist=history.history
             #plt.plot(hist['accuracy'])
@@ -416,16 +419,9 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
             classifier.add(tf.keras.layers.Dense(units=1, activation=paramsModel['outputLayerActivationFunction']))#izlazni sloj
 
-            classifier.compile(loss =paramsModel["lossFunction"] , optimizer = opt , metrics =paramsModel['metrics'])
-
-            print('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')
-            print(x_train)
-            print('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')
-            print(y_train)
-            print('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA')
-
+            classifier.compile(loss =paramsModel["lossFunction"] , optimizer = opt , metrics = ['accuracy','mae','mse'])
 
-            history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=float(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']))
+            history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']))
             hist=history.history
             y_pred=classifier.predict(x_test)
             y_pred=(y_pred>=0.5).astype('int')
@@ -470,11 +466,11 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
             classifier.add(tf.keras.layers.Dense(units=paramsModel['layers'][i+1]['neurons'], activation=paramsModel['layers'][i+1]['activationFunction'],kernel_regularizer=kernelreg, bias_regularizer=biasreg, activity_regularizer=activityreg))#i-ti skriveni sloj
 
-        classifier.add(tf.keras.layers.Dense(units=1),activation=paramsModel['outputLayerActivationFunction'])
+        classifier.add(tf.keras.layers.Dense(units=1,activation=paramsModel['outputLayerActivationFunction']))
 
-        classifier.compile(loss =paramsModel["lossFunction"] , optimizer = opt , metrics =paramsModel['metrics'])
+        classifier.compile(loss =paramsModel["lossFunction"] , optimizer = opt , metrics = ['accuracy','mae','mse'])
 
-        history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=float(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']))
+        history=classifier.fit(x_train, y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']))
         hist=history.history
         y_pred=classifier.predict(x_test)
         #print(classifier.evaluate(x_test, y_test))
@@ -642,9 +638,9 @@ def manageH5(dataset,params,h5model):
     h5model.summary()
     #ann_viz(h5model, title="My neural network")
 
-    h5model.compile(loss=params['lossFunction'], optimizer=params['optimizer'], metrics=params['metrics'])
+    h5model.compile(loss=params['lossFunction'], optimizer=params['optimizer'], metrics = ['accuracy','mae','mse'])
 
-    history=h5model.fit(x2, y2, epochs = params['epochs'],batch_size=params['batchSize'])
+    history=h5model.fit(x2, y2, epochs = params['epochs'],batch_size=int(params['batchSize']))
 
     y_pred2=h5model.predict(x2)
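The recurring changes in this commit are the int() cast on batchSize before it reaches Model.fit, the hard-coded ['accuracy','mae','mse'] metrics passed to compile, and moving the output-layer activation inside the Dense(...) call instead of passing it to add(). Below is a minimal sketch of that pattern, assuming a toy Sequential model, random data, and a params dict merely shaped like the paramsModel payload seen in the diff (all names and values here are hypothetical, not the project's actual request format):

    import numpy as np
    import tensorflow as tf

    # Hypothetical request payload shaped like paramsModel in the diff;
    # values often arrive from JSON as strings or floats.
    params = {'lossFunction': 'binary_crossentropy', 'epochs': 5, 'batchSize': '32'}

    model = tf.keras.Sequential([
        tf.keras.layers.Dense(8, activation='relu', input_shape=(4,)),
        # activation belongs inside Dense(...), as the diff fixes;
        # Dense(units=1), activation=... would pass it to add() and fail.
        tf.keras.layers.Dense(1, activation='sigmoid'),
    ])

    # Metrics are now fixed to accuracy/MAE/MSE instead of read from the request.
    model.compile(loss=params['lossFunction'], optimizer='adam',
                  metrics=['accuracy', 'mae', 'mse'])

    x = np.random.rand(64, 4).astype('float32')
    y = (np.random.rand(64, 1) > 0.5).astype('float32')

    # Keras expects an integer batch_size; a float value here is rejected,
    # hence the change from float(...) to int(...) in the diff.
    model.fit(x, y, epochs=int(params['epochs']), batch_size=int(params['batchSize']))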