Diffstat (limited to 'backend/microservice/api')
-rw-r--r-- | backend/microservice/api/config.py       |  2 |
-rw-r--r-- | backend/microservice/api/controller.py   | 19 |
-rw-r--r-- | backend/microservice/api/ml_service.py   |  2 |
-rw-r--r-- | backend/microservice/api/ml_socket.py    | 30 |
-rw-r--r-- | backend/microservice/api/newmlservice.py | 28 |
5 files changed, 30 insertions, 51 deletions
diff --git a/backend/microservice/api/config.py b/backend/microservice/api/config.py
new file mode 100644
index 00000000..2b6fbe89
--- /dev/null
+++ b/backend/microservice/api/config.py
@@ -0,0 +1,2 @@
+api_url = "http://localhost:5283/api"
+
diff --git a/backend/microservice/api/controller.py b/backend/microservice/api/controller.py
index 4d9f8f2a..08f953a6 100644
--- a/backend/microservice/api/controller.py
+++ b/backend/microservice/api/controller.py
@@ -1,10 +1,11 @@
 import flask
 from flask import request, jsonify
-import ml_socket
 import newmlservice
 import tensorflow as tf
 import pandas as pd
 import json
+import requests
+import config
 
 app = flask.Flask(__name__)
 app.config["DEBUG"] = True
@@ -17,16 +18,22 @@ class train_callback(tf.keras.callbacks.Callback):
     #
     def on_epoch_end(self, epoch, logs=None):
         print(epoch)
-        ml_socket.send(epoch)
+        #ml_socket.send(epoch)
+        #file = request.files.get("file")
+        url = config.api_url + "/Model/epoch"
+        requests.post(url, epoch).text
        #print('Evaluation: ', self.model.evaluate(self.x_test,self.y_test),"\n") #the number of parameters depends on the selected metrics; loss is the default
 
 @app.route('/train', methods = ['POST'])
 def train():
     print("******************************TRAIN*************************************************")
-    f = request.json["dataset"]
-    dataset = pd.read_csv(f)
-    #
-    result = newmlservice.train(dataset, request.json["model"], train_callback)
+    f = request.files.get("file")
+    data = pd.read_csv(f)
+    paramsModel = json.loads(request.form["model"])
+    paramsExperiment = json.loads(request.form["experiment"])
+    paramsDataset = json.loads(request.form["dataset"])
+    #dataset, paramsModel, paramsExperiment, callback)
+    result = newmlservice.train(data, paramsModel, paramsExperiment, paramsDataset, train_callback)
     print(result)
     return jsonify(result)
diff --git a/backend/microservice/api/ml_service.py b/backend/microservice/api/ml_service.py
index 4d2212f7..16ee7cc6 100644
--- a/backend/microservice/api/ml_service.py
+++ b/backend/microservice/api/ml_service.py
@@ -101,7 +101,7 @@ class TrainingResultRegression:
 class TrainingResult:
     metrics: dict
 '''
-def train(dataset, paramsModel, paramsExperiment, callback):
+def train(dataset, paramsModel, paramsExperiment, paramsDataset, callback):
     problem_type = paramsModel["type"]
     dataModel = pd.DataFrame()
     dataExperiment = pd.DataFrame()
diff --git a/backend/microservice/api/ml_socket.py b/backend/microservice/api/ml_socket.py
deleted file mode 100644
index cab157eb..00000000
--- a/backend/microservice/api/ml_socket.py
+++ /dev/null
@@ -1,30 +0,0 @@
-import asyncio
-import websockets
-import json
-
-def get_or_create_eventloop():
-    try:
-        return asyncio.get_event_loop()
-    except RuntimeError as ex:
-        if "There is no current event loop in thread" in str(ex):
-            loop = asyncio.new_event_loop()
-            asyncio.set_event_loop(loop)
-            return asyncio.get_event_loop()
-
-# create handler for each connection
-async def handler(websocket, path):
-    #data = json.loads(await websocket.recv())
-    #print(data['test'])
-    msg = await websocket.recv()
-    print(msg)
-
-async def start():
-    start_server = websockets.serve(handler, "localhost", 5027)
-    print('Websocket starting...')
-    get_or_create_eventloop().run_until_complete(start_server)
-    get_or_create_eventloop().run_forever()
-
-async def send(msg):
-    print("WS sending message:")
-    print(msg)
-    await websocket.send(msg)
\ No newline at end of file
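A caveat in the on_epoch_end hunk above: requests.post(url, epoch) passes the integer epoch as the positional data argument, which requests cannot encode (it expects str, bytes, a mapping, or a file-like object), so the call will raise on the first epoch, and the .text result is discarded anyway. A hedged correction sketch, assuming the backend's "/Model/epoch" route accepts a JSON number:

    import requests
    import tensorflow as tf
    import config

    class train_callback(tf.keras.callbacks.Callback):
        def on_epoch_end(self, epoch, logs=None):
            print(epoch)
            # Serialize explicitly; a bare int is not a valid `data` payload.
            requests.post(config.api_url + "/Model/epoch", json=epoch)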
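With this commit, /train no longer reads a JSON body: it expects a multipart/form-data request carrying the CSV upload under "file" plus JSON-encoded "model", "experiment", and "dataset" form fields. A minimal client sketch; the parameter keys mirror those visible in these hunks, the values are placeholders, and the Flask default port 5000 is assumed:

    import json
    import requests

    # Illustrative parameter dicts; only keys that appear in this diff are
    # used, and the values are placeholders, not the frontend's real payload.
    model = {
        "type": "regresioni",
        "optimizer": "adam",
        "learningRate": 0.01,
        "hiddenLayers": 2,
        "hiddenLayerNeurons": 8,
        "hiddenLayerActivationFunctions": ["relu", "relu"],
        "outputLayerActivationFunction": "linear",
        "lossFunction": "mean_squared_error",
        "metrics": ["mae"],
    }
    experiment = {"name": "demo-experiment"}  # placeholder
    dataset = {"delimiter": ","}              # placeholder

    with open("dataset.csv", "rb") as f:
        r = requests.post(
            "http://localhost:5000/train",      # assumed Flask default port
            files={"file": f},                  # read via request.files.get("file")
            data={
                "model": json.dumps(model),     # parsed with json.loads(request.form["model"])
                "experiment": json.dumps(experiment),
                "dataset": json.dumps(dataset),
            },
        )
    print(r.json())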
diff --git a/backend/microservice/api/newmlservice.py b/backend/microservice/api/newmlservice.py
index 02ce2250..d19a4e44 100644
--- a/backend/microservice/api/newmlservice.py
+++ b/backend/microservice/api/newmlservice.py
@@ -225,28 +225,28 @@ def train(dataset, params, callback):
     #
     ###OPTIMIZERS
-    if(params['optimizer']=='Adam'):
+    if(params['optimizer']=='adam'):
         opt=tf.keras.optimizers.Adam(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Adadelta'):
+    elif(params['optimizer']=='adadelta'):
         opt=tf.keras.optimizers.Adadelta(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Adagrad'):
+    elif(params['optimizer']=='adagrad'):
         opt=tf.keras.optimizers.Adagrad(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Adamax'):
+    elif(params['optimizer']=='adamax'):
         opt=tf.keras.optimizers.Adamax(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Nadam'):
+    elif(params['optimizer']=='nadam'):
         opt=tf.keras.optimizers.Nadam(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='SGD'):
+    elif(params['optimizer']=='sgd'):
         opt=tf.keras.optimizers.SGD(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Ftrl'):
+    elif(params['optimizer']=='ftrl'):
         opt=tf.keras.optimizers.Ftrl(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='RMSprop'):
+    elif(params['optimizer']=='rmsprop'):
         opt=tf.keras.optimizers.RMSprop(learning_rate=params['learningRate'])
 
     ###REGULARIZATION
@@ -282,10 +282,10 @@ def train(dataset, params, callback):
         #print('multi')
         classifier=tf.keras.Sequential()
 
-        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1]),kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg)#first hidden layer + defines the preceding (input) layer
+        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1]))#first hidden layer + defines the preceding (input) layer
         for i in range(params['hiddenLayers']-1):#if there is more than one hidden layer
             #print(i)
-            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#i-th hidden layer
+            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1]))#i-th hidden layer
 
         classifier.add(tf.keras.layers.Dense(units=5, activation=params['outputLayerActivationFunction']))#output layer
@@ -309,10 +309,10 @@ def train(dataset, params, callback):
         #print('*************************************************************************binarni')
         classifier=tf.keras.Sequential()
 
-        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#first hidden layer + defines the preceding (input) layer
+        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1]))#first hidden layer + defines the preceding (input) layer
         for i in range(params['hiddenLayers']-1):#if there is more than one hidden layer
             #print(i)
-            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#i-th hidden layer
+            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1]))#i-th hidden layer
 
         classifier.add(tf.keras.layers.Dense(units=1, activation=params['outputLayerActivationFunction']))#output layer
 
         classifier.compile(loss =params["lossFunction"] , optimizer = params['optimizer'] , metrics =params['metrics'])
@@ -334,10 +334,10 @@ def train(dataset, params, callback):
     elif(problem_type=='regresioni'):
         classifier=tf.keras.Sequential()
 
-        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#first hidden layer + defines the preceding (input) layer
+        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1]))#first hidden layer + defines the preceding (input) layer
         for i in range(params['hiddenLayers']-1):#if there is more than one hidden layer
             #print(i)
-            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#i-th hidden layer
+            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1]))#i-th hidden layer
 
         classifier.add(tf.keras.layers.Dense(units=1))
 
         classifier.compile(loss =params["lossFunction"] , optimizer = params['optimizer'] , metrics =params['metrics'])
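The lowercased comparisons align the chain with the frontend's option values, but an unrecognized name still falls through and leaves opt unbound, which surfaces later as a NameError. Note also that the compile() calls above pass the raw string params['optimizer'] rather than opt, so the configured learningRate never reaches training. A table-driven sketch, not part of this commit, that makes both issues explicit:

    import tensorflow as tf

    # Same eight optimizers as the if/elif chain, keyed by lowercase name.
    OPTIMIZERS = {
        "adam": tf.keras.optimizers.Adam,
        "adadelta": tf.keras.optimizers.Adadelta,
        "adagrad": tf.keras.optimizers.Adagrad,
        "adamax": tf.keras.optimizers.Adamax,
        "nadam": tf.keras.optimizers.Nadam,
        "sgd": tf.keras.optimizers.SGD,
        "ftrl": tf.keras.optimizers.Ftrl,
        "rmsprop": tf.keras.optimizers.RMSprop,
    }

    def make_optimizer(params):
        # Fail loudly instead of silently leaving the optimizer undefined.
        try:
            cls = OPTIMIZERS[params["optimizer"]]
        except KeyError:
            raise ValueError(f"unknown optimizer: {params['optimizer']!r}")
        return cls(learning_rate=params["learningRate"])

Passing the returned instance to compile(optimizer=...) would then actually apply the requested learning rate.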
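With the kernel_regularizer, bias_regularizer, and activity_regularizer arguments dropped from every Dense call in these hunks, the multiclass, binary, and regression branches now differ only in their output layer. If consolidation is ever wanted, a shared builder could look like this sketch (build_classifier is a hypothetical helper, not in the repo; compile mirrors the commit in passing the optimizer name string):

    import tensorflow as tf

    def build_classifier(params, input_dim, output_units, output_activation=None):
        # Hidden stack shared by all three problem types.
        model = tf.keras.Sequential()
        model.add(tf.keras.layers.Dense(
            units=params["hiddenLayerNeurons"],
            activation=params["hiddenLayerActivationFunctions"][0],
            input_dim=input_dim))
        for i in range(params["hiddenLayers"] - 1):  # additional hidden layers
            model.add(tf.keras.layers.Dense(
                units=params["hiddenLayerNeurons"],
                activation=params["hiddenLayerActivationFunctions"][i + 1]))
        # Output layer is the only branch-specific piece.
        model.add(tf.keras.layers.Dense(units=output_units, activation=output_activation))
        model.compile(loss=params["lossFunction"], optimizer=params["optimizer"],
                      metrics=params["metrics"])
        return model

The multiclass branch would then be build_classifier(params, x_train.shape[1], 5, params['outputLayerActivationFunction']), the binary branch the same with one output unit, and the regression branch build_classifier(params, x_train.shape[1], 1).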