Diffstat (limited to 'backend/microservice/api/controller.py')
-rw-r--r--	backend/microservice/api/controller.py	94
1 file changed, 76 insertions(+), 18 deletions(-)
diff --git a/backend/microservice/api/controller.py b/backend/microservice/api/controller.py
index 8e12c41d..f870b2b1 100644
--- a/backend/microservice/api/controller.py
+++ b/backend/microservice/api/controller.py
@@ -1,3 +1,4 @@
+from dataclasses import dataclass
import flask
from flask import request, jsonify
import newmlservice
@@ -9,47 +13,101 @@ import config
app = flask.Flask(__name__)
app.config["DEBUG"] = True
-app.config["SERVER_NAME"] = "127.0.0.1:5543"
-
+app.config["SERVER_NAME"] = config.hostIP
+
+#@dataclass
+#class Predictor:
+#    _id: str
+#    username: str
+#    inputs: list
+#    output: str
+#    isPublic: bool
+#    accessibleByLink: bool
+#    dateCreated: DateTime
+#    experimentId: str
+#    modelId: str
+#    h5FileId: str
+#    metrics: list
+
+
class train_callback(tf.keras.callbacks.Callback):
- def __init__(self, x_test, y_test):
+    def __init__(self, x_test, y_test, modelId):
+        super().__init__()
         self.x_test = x_test
         self.y_test = y_test
+        self.modelId = modelId
#
def on_epoch_end(self, epoch, logs=None):
-        print(epoch)
+        #print('Evaluation: ', self.model.evaluate(self.x_test, self.y_test), "\n")
+        #print(epoch)
+        #print(logs)
#ml_socket.send(epoch)
#file = request.files.get("file")
url = config.api_url + "/Model/epoch"
- requests.post(url, epoch).text
+        r = requests.post(url, json={"Stat": str(logs), "ModelId": str(self.modelId), "EpochNum": epoch}).text
+        #print(r)
        #print('Evaluation: ', self.model.evaluate(self.x_test,self.y_test),"\n") # the number of reported values depends on the selected metrics; loss is the default
@app.route('/train', methods = ['POST'])
def train():
- print("******************************TRAIN*************************************************")
+ #print("******************************TRAIN*************************************************")
+
f = request.files.get("file")
data = pd.read_csv(f)
paramsModel = json.loads(request.form["model"])
paramsExperiment = json.loads(request.form["experiment"])
paramsDataset = json.loads(request.form["dataset"])
#dataset, paramsModel, paramsExperiment, callback)
- result = newmlservice.train(data, paramsModel, paramsExperiment,paramsDataset, train_callback)
+    filepath, result = newmlservice.train(data, paramsModel, paramsExperiment, paramsDataset, train_callback)
+ """
+ f = request.json['filepath']
+ dataset = pd.read_csv(f)
+ filepath,result=newmlservice.train(dataset,request.json['model'],train_callback)
print(result)
- return jsonify(result)
+ """
+
+    url = config.api_url + "/file/h5"
+    with open(filepath, 'rb') as h5file:
+        r = requests.post(url, files={'file': h5file}, data={"uploaderId": paramsExperiment['uploaderId']})
+    fileId = r.text
+ m = []
+ for attribute, value in result.items():
+ m.append({"Name" : attribute, "JsonValue" : value})
+ predictor = {
+ "_id" : "",
+ "uploaderId" : paramsModel["uploaderId"],
+ "inputs" : paramsExperiment["inputColumns"],
+ "output" : paramsExperiment["outputColumn"],
+ "isPublic" : False,
+ "accessibleByLink" : False,
+ "experimentId" : paramsExperiment["_id"],
+ "modelId" : paramsModel["_id"],
+ "h5FileId" : fileId,
+ "metrics" : m
+ }
+ #print(predictor)
+ #print('\n')
+ url = config.api_url + "/Predictor/add"
+ r = requests.post(url, json=predictor).text
+ #print(r)
+ return r
@app.route('/predict', methods = ['POST'])
def predict():
- f = request.json['filepath']
- dataset = pd.read_csv(f)
- m = request.json['modelpath']
- model = tf.keras.models.load_model(m)
- print("********************************model loaded*******************************")
- newmlservice.manageH5(dataset,request.json['model'],model)
- return "done"
+ h5 = request.files.get("h5file")
+ model = tf.keras.models.load_model(h5)
+ paramsExperiment = json.loads(request.form["experiment"])
+ paramsPredictor = json.loads(request.form["predictor"])
+ #print("********************************model loaded*******************************")
+ result = newmlservice.predict(paramsExperiment, paramsPredictor, model)
+ return result
@app.route('/preprocess',methods=['POST'])
def returnColumnsInfo():
- print("********************************PREPROCESS*******************************")
+ #print("********************************PREPROCESS*******************************")
dataset = json.loads(request.form["dataset"])
file = request.files.get("file")
data=pd.read_csv(file)
@@ -69,8 +127,8 @@ def returnColumnsInfo():
dataset["colCount"] = preprocess["colCount"]
dataset["rowCount"] = preprocess["rowCount"]
dataset["isPreProcess"] = True
- print(dataset)
+ #print(dataset)
return jsonify(dataset)
-print("App loaded.")
+#print("App loaded.")
 app.run()
\ No newline at end of file
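
Usage sketch (not part of the commit): a hypothetical client for the /train and /predict routes added above, assuming the service listens on the host/port configured in config.hostIP (127.0.0.1:5543 is used here as a stand-in) and that the JSON form fields carry at least the keys the controller reads (_id, uploaderId, inputColumns, outputColumn). All ids and file names are illustrative.

import json
import requests

BASE = "http://127.0.0.1:5543"  # assumed value of config.hostIP

# /train expects a CSV upload plus JSON-encoded "model", "experiment" and "dataset" form fields.
with open("dataset.csv", "rb") as csv_file:
    r = requests.post(
        BASE + "/train",
        files={"file": csv_file},
        data={
            "model": json.dumps({"_id": "m1", "uploaderId": "u1"}),
            "experiment": json.dumps({
                "_id": "e1",
                "uploaderId": "u1",
                "inputColumns": ["x1", "x2"],
                "outputColumn": "y",
            }),
            "dataset": json.dumps({"_id": "d1"}),
        },
    )
print(r.text)  # whatever the backend /Predictor/add endpoint returned

# /predict expects the trained .h5 model plus "experiment" and "predictor" descriptions.
with open("model.h5", "rb") as h5_file:
    r = requests.post(
        BASE + "/predict",
        files={"h5file": h5_file},
        data={
            "experiment": json.dumps({"_id": "e1"}),
            "predictor": json.dumps({"_id": "p1"}),
        },
    )
print(r.text)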