Diffstat (limited to 'backend')
-rw-r--r-- | backend/api/api/Controllers/DatasetController.cs | 20
-rw-r--r-- | backend/api/api/Controllers/ModelController.cs   | 40
-rw-r--r-- | backend/api/api/Models/Model.cs                  |  6
-rw-r--r-- | backend/api/api/Models/Predictor.cs              | 25
-rw-r--r-- | backend/api/api/Services/DatasetService.cs       | 12
-rw-r--r-- | backend/api/api/Services/FillAnEmptyDb.cs        | 13
-rw-r--r-- | backend/api/api/Services/MlConnectionService.cs  | 13
-rw-r--r-- | backend/microservice/api/controller.py           |  8
-rw-r--r-- | backend/microservice/api/newmlservice.py         | 66
9 files changed, 88 insertions, 115 deletions
diff --git a/backend/api/api/Controllers/DatasetController.cs b/backend/api/api/Controllers/DatasetController.cs
index e4741412..849d9884 100644
--- a/backend/api/api/Controllers/DatasetController.cs
+++ b/backend/api/api/Controllers/DatasetController.cs
@@ -144,7 +144,6 @@ namespace api.Controllers
         [Authorize(Roles = "User,Guest")]
         public async Task<ActionResult<Dataset>> Post([FromBody] Dataset dataset)
         {
-            Console.WriteLine("PROBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA");
             string uploaderId = getUserId();
 
             dataset.uploaderId = uploaderId;
@@ -196,7 +195,7 @@ namespace api.Controllers
             string ext = ".csv";
 
 
-            //nesto
+            //Check Directory
 
 
             if (!Directory.Exists(folderPath))
@@ -279,19 +278,4 @@ namespace api.Controllers
 
             }
         }
-}
-
-/*
-{
-    "_id": "",
-    "name": "name",
-    "description": "description",
-    "header" : ["ag","rt"],
-    "fileId" : "652",
-    "extension": "csb",
-    "isPublic" : true,
-    "accessibleByLink": true,
-    "dateCreated": "dateCreated",
-    "lastUpdated" : "proba12"
-}
-*/
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/backend/api/api/Controllers/ModelController.cs b/backend/api/api/Controllers/ModelController.cs
index 39eb7830..be30ae6f 100644
--- a/backend/api/api/Controllers/ModelController.cs
+++ b/backend/api/api/Controllers/ModelController.cs
@@ -211,6 +211,8 @@ namespace api.Controllers
                 return BadRequest("Bad parameters!");*/
 
             model.uploaderId = getUserId();
+            model.dateCreated = DateTime.Now;
+            model.lastUpdated = DateTime.Now;
 
             var existingModel = _modelService.GetOneModel(model.uploaderId, model.name);
 
@@ -232,6 +234,44 @@ namespace api.Controllers
             }
         }
 
+        // POST api/<ModelController>/stealModel
+        [HttpPost("stealModel")]
+        [Authorize(Roles = "User,Guest")]
+        public ActionResult<Model> StealModel([FromBody] Model model)//, bool overwrite)
+        {
+            bool overwrite = false;
+            //username="" ako je GUEST
+            //Experiment e = _experimentService.Get(model.experimentId); umesto 1 ide e.inputColumns.Length TODO!!!!!!!!!!!!!!!!!
+            //model.inputNeurons = e.inputColumns.Length;
+            /*if (_modelService.CheckHyperparameters(1, model.hiddenLayerNeurons, model.hiddenLayers, model.outputNeurons) == false)
+                return BadRequest("Bad parameters!");*/
+
+            model.uploaderId = getUserId();
+            model._id = "";
+            model.dateCreated = DateTime.Now;
+            model.lastUpdated = DateTime.Now;
+            model.isPublic = false;
+
+            var existingModel = _modelService.GetOneModel(model.uploaderId, model.name);
+
+
+            if (existingModel != null && !overwrite && model.validationSize < 1 && model.validationSize > 0)
+                return NotFound($"Model already exisits or validation size is not between 0-1");
+            else
+            {
+                //_modelService.Create(model);
+                //return Ok();
+                if (existingModel == null)
+                    _modelService.Create(model);
+                else
+                {
+                    _modelService.Replace(model);
+                }
+
+                return CreatedAtAction(nameof(Get), new { id = model._id }, model);
+            }
+        }
+
         // PUT api/<ModelController>/{name}
         [HttpPut("{name}")]
         [Authorize(Roles = "User,Guest")]
diff --git a/backend/api/api/Models/Model.cs b/backend/api/api/Models/Model.cs
index a807316f..bbbf201e 100644
--- a/backend/api/api/Models/Model.cs
+++ b/backend/api/api/Models/Model.cs
@@ -14,18 +14,14 @@ namespace api.Models
         public string name { get; set; }
         public string description { get; set; }
-        //datetime
         public DateTime dateCreated { get; set; }
         public DateTime lastUpdated { get; set; }
-        //proveriti id
-        //public string experimentId { get; set; }
 
         //Neural net training
         public string type { get; set; }
         public string optimizer { get; set; }
         public string lossFunction { get; set; }
-        //public int inputNeurons { get; set; }
         public int hiddenLayers { get; set; }
         public string batchSize { get; set; }
         public string learningRate { get; set; }
@@ -36,8 +32,6 @@ namespace api.Models
         public string[] metrics { get; set; }
         public int epochs { get; set; }
-        //public bool isTrained { get; set; }
-        //public NullValues[] nullValues { get; set; }
         public bool randomOrder { get; set; }
         public bool randomTestSet { get; set; }
         public float randomTestSetDistribution { get; set; }
diff --git a/backend/api/api/Models/Predictor.cs b/backend/api/api/Models/Predictor.cs
index 530257b2..bfe95a0f 100644
--- a/backend/api/api/Models/Predictor.cs
+++ b/backend/api/api/Models/Predictor.cs
@@ -29,27 +29,4 @@ namespace api.Models
         }
 
-}
-
-/**
-* Paste one or more documents here
-
-{
-    "_id": {
-        "$oid": "625dc348b7856ace8a6f8702"
-
-    },
-    "uploaderId" : "6242ea59486c664208d4255c",
-    "inputs": ["proba",
-        "proba2",
-        "proba3"
-    ],
-    "output" : "izlaz",
-    "isPublic" : true,
-    "accessibleByLink" : true,
-    "dateCreated" : "2022-04-11T20:33:26.937+00:00",
-    "experimentId" : "Neki id eksperimenta",
-    "modelId" : "Neki id eksperimenta",
-    "h5FileId" : "Neki id eksperimenta",
-    "metrics" : [{ }]
-}*/
\ No newline at end of file
+}
\ No newline at end of file
diff --git a/backend/api/api/Services/DatasetService.cs b/backend/api/api/Services/DatasetService.cs
index f38a363b..0b84721e 100644
--- a/backend/api/api/Services/DatasetService.cs
+++ b/backend/api/api/Services/DatasetService.cs
@@ -104,16 +104,6 @@ namespace api.Services
 
             return dataset._id;
         }
-        /*
-public bool CheckDb()
-{
-    Dataset? dataset = null;
-    dataset = _dataset.Find(dataset => dataset.username == "igrannonica").FirstOrDefault();
-
-    if (dataset != null)
-        return false;
-    else
-        return true;
-}*/
+
     }
 }
diff --git a/backend/api/api/Services/FillAnEmptyDb.cs b/backend/api/api/Services/FillAnEmptyDb.cs
index 811e723a..cd35dc78 100644
--- a/backend/api/api/Services/FillAnEmptyDb.cs
+++ b/backend/api/api/Services/FillAnEmptyDb.cs
@@ -54,7 +54,7 @@ namespace api.Services
 
             dataset._id = "";
             dataset.uploaderId = "000000000000000000000000";
-            dataset.name = "Titanik dataset(public)";
+            dataset.name = "Titanik dataset (public)";
             dataset.description = "Titanik dataset";
             dataset.fileId = _fileService.GetFileId(fullPath);
             dataset.extension = ".csv";
@@ -277,6 +277,7 @@ namespace api.Services
             model.lossFunction = "sparse_categorical_crossentropy";
             model.hiddenLayers = 5;
             model.batchSize = "64";
+            model.learningRate = "1";
             model.outputNeurons = 0;
             model.layers = new[]
             {
@@ -368,7 +369,7 @@ namespace api.Services
             dataset._id = "";
             dataset.uploaderId = "000000000000000000000000";
             dataset.name = "IMDB-Movie-Data Dataset (public)";
-            dataset.description = "IMDB-Movie-Data Dataset(public)";
+            dataset.description = "IMDB-Movie-Data Dataset (public)";
             dataset.fileId = _fileService.GetFileId(fullPath);
             dataset.extension = ".csv";
             dataset.isPublic = true;
@@ -421,8 +422,8 @@ namespace api.Services
 
             model._id = "";
             model.uploaderId = "000000000000000000000000";
-            model.name = "IMDB model";
-            model.description = "IMDB model";
+            model.name = "IMDB model (public)";
+            model.description = "IMDB model (public)";
             model.dateCreated = DateTime.Now;
             model.lastUpdated = DateTime.Now;
             model.type = "regresioni";
@@ -579,8 +580,8 @@ namespace api.Services
 
             model._id = "";
             model.uploaderId = "000000000000000000000000";
-            model.name = "Churn model";
-            model.description = "Churn model";
+            model.name = "Churn model (public)";
+            model.description = "Churn model (public)";
             model.dateCreated = DateTime.Now;
             model.lastUpdated = DateTime.Now;
             model.type = "binarni-klasifikacioni";
diff --git a/backend/api/api/Services/MlConnectionService.cs b/backend/api/api/Services/MlConnectionService.cs
index 0ecdb1af..6a307e0d 100644
--- a/backend/api/api/Services/MlConnectionService.cs
+++ b/backend/api/api/Services/MlConnectionService.cs
@@ -63,19 +63,6 @@ namespace api.Services
 
             foreach (var connection in ChatHub.getAllConnectionsOfUser(id))
                 await _ichat.Clients.Client(connection).SendAsync("NotifyDataset",newDataset.name,newDataset._id);
-
-            string proba = "";
-
-            for (int i = 0; i < newDataset.cMatrix.Length; i++)
-            {
-                proba = i +" ";
-                for (int j = 0; j < newDataset.cMatrix[i].Length; j++)
-                    proba += newDataset.cMatrix[i][j] + "f, ";
-
-                Console.WriteLine(proba);
-                proba = "";
-            }
-
             return;
         }
diff --git a/backend/microservice/api/controller.py b/backend/microservice/api/controller.py
index 7852b63d..6f483008 100644
--- a/backend/microservice/api/controller.py
+++ b/backend/microservice/api/controller.py
@@ -82,9 +82,6 @@ def train():
     files = {'file': open(filepath, 'rb')}
     r=requests.post(url, files=files,data={"uploaderId":paramsExperiment['uploaderId']})
    fileId=r.text
-    m = []
-    for attribute, value in result.items():
-        m.append({"Name" : attribute, "JsonValue" : value})
     predictor = {
         "_id" : "",
         "uploaderId" : paramsModel["uploaderId"],
@@ -95,13 +92,14 @@ def train():
         "experimentId" : paramsExperiment["_id"],
         "modelId" : paramsModel["_id"],
         "h5FileId" : fileId,
-        "metrics" : m,
+        "metrics" : result,
         "finalMetrics":finalMetrics
-
         }
     #print(predictor)
+
     url = config.api_url + "/Predictor/add"
     r = requests.post(url, json=predictor).text
+    print(r)
     return r
diff --git a/backend/microservice/api/newmlservice.py b/backend/microservice/api/newmlservice.py
index fd21f8ce..bcff5a33 100644
--- a/backend/microservice/api/newmlservice.py
+++ b/backend/microservice/api/newmlservice.py
@@ -393,14 +393,14 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
         macro_averaged_f1=sm.f1_score(y_test, y_pred, average = 'macro')
         micro_averaged_f1=sm.f1_score(y_test, y_pred, average = 'micro')
-        metrics= {
-            "macro_averaged_precision" :float(macro_averaged_precision),
-            "micro_averaged_precision" : float(micro_averaged_precision),
-            "macro_averaged_recall" : float(macro_averaged_recall),
-            "micro_averaged_recall" : float(micro_averaged_recall),
-            "macro_averaged_f1" : float(macro_averaged_f1),
-            "micro_averaged_f1" : float(micro_averaged_f1)
-        }
+        metrics= [
+            {"Name":"macro_averaged_precision", "JsonValue":str(macro_averaged_precision)},
+            {"Name":"micro_averaged_precision" ,"JsonValue":str(micro_averaged_precision)},
+            {"Name":"macro_averaged_recall", "JsonValue":str(macro_averaged_recall)},
+            {"Name":"micro_averaged_recall" ,"JsonValue":str(micro_averaged_recall)},
+            {"Name":"macro_averaged_f1","JsonValue": str(macro_averaged_f1)},
+            {"Name":"micro_averaged_f1", "JsonValue": str(micro_averaged_f1)}
+        ]
 
         #vizuelizacija u python-u
         #from ann_visualizer.visualize import ann_viz;
@@ -461,20 +461,20 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
         f1 = float(sm.f1_score(y_test,y_pred))
         fpr, tpr, _ = sm.roc_curve(y_test,y_pred)
         logloss = float(sm.log_loss(y_test, y_pred))
-        metrics= {
-            "accuracy" : accuracy,
-            "precision" : precision,
-            "recall" : recall,
-            "specificity" : specificity,
-            "f1" : f1,
-            "tn" : float(tn),
-            "fp" : float(fp),
-            "fn" : float(fn),
-            "tp" : float(tp),
-            "fpr" : fpr.tolist(),
-            "tpr" : tpr.tolist(),
-            "logloss" : logloss
-        }
+        metrics= [
+            {"Name":"accuracy" ,"JsonValue": str(accuracy)},
+            {"Name":"precision","JsonValue": str(precision)},
+            {"Name":"recall" , "JsonValue":str(recall)},
+            {"Name":"specificity" ,"JsonValue":str(specificity)},
+            {"Name":"f1" ,"JsonValue": str(f1)},
+            {"Name":"tn" , "JsonValue":str(tn)},
+            {"Name":"fp" , "JsonValue":str(fp)},
+            {"Name":"fn" , "JsonValue":str(fn)},
+            {"Name":"tp" , "JsonValue":str(tp)},
+            {"Name":"fpr" ,"JsonValue": str(fpr.tolist())},
+            {"Name":"tpr" , "JsonValue":str(tpr.tolist())},
+            {"Name":"logloss" , "JsonValue":str(logloss)}
+        ]
 
         return filepath,hist,metrics
@@ -514,12 +514,12 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
         history=classifier.fit( x=x_train, y=y_train, epochs = paramsModel['epochs'],batch_size=int(paramsModel['batchSize']),callbacks=callback(x_test, y_test,paramsModel['_id']),validation_data=(x_val, y_val))
         hist=history.history
+        y_pred=classifier.predict(x_test)
 
         #print(classifier.evaluate(x_test, y_test))
 
         classifier.save(filepath, save_format='h5')
-
-
+
         mse = float(sm.mean_squared_error(y_test,y_pred))
         mae = float(sm.mean_absolute_error(y_test,y_pred))
         mape = float(sm.mean_absolute_percentage_error(y_test,y_pred))
@@ -531,14 +531,16 @@ def train(dataset, paramsModel,paramsExperiment,paramsDataset,callback):
         n = 40
         k = 2
         adj_r2 = float(1 - ((1-r2)*(n-1)/(n-k-1)))
-        metrics= {"mse" : mse,
-            "mae" : mae,
-            "mape" : mape,
-            "rmse" : rmse,
-            "rmsle" : rmsle,
-            "r2" : r2,
-            "adj_r2" : adj_r2
-        }
+        metrics= [
+            {"Name":"mse","JsonValue":str(mse)},
+
+            {"Name":"mae","JsonValue":str(mae)},
+            {"Name":"mape","JsonValue":str( mape)},
+            {"Name":"rmse","JsonValue":str(rmse)},
+            {"Name":"rmsle","JsonValue":str(rmsle)},
+            {"Name":"r2","JsonValue":str( r2)},
+            {"Name":"adj_r2","JsonValue":str(adj_r2)}
+        ]
 
         return filepath,hist,metrics
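
The hunks above move metric serialization out of controller.py: every branch of newmlservice.py now returns metrics as a list of {"Name": ..., "JsonValue": ...} objects, and controller.py posts that list directly as the Predictor's "metrics" field. A minimal sketch of the equivalent conversion, assuming a plain {metric_name: value} dict as input (the helper name to_metric_list is illustrative and not part of the codebase):

    # Convert a {metric_name: value} dict into the Name/JsonValue list
    # shape posted to the /Predictor/add endpoint in these hunks.
    def to_metric_list(result):
        return [{"Name": name, "JsonValue": str(value)} for name, value in result.items()]

    # Example:
    # to_metric_list({"mse": 0.12, "r2": 0.87})
    # -> [{"Name": "mse", "JsonValue": "0.12"}, {"Name": "r2", "JsonValue": "0.87"}]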