author     Sonja Galovic <galovicsonja@gmail.com>  2022-04-13 22:44:06 +0200
committer  Sonja Galovic <galovicsonja@gmail.com>  2022-04-13 22:44:06 +0200
commit     5d856cf20177711074b473cc2d2133b85826fe63 (patch)
tree       c7029d75af6d53e40bf806c51f1f4fcfc2136104 /backend/microservice/api/newmlservice.py
parent     3c4bfb16f55aa51a50ee99327caca639e072cdab (diff)
parent     3cb8b60ed4e36f39187e64270479f472e08fb615 (diff)
Merge branch 'dev' of http://gitlab.pmf.kg.ac.rs/igrannonica/neuronstellar into dev
Diffstat (limited to 'backend/microservice/api/newmlservice.py')
-rw-r--r--  backend/microservice/api/newmlservice.py  28
1 file changed, 14 insertions(+), 14 deletions(-)
diff --git a/backend/microservice/api/newmlservice.py b/backend/microservice/api/newmlservice.py
index 02ce2250..d19a4e44
--- a/backend/microservice/api/newmlservice.py
+++ b/backend/microservice/api/newmlservice.py
@@ -225,28 +225,28 @@ def train(dataset, params, callback):
 #
     ###OPTIMIZERS
-    if(params['optimizer']=='Adam'):
+    if(params['optimizer']=='adam'):
         opt=tf.keras.optimizers.Adam(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Adadelta'):
+    elif(params['optimizer']=='adadelta'):
         opt=tf.keras.optimizers.Adadelta(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Adagrad'):
+    elif(params['optimizer']=='adagrad'):
         opt=tf.keras.optimizers.Adagrad(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Adamax'):
+    elif(params['optimizer']=='adamax'):
         opt=tf.keras.optimizers.Adamax(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Nadam'):
+    elif(params['optimizer']=='nadam'):
         opt=tf.keras.optimizers.Nadam(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='SGD'):
+    elif(params['optimizer']=='sgd'):
         opt=tf.keras.optimizers.SGD(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='Ftrl'):
+    elif(params['optimizer']=='ftrl'):
         opt=tf.keras.optimizers.Ftrl(learning_rate=params['learningRate'])
-    elif(params['optimizer']=='RMSprop'):
+    elif(params['optimizer']=='rmsprop'):
         opt=tf.keras.optimizers.RMSprop(learning_rate=params['learningRate'])
     ###REGULARIZATION
@@ -282,10 +282,10 @@ def train(dataset, params, callback):
         #print('multi')
         classifier=tf.keras.Sequential()
-        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1]),kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg)#first hidden layer + defining the preceding (input) layer
+        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1]))#first hidden layer + defining the preceding (input) layer
         for i in range(params['hiddenLayers']-1):#if there is more than one hidden layer
             #print(i)
-            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#i-th hidden layer
+            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1]))#i-th hidden layer
         classifier.add(tf.keras.layers.Dense(units=5, activation=params['outputLayerActivationFunction']))#output layer
@@ -309,10 +309,10 @@ def train(dataset, params, callback):
         #print('*************************************************************************binarni')
         classifier=tf.keras.Sequential()
-        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#first hidden layer + defining the preceding (input) layer
+        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1]))#first hidden layer + defining the preceding (input) layer
         for i in range(params['hiddenLayers']-1):#if there is more than one hidden layer
             #print(i)
-            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#i-th hidden layer
+            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1]))#i-th hidden layer
         classifier.add(tf.keras.layers.Dense(units=1, activation=params['outputLayerActivationFunction']))#output layer
         classifier.compile(loss =params["lossFunction"] , optimizer = params['optimizer'] , metrics =params['metrics'])
@@ -334,10 +334,10 @@ def train(dataset, params, callback):
     elif(problem_type=='regresioni'):
         classifier=tf.keras.Sequential()
-        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#first hidden layer + defining the preceding (input) layer
+        classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][0],input_dim=x_train.shape[1]))#first hidden layer + defining the preceding (input) layer
         for i in range(params['hiddenLayers']-1):#if there is more than one hidden layer
             #print(i)
-            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1],kernel_regularizer=kernelreg,bias_regularizer=biasreg,activity_regularizer=activityreg))#i-th hidden layer
+            classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1]))#i-th hidden layer
         classifier.add(tf.keras.layers.Dense(units=1))
         classifier.compile(loss =params["lossFunction"] , optimizer = params['optimizer'] , metrics =params['metrics'])
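
Note: the first hunk only lowercases the strings matched against params['optimizer'], so the chain now expects lowercase names from the caller. A minimal sketch of a table-driven alternative, assuming the same params keys used in newmlservice.py ('optimizer', 'learningRate'); OPTIMIZERS and make_optimizer are illustrative names, not part of this commit:

import tensorflow as tf

# Map lowercase optimizer names to their tf.keras optimizer classes.
OPTIMIZERS = {
    'adam': tf.keras.optimizers.Adam,
    'adadelta': tf.keras.optimizers.Adadelta,
    'adagrad': tf.keras.optimizers.Adagrad,
    'adamax': tf.keras.optimizers.Adamax,
    'nadam': tf.keras.optimizers.Nadam,
    'sgd': tf.keras.optimizers.SGD,
    'ftrl': tf.keras.optimizers.Ftrl,
    'rmsprop': tf.keras.optimizers.RMSprop,
}

def make_optimizer(params):
    # Normalizing with .lower() accepts both the old capitalized names
    # ('Adam') and the new lowercase ones ('adam'), so frontend and
    # backend would not have to agree on casing.
    opt_cls = OPTIMIZERS[params['optimizer'].lower()]
    return opt_cls(learning_rate=params['learningRate'])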
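
Note: the remaining hunks drop kernel_regularizer, bias_regularizer and activity_regularizer from every Dense layer. In the removed line of the @@ -282 hunk those keyword arguments were also misplaced: the closing parenthesis of Dense(...) came too early, so they were passed to Sequential.add(), which takes only the layer and would raise a TypeError. A minimal sketch of the correct placement, with placeholder values standing in for the params lookups and for the kernelreg/biasreg/activityreg objects built earlier in train():

import tensorflow as tf

# Illustrative regularizers; the actual objects and factors come from params.
kernelreg = tf.keras.regularizers.l2(0.01)
biasreg = tf.keras.regularizers.l2(0.01)
activityreg = tf.keras.regularizers.l2(0.01)

model = tf.keras.Sequential()
# Regularizers are keyword arguments of the Dense layer itself,
# so they belong inside Dense(...), not in Sequential.add().
model.add(tf.keras.layers.Dense(
    units=32,           # placeholder for params['hiddenLayerNeurons']
    activation='relu',  # placeholder for the configured activation
    input_dim=8,        # placeholder for x_train.shape[1]
    kernel_regularizer=kernelreg,
    bias_regularizer=biasreg,
    activity_regularizer=activityreg,
))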