author    | TAMARA JERINIC <tamara.jerinic@gmail.com> | 2022-04-08 20:03:01 +0200
committer | TAMARA JERINIC <tamara.jerinic@gmail.com> | 2022-04-08 20:03:42 +0200
commit    | 046f40af17e6144f98c9ca7fdaf069f270895136 (patch)
tree      | 65c3492a016934acad0b5dc9e8a7621e57d84fa2 /backend
parent    | 512390ec434fdea464807add0c40687ade73dfa5 (diff)
Enabled selection of the learning rate.
Diffstat (limited to 'backend')
-rw-r--r-- | backend/microservice/api/newmlservice.py | 25 |
1 file changed, 25 insertions, 0 deletions
diff --git a/backend/microservice/api/newmlservice.py b/backend/microservice/api/newmlservice.py
index 02f2ad6d..a92307c5 100644
--- a/backend/microservice/api/newmlservice.py
+++ b/backend/microservice/api/newmlservice.py
@@ -222,6 +222,31 @@ def train(dataset, params, callback):
         classifier.add(tf.keras.layers.Dense(units=params['hiddenLayerNeurons'], activation=params['hiddenLayerActivationFunctions'][i+1]))#i-th hidden layer
     classifier.add(tf.keras.layers.Dense(units=5, activation=params['outputLayerActivationFunction']))#output layer
+    if(params['optimizer']=='Adam'):
+        opt=tf.keras.optimizers.Adam(learning_rate=params['learningRate'])
+
+    elif(params['optimizer']=='Adadelta'):
+        opt=tf.keras.optimizers.Adadelta(learning_rate=params['learningRate'])
+
+    elif(params['optimizer']=='Adagrad'):
+        opt=tf.keras.optimizers.Adagrad(learning_rate=params['learningRate'])
+
+    elif(params['optimizer']=='Adamax'):
+        opt=tf.keras.optimizers.Adamax(learning_rate=params['learningRate'])
+
+    elif(params['optimizer']=='Nadam'):
+        opt=tf.keras.optimizers.Nadam(learning_rate=params['learningRate'])
+
+    elif(params['optimizer']=='SGD'):
+        opt=tf.keras.optimizers.SGD(learning_rate=params['learningRate'])
+
+    elif(params['optimizer']=='Ftrl'):
+        opt=tf.keras.optimizers.Ftrl(learning_rate=params['learningRate'])
+
+    elif(params['optimizer']=='RMSprop'):
+        opt=tf.keras.optimizers.RMSprop(learning_rate=params['learningRate'])
+
+
     classifier.compile(loss =params["lossFunction"] , optimizer = params['optimizer'] , metrics =params['metrics'])
     history=classifier.fit(x_train, y_train, epochs = params['epochs'],batch_size=params['batchSize'])
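Note that the added branches only construct an optimizer instance `opt` with the chosen learning rate, while the unchanged compile() call still passes the name string from params['optimizer'], so the selected learning rate would not actually be used unless the instance is passed instead. A minimal sketch of how the selection could be expressed with a lookup table and wired into compile(); the OPTIMIZERS dict and build_optimizer helper are illustrative and not part of the repository:

import tensorflow as tf

# Illustrative sketch only: map optimizer names to their Keras classes.
OPTIMIZERS = {
    'Adam': tf.keras.optimizers.Adam,
    'Adadelta': tf.keras.optimizers.Adadelta,
    'Adagrad': tf.keras.optimizers.Adagrad,
    'Adamax': tf.keras.optimizers.Adamax,
    'Nadam': tf.keras.optimizers.Nadam,
    'SGD': tf.keras.optimizers.SGD,
    'Ftrl': tf.keras.optimizers.Ftrl,
    'RMSprop': tf.keras.optimizers.RMSprop,
}

def build_optimizer(params):
    # Instantiate the chosen optimizer with the requested learning rate;
    # fall back to Adam if the name is not recognised (assumed default).
    opt_cls = OPTIMIZERS.get(params['optimizer'], tf.keras.optimizers.Adam)
    return opt_cls(learning_rate=params['learningRate'])

# For the learning rate to take effect, compile() would receive the instance
# rather than the name string, e.g.:
# classifier.compile(loss=params['lossFunction'],
#                    optimizer=build_optimizer(params),
#                    metrics=params['metrics'])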