diff options
author | Sonja Galovic <galovicsonja@gmail.com> | 2022-03-30 23:55:26 +0200 |
---|---|---|
committer | Sonja Galovic <galovicsonja@gmail.com> | 2022-03-30 23:55:26 +0200 |
commit | 5a223516a7481098fadab81ad062e5ec5b38144c (patch) | |
tree | 24dc2fe7cde46194b6addfd35e1e1a0ea44242dc /frontend/src/app/_data | |
parent | 0467667df8e5beaa08f6546cb6ef93ebd3c8db8d (diff) | |
parent | 39fc1f0cc9871b4436b839acb6ce4260e6c33931 (diff) |
Merge branch 'dev' of http://gitlab.pmf.kg.ac.rs/igrannonica/neuronstellar into dev
Diffstat (limited to 'frontend/src/app/_data')
-rw-r--r-- | frontend/src/app/_data/Model.ts | 60 |
1 file changed, 53 insertions, 7 deletions
diff --git a/frontend/src/app/_data/Model.ts b/frontend/src/app/_data/Model.ts index dd3cb760..ff9f8329 100644 --- a/frontend/src/app/_data/Model.ts +++ b/frontend/src/app/_data/Model.ts @@ -1,3 +1,5 @@ +import { NgIf } from "@angular/common"; + export default class Model { _id: string = ''; constructor( @@ -45,6 +47,7 @@ export enum ProblemType { export enum Encoding { Label = 'label', OneHot = 'one hot', + /* BackwardDifference = 'backward difference', BaseN = 'baseN', Binary = 'binary', @@ -62,34 +65,77 @@ export enum Encoding { Target = 'target', WOE = 'woe', Quantile = 'quantile' + */ } export enum ActivationFunction { // linear Binary_Step = 'binaryStep', - Linear = 'linear', // non-linear - Relu = 'relu', Leaky_Relu = 'leakyRelu', Parameterised_Relu = 'parameterisedRelu', Exponential_Linear_Unit = 'exponentialLinearUnit', Swish = 'swish', - Sigmoid = 'sigmoid', - Tanh = 'tanh', - Softmax = 'softmax' -} + //hiddenLayers + Relu='relu', + Sigmoid='sigmoid', + Tanh='tanh', + + //outputLayer + Linear = 'linear', + //Sigmoid='sigmoid', + Softmax='softmax', +} +/* +export enum ActivationFunctionHiddenLayer +{ + Relu='relu', + Sigmoid='sigmoid', + Tanh='tanh' +} +export enum ActivationFunctionOutputLayer +{ + Linear = 'linear', + Sigmoid='sigmoid', + Softmax='softmax' +} +*/ export enum LossFunction { // binary classification loss functions BinaryCrossEntropy = 'binary_crossentropy', + SquaredHingeLoss='squared_hinge_loss', HingeLoss = 'hinge_loss', // multi-class classiication loss functions CategoricalCrossEntropy = 'categorical_crossentropy', + SparseCategoricalCrossEntropy='sparse_categorical_crosentropy', KLDivergence = 'kullback_leibler_divergence', + // regression loss functions + + MeanAbsoluteError = 'mean_absolute_error', MeanSquaredError = 'mean_squared_error', + MeanSquaredLogarithmicError='mean_squared_logarithmic_error', + HuberLoss = 'Huber' + +} +export enum LossFunctionRegression +{ MeanAbsoluteError = 'mean_absolute_error', - HuberLoss = 'Huber', + 
MeanSquaredError = 'mean_squared_error', + MeanSquaredLogarithmicError='mean_squared_logarithmic_error', +} +export enum LossFunctionBinaryClassification +{ + BinaryCrossEntropy = 'binary_crossentropy', + SquaredHingeLoss='squared_hinge_loss', + HingeLoss = 'hinge_loss', +} +export enum LossFunctionMultiClassification +{ + CategoricalCrossEntropy = 'categorical_crossentropy', + SparseCategoricalCrossEntropy='sparse_categorical_crosentropy', + KLDivergence = 'kullback_leibler_divergence', } export enum Optimizer { |