Mirror of https://github.com/pjreddie/darknet.git, synced 2023-08-10 21:13:14 +03:00.
Commit message: "not much changed..."
This commit is contained in:
@ -3,7 +3,7 @@
|
||||
#define ACTIVATIONS_H
|
||||
|
||||
typedef enum{
|
||||
LOGISTIC, RELU, LINEAR, RAMP, TANH, PLSE
|
||||
LOGISTIC, RELU, RELIE, LINEAR, RAMP, TANH, PLSE
|
||||
}ACTIVATION;
|
||||
|
||||
ACTIVATION get_activation(char *s);
|
||||
@ -21,6 +21,7 @@ void gradient_array_ongpu(float *x, int n, ACTIVATION a, float *delta);
|
||||
/* Identity activation: returns the input unchanged. */
static inline float linear_activate(float x)
{
    return x;
}
|
||||
static inline float logistic_activate(float x){return 1./(1. + exp(-x));}
|
||||
/* Rectified linear unit: the comparison acts as a 0/1 mask that
 * zeroes out negative inputs. */
static inline float relu_activate(float x)
{
    float mask = (x > 0);
    return x * mask;
}
|
||||
/* Leaky rectifier ("relie"): slope 1 for positive inputs, small slope .01
 * for negative inputs.
 *
 * Fix: the original body `x*(x>0)` was a plain ReLU, identical to
 * relu_activate, which is inconsistent with relie_gradient below
 * (which returns .01 for x <= 0). The forward pass must have the .01
 * leaky slope for the gradient to be correct. */
static inline float relie_activate(float x){return (x>0) ? x : .01*x;}
|
||||
/* Ramp: ReLU plus a small .1*x linear term, so the output (and gradient)
 * never goes completely flat on the negative side. */
static inline float ramp_activate(float x)
{
    float rectified = x * (x > 0);
    return rectified + .1 * x;
}
|
||||
static inline float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
|
||||
static inline float plse_activate(float x)
|
||||
@ -33,6 +34,7 @@ static inline float plse_activate(float x)
|
||||
/* Derivative of the identity is 1 everywhere; the input is unused. */
static inline float linear_gradient(float x)
{
    (void)x;
    return 1;
}
|
||||
/* Sigmoid derivative expressed in terms of the sigmoid OUTPUT:
 * if x = sigma(z), then sigma'(z) = x * (1 - x). */
static inline float logistic_gradient(float x)
{
    return (1 - x) * x;
}
|
||||
/* ReLU derivative: 1 for positive pre-activations, 0 otherwise. */
static inline float relu_gradient(float x)
{
    return x > 0 ? 1.f : 0.f;
}
|
||||
/* Leaky-ReLU derivative: slope 1 on the positive side, a small .01
 * leak everywhere else. */
static inline float relie_gradient(float x)
{
    if (x > 0) {
        return 1;
    }
    return .01;
}
|
||||
/* Derivative of the ramp activation: 1 + .1 when x > 0, else just .1. */
static inline float ramp_gradient(float x)
{
    float step = (x > 0);
    return step + .1;
}
|
||||
/* tanh derivative expressed in terms of the tanh OUTPUT:
 * if x = tanh(z), then d tanh/dz = 1 - x^2. */
static inline float tanh_gradient(float x)
{
    return 1 - x * x;
}
|
||||
/* PLSE derivative: slope .125 on the central linear segment [0, 1],
 * a small .01 leak on the saturated tails. */
static inline float plse_gradient(float x)
{
    int on_tail = (x < 0 || x > 1);
    return on_tail ? .01 : .125;
}
|
||||
|
Reference in New Issue
Block a user