#ifndef ACTIVATIONS_H
#define ACTIVATIONS_H
#include "cuda.h"
#include "math.h"
| | | |
/* Activation function selector used by layers; see the matching
 * *_activate / *_gradient helpers below for each variant's formula. */
typedef enum{
    LOGISTIC, RELU, RELIE, LINEAR, RAMP, TANH, PLSE, LEAKY, ELU, LOGGY
}ACTIVATION;
| | | |
/* Parse an activation name string into its ACTIVATION enum value.
 * Implemented elsewhere (not visible in this header).
 * NOTE(review): behavior for unrecognized names is not visible here — confirm
 * against the implementation before relying on a default. */
ACTIVATION get_activation(char *s);
| | |
| | | |
/* Identity activation: the output equals the input. */
static inline float linear_activate(float x)
{
    return x;
}
| | | static inline float logistic_activate(float x){return 1./(1. + exp(-x));} |
| | | static inline float loggy_activate(float x){return 2./(1. + exp(-x)) - 1;} |
/* Rectified linear unit: passes positive inputs through, zeroes the rest. */
static inline float relu_activate(float x)
{
    float gate = (x > 0);
    return x * gate;
}
| | | static inline float elu_activate(float x){return (x >= 0)*x + (x < 0)*(exp(x)-1);} |
/* "Relie" (leaky-relu variant with slope .01 for negative inputs).
 * Fixed: the previous body x*(x>0) had zero slope for x <= 0, which
 * contradicts relie_gradient below (it returns .01 there). */
static inline float relie_activate(float x){return (x>0) ? x : .01f*x;}
/* Ramp: ReLU plus a small linear term, x*(x>0) + .1*x.
 * Float literal avoids double promotion in this float-only helper. */
static inline float ramp_activate(float x){return x*(x>0)+.1f*x;}
/* Leaky ReLU: x for positive inputs, .1*x otherwise.
 * Float literal avoids double promotion in this float-only helper. */
static inline float leaky_activate(float x){return (x>0) ? x : .1f*x;}
| | | static inline float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);} |
/* Piecewise-linear sigmoid approximation ("plse"): slope .125 on [-4, 4]
 * mapping to [0, 1], slope .01 outside; continuous at the joints.
 * Reconstructed: the body was truncated in this file. Slopes match
 * plse_gradient below (.125 for output in [0,1], .01 outside). */
static inline float plse_activate(float x)
{
    if(x < -4) return .01f * (x + 4);
    if(x > 4)  return .01f * (x - 4) + 1;
    return .125f*x + .5f;
}
| | | |
/* Derivative of the identity activation: constant 1. */
static inline float linear_gradient(float x)
{
    (void)x; /* slope is independent of the input */
    return 1;
}
/* Derivative of the logistic sigmoid expressed through its output:
 * for x = sigma(z), sigma'(z) = x*(1 - x). */
static inline float logistic_gradient(float x)
{
    return x*(1 - x);
}
/* Derivative of loggy expressed through its output x in (-1, 1):
 * map back to the logistic output s = (x+1)/2, then apply 2*s*(1-s). */
static inline float loggy_gradient(float x)
{
    float s = (x+1.)/2.;
    return 2*(1 - s)*s;
}
/* Derivative of ReLU: 1 on the positive side, 0 otherwise. */
static inline float relu_gradient(float x)
{
    if (x > 0) return 1;
    return 0;
}
/* Derivative of ELU expressed through its output x:
 * 1 for the non-negative branch, x + 1 (= e^z) for the negative branch. */
static inline float elu_gradient(float x)
{
    float pos = (x >= 0);
    float neg = (x < 0);
    return pos + neg*(x + 1);
}
/* Derivative of relie: slope 1 on the positive side, .01 otherwise. */
static inline float relie_gradient(float x)
{
    if (x > 0) return 1;
    return .01;
}
/* Derivative of ramp: the constant .1 term plus 1 on the positive side. */
static inline float ramp_gradient(float x)
{
    return .1 + (x > 0);
}
/* Derivative of leaky ReLU: slope 1 on the positive side, .1 otherwise. */
static inline float leaky_gradient(float x)
{
    if (x > 0) return 1;
    return .1;
}
/* Derivative of tanh expressed through its output x: 1 - tanh(z)^2. */
static inline float tanh_gradient(float x)
{
    float sq = x*x;
    return 1 - sq;
}
/* Derivative of plse expressed through its output x:
 * .125 while the output is inside [0, 1], .01 in the outer tails. */
static inline float plse_gradient(float x)
{
    if (x < 0 || x > 1) return .01;
    return .125;
}
| | | |