#ifndef ACTIVATIONS_H
#define ACTIVATIONS_H
#include <math.h>

typedef enum{
    LOGISTIC, RELU, LINEAR, RAMP, TANH
}ACTIVATION;

ACTIVATION get_activation(char *s);

/* Element-wise activation functions. */
static inline float linear_activate(float x){return x;}
static inline float logistic_activate(float x){return 1./(1. + exp(-x));}
static inline float relu_activate(float x){return x*(x>0);}
static inline float ramp_activate(float x){return x*(x>0)+.1*x;}
static inline float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}

/* Gradients, written in terms of the activation output rather than the input. */
static inline float linear_gradient(float x){return 1;}
static inline float logistic_gradient(float x){return (1-x)*x;}
static inline float relu_gradient(float x){return (x>0);}
static inline float ramp_gradient(float x){return (x>0)+.1;}
static inline float tanh_gradient(float x){return 1-x*x;}

#endif
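
/*
 * A minimal sketch of how the declared get_activation() and a dispatching
 * activate() helper might look in a companion .c file. Only get_activation()
 * is actually declared in the header above; the dispatcher name, its
 * staticness, and the accepted strings are assumptions for illustration.
 */
#include <string.h>

ACTIVATION get_activation(char *s)
{
    if (strcmp(s, "logistic") == 0) return LOGISTIC;
    if (strcmp(s, "relu") == 0)     return RELU;
    if (strcmp(s, "ramp") == 0)     return RAMP;
    if (strcmp(s, "tanh") == 0)     return TANH;
    return LINEAR;                  /* fall back to LINEAR for unknown strings */
}

/* Hypothetical dispatcher: maps an ACTIVATION tag to the inline helpers above. */
static float activate(float x, ACTIVATION a)
{
    switch (a) {
        case LOGISTIC: return logistic_activate(x);
        case RELU:     return relu_activate(x);
        case RAMP:     return ramp_activate(x);
        case TANH:     return tanh_activate(x);
        case LINEAR:
        default:       return linear_activate(x);
    }
}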