#include "activations.h"    /* project header, assumed to declare the ACTIVATION enum */

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Human-readable name of an activation type. */
char *get_activation_string(ACTIVATION a)
{
    switch(a){
        case LINEAR:
            return "linear";
        case SIGMOID:
            return "sigmoid";
        case RELU:
            return "relu";
        case RAMP:
            return "ramp";
        case TANH:
            return "tanh";
    }
    return "relu";
}

/* Elementwise activation functions. linear_activate and sigmoid_activate are
   filled in because the switches below dispatch to them. */
float linear_activate(float x){return x;}
float sigmoid_activate(float x){return 1./(1. + exp(-x));}
float relu_activate(float x){return x*(x>0);}
float ramp_activate(float x){return x*(x>0)+.1*x;}
float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
//float tanh_activate(float x){return x - (x*x*x)/3;}

/* Gradients written in terms of the activation OUTPUT x rather than the input,
   e.g. sigmoid' = (1-x)*x and tanh' = 1-x*x for output x. */
float linear_gradient(float x){return 1;}
float sigmoid_gradient(float x){return (1-x)*x;}
float relu_gradient(float x){return (x>0);}
float ramp_gradient(float x){return (x>0)+.1;}
float tanh_gradient(float x){return 1-x*x;}

float activate(float x, ACTIVATION a)
{
    switch(a){
        case LINEAR:
            return linear_activate(x);
        case SIGMOID:
            return sigmoid_activate(x);
        case RELU:
            return relu_activate(x);
        case RAMP:
            return ramp_activate(x);
        case TANH:
            return tanh_activate(x);
    }
    return 0;
}

float gradient(float x, ACTIVATION a)
{
    switch(a){
        case LINEAR:
            return linear_gradient(x);
        case SIGMOID:
            return sigmoid_gradient(x);
        case RELU:
            return relu_gradient(x);
        case RAMP:
            return ramp_gradient(x);
        case TANH:
            return tanh_gradient(x);
    }
    return 0;
}
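
/*
 * Minimal usage sketch, not part of the original file: it assumes ACTIVATION
 * is an enum exposing LINEAR, SIGMOID, RELU, RAMP and TANH as used in the
 * switches above, and that this file is built with -DACTIVATIONS_DEMO when a
 * standalone check is wanted. It illustrates that gradient() expects the
 * activation OUTPUT, not the pre-activation input.
 */
#ifdef ACTIVATIONS_DEMO
int main()
{
    float x = 0.5;
    float y = activate(x, SIGMOID);   /* forward value, sigmoid(0.5) ~ 0.622 */
    float dy = gradient(y, SIGMOID);  /* derivative, y*(1-y) ~ 0.235         */
    printf("sigmoid(%f) = %f, gradient = %f\n", x, y, dy);
    return 0;
}
#endif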