#include "activations.h"

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

| | | double identity_activation(double x) |
| | | char *get_activation_string(ACTIVATION a) |
| | | { |
| | | return x; |
| | | } |
| | | double identity_gradient(double x) |
| | | { |
| | | return 1; |
| | | switch(a){ |
| | | case LOGISTIC: |
| | | return "logistic"; |
| | | case RELU: |
| | | return "relu"; |
| | | case RELIE: |
| | | return "relie"; |
| | | case RAMP: |
| | | return "ramp"; |
| | | case LINEAR: |
| | | return "linear"; |
| | | case TANH: |
| | | return "tanh"; |
| | | case PLSE: |
| | | return "plse"; |
| | | default: |
| | | break; |
| | | } |
| | | return "relu"; |
| | | } |
| | | |
| | | double relu_activation(double x) |
| | | ACTIVATION get_activation(char *s) |
| | | { |
| | | return x*(x>0); |
| | | } |
| | | double relu_gradient(double x) |
| | | { |
| | | return (x>=0); |
| | | if (strcmp(s, "logistic")==0) return LOGISTIC; |
| | | if (strcmp(s, "relu")==0) return RELU; |
| | | if (strcmp(s, "relie")==0) return RELIE; |
| | | if (strcmp(s, "plse")==0) return PLSE; |
| | | if (strcmp(s, "linear")==0) return LINEAR; |
| | | if (strcmp(s, "ramp")==0) return RAMP; |
| | | if (strcmp(s, "tanh")==0) return TANH; |
| | | fprintf(stderr, "Couldn't find activation function %s, going with ReLU\n", s); |
| | | return RELU; |
| | | } |
| | | |
| | | double sigmoid_activation(double x) |
| | | float activate(float x, ACTIVATION a) |
| | | { |
| | | return 1./(1.+exp(-x)); |
| | | switch(a){ |
| | | case LINEAR: |
| | | return linear_activate(x); |
| | | case LOGISTIC: |
| | | return logistic_activate(x); |
| | | case RELU: |
| | | return relu_activate(x); |
| | | case RELIE: |
| | | return relie_activate(x); |
| | | case RAMP: |
| | | return ramp_activate(x); |
| | | case TANH: |
| | | return tanh_activate(x); |
| | | case PLSE: |
| | | return plse_activate(x); |
| | | } |
| | | return 0; |
| | | } |
| | | |
| | | double sigmoid_gradient(double x) |
| | | void activate_array(float *x, const int n, const ACTIVATION a) |
| | | { |
| | | return x*(1.-x); |
| | | int i; |
| | | for(i = 0; i < n; ++i){ |
| | | x[i] = activate(x[i], a); |
| | | } |
| | | } |
| | | |
| | | float gradient(float x, ACTIVATION a) |
| | | { |
| | | switch(a){ |
| | | case LINEAR: |
| | | return linear_gradient(x); |
| | | case LOGISTIC: |
| | | return logistic_gradient(x); |
| | | case RELU: |
| | | return relu_gradient(x); |
| | | case RELIE: |
| | | return relie_gradient(x); |
| | | case RAMP: |
| | | return ramp_gradient(x); |
| | | case TANH: |
| | | return tanh_gradient(x); |
| | | case PLSE: |
| | | return plse_gradient(x); |
| | | } |
| | | return 0; |
| | | } |
| | | |
| | | void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta) |
| | | { |
| | | int i; |
| | | for(i = 0; i < n; ++i){ |
| | | delta[i] *= gradient(x[i], a); |
| | | } |
| | | } |
| | | |