Joseph Redmon
2015-03-08 f047cfff99e00e28c02eb59b6d32386c122f9af6
src/activation_kernels.cu
@@ -4,13 +4,13 @@
}
__device__ float linear_activate_kernel(float x){return x;}
__device__ float sigmoid_activate_kernel(float x){return 1./(1. + exp(-x));}
__device__ float logistic_activate_kernel(float x){return 1./(1. + exp(-x));}
__device__ float relu_activate_kernel(float x){return x*(x>0);}
__device__ float ramp_activate_kernel(float x){return x*(x>0)+.1*x;}
__device__ float tanh_activate_kernel(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
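A side note, not part of this commit: the (exp(2*x)-1)/(exp(2*x)+1) form of tanh overflows exp() for moderately large x (in single precision exp(2*x) is inf for x above roughly 44), which turns the quotient into NaN. A minimal overflow-safe equivalent would use CUDA's built-in tanhf intrinsic; the function name below is hypothetical:

// Sketch only, not in this commit: tanhf(x) computes the same value as
// (exp(2*x)-1)/(exp(2*x)+1) without overflowing for large |x|.
__device__ float tanh_activate_kernel_safe(float x){return tanhf(x);}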
 
__device__ float linear_gradient_kernel(float x){return 1;}
__device__ float sigmoid_gradient_kernel(float x){return (1-x)*x;}
__device__ float logistic_gradient_kernel(float x){return (1-x)*x;}
__device__ float relu_gradient_kernel(float x){return (x>0);}
__device__ float ramp_gradient_kernel(float x){return (x>0)+.1;}
__device__ float tanh_gradient_kernel(float x){return 1-x*x;}
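Note the convention here: each gradient kernel is evaluated on the activated output y, not on the pre-activation input. For the sigmoid/logistic, σ'(z) = σ(z)(1 - σ(z)) = y(1 - y), hence (1-x)*x; for tanh, tanh'(z) = 1 - tanh(z)^2 = 1 - y^2. The relu and ramp gradients also work when fed the output, since the output has the same sign as the input. A minimal usage sketch follows; the kernel name and buffer names are assumptions, not from this commit:

// Sketch: backprop through a sigmoid layer. The upstream delta is scaled
// in place by the local derivative, computed from the stored layer output.
__global__ void sigmoid_backward(float *delta, float *output, int n)
{
    int i = blockIdx.x*blockDim.x + threadIdx.x;
    if(i < n) delta[i] *= sigmoid_gradient_kernel(output[i]);
}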
@@ -20,8 +20,8 @@
    switch(a){
        case LINEAR:
            return linear_activate_kernel(x);
        case SIGMOID:
            return sigmoid_activate_kernel(x);
        case LOGISTIC:
            return logistic_activate_kernel(x);
        case RELU:
            return relu_activate_kernel(x);
        case RAMP:
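The hunk is cut off at RAMP, but the pattern is clear: a per-scalar dispatcher that switches on the activation type. In practice such a dispatcher gets wrapped in an elementwise kernel over a whole array; a minimal sketch, assuming the enclosing function is named activate_kernel and takes darknet's ACTIVATION enum (the wrapper name and launch indexing are assumptions, not part of this commit):

// Sketch of an elementwise wrapper over the per-scalar dispatcher above;
// each thread applies the selected activation to one element in place.
__global__ void activate_array_kernel(float *x, int n, ACTIVATION a)
{
    int i = blockIdx.x*blockDim.x + threadIdx.x;
    if(i < n) x[i] = activate_kernel(x[i], a);
}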
@@ -37,8 +37,8 @@
    switch(a){
        case LINEAR:
            return linear_gradient_kernel(x);
        case SIGMOID:
            return sigmoid_gradient_kernel(x);
        case LOGISTIC:
            return logistic_gradient_kernel(x);
        case RELU:
            return relu_gradient_kernel(x);
        case RAMP:
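The gradient dispatcher mirrors the activation one. On the host side, both would typically be launched over a 1-D grid sized to the array; a minimal sketch, assuming the dispatcher is named gradient_kernel to match activate_kernel (the BLOCK size, wrapper, and launcher names are assumptions, not part of this commit):

// Sketch: elementwise gradient kernel plus a host launcher. delta is
// multiplied in place by the derivative at each activated output x[i].
#define BLOCK 256

__global__ void gradient_array_kernel(float *x, int n, ACTIVATION a, float *delta)
{
    int i = blockIdx.x*blockDim.x + threadIdx.x;
    if(i < n) delta[i] *= gradient_kernel(x[i], a);
}

void gradient_array_gpu(float *x, int n, ACTIVATION a, float *delta)
{
    gradient_array_kernel<<<(n+BLOCK-1)/BLOCK, BLOCK>>>(x, n, a, delta);
    cudaDeviceSynchronize();
}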