From e92f7d301c971b4d27aa3dcd1e4047e94f04b3fc Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Wed, 25 Mar 2015 01:27:12 +0000
Subject: [PATCH] smaller gridsize in bias
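
In activation_kernels.cu: rename the SIGMOID activation to LOGISTIC,
add a PLSE (piecewise linear, sigmoid-like) activation and gradient,
and drop a dead commented-out copy of ramp_activate_kernel. PLSE
approximates the logistic curve with three linear pieces:

    plse(x) = .01*(x + 4)        if x < -4
            = .125*x + .5        if -4 <= x <= 4
            = .01*(x - 4) + 1    if x > 4

The pieces meet at the knots (plse(-4) = 0, plse(4) = 1), so the
function is continuous, saturating toward 0 and 1 with a small .01
slope instead of flattening out entirely. As with the other gradient
helpers in this file, the gradients are written in terms of the
activation's *output*: logistic_gradient_kernel returns (1-y)*y for
y = logistic(x), and plse_gradient_kernel returns .125 while the
output is inside [0, 1] and .01 once it has left that range.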

---
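Note: the __device__ helpers in this file are applied element-wise by a
__global__ driver kernel. A minimal sketch of such a driver, for
reference only (the name activate_array_kernel and the flattened 2-D
grid indexing are illustrative assumptions, not necessarily what this
tree uses):

    __global__ void activate_array_kernel(float *x, int n, ACTIVATION a)
    {
        // Flatten a 2-D grid of 1-D blocks into a single element index.
        // Using a 2-D grid keeps gridDim.x under the 65535-blocks-per-
        // dimension limit of early CUDA devices when n is large.
        int i = (blockIdx.x + blockIdx.y*gridDim.x)*blockDim.x + threadIdx.x;
        if(i < n) x[i] = activate_kernel(x[i], a);
    }
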
 src/activation_kernels.cu |   24 +++++++++++++++++-------
 1 file changed, 17 insertions(+), 7 deletions(-)

diff --git a/src/activation_kernels.cu b/src/activation_kernels.cu
index 7703d43..32c032c 100644
--- a/src/activation_kernels.cu
+++ b/src/activation_kernels.cu
@@ -4,31 +4,39 @@
 }
 
 __device__ float linear_activate_kernel(float x){return x;}
-__device__ float sigmoid_activate_kernel(float x){return 1./(1. + exp(-x));}
+__device__ float logistic_activate_kernel(float x){return 1./(1. + exp(-x));}
 __device__ float relu_activate_kernel(float x){return x*(x>0);}
 __device__ float ramp_activate_kernel(float x){return x*(x>0)+.1*x;}
-//__device__ float ramp_activate_kernel(float x){return 0;}
 __device__ float tanh_activate_kernel(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
+__device__ float plse_activate_kernel(float x)
+{
+    if(x < -4) return .01 * (x + 4);
+    if(x > 4)  return .01 * (x - 4) + 1;
+    return .125*x + .5;
+}
  
 __device__ float linear_gradient_kernel(float x){return 1;}
-__device__ float sigmoid_gradient_kernel(float x){return (1-x)*x;}
+__device__ float logistic_gradient_kernel(float x){return (1-x)*x;}
 __device__ float relu_gradient_kernel(float x){return (x>0);}
 __device__ float ramp_gradient_kernel(float x){return (x>0)+.1;}
 __device__ float tanh_gradient_kernel(float x){return 1-x*x;}
+__device__ float plse_gradient_kernel(float x){return (x < 0 || x > 1) ? .01 : .125;}
 
 __device__ float activate_kernel(float x, ACTIVATION a)
 {
     switch(a){
         case LINEAR:
             return linear_activate_kernel(x);
-        case SIGMOID:
-            return sigmoid_activate_kernel(x);
+        case LOGISTIC:
+            return logistic_activate_kernel(x);
         case RELU:
             return relu_activate_kernel(x);
         case RAMP:
             return ramp_activate_kernel(x);
         case TANH:
             return tanh_activate_kernel(x);
+        case PLSE:
+            return plse_activate_kernel(x);
     }
     return 0;
 }
@@ -38,14 +46,16 @@
     switch(a){
         case LINEAR:
             return linear_gradient_kernel(x);
-        case SIGMOID:
-            return sigmoid_gradient_kernel(x);
+        case LOGISTIC:
+            return logistic_gradient_kernel(x);
         case RELU:
             return relu_gradient_kernel(x);
         case RAMP:
             return ramp_gradient_kernel(x);
         case TANH:
             return tanh_gradient_kernel(x);
+        case PLSE:
+            return plse_gradient_kernel(x);
     }
     return 0;
 }

--
Gitblit v1.10.0
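
For context, a host-side wrapper pairing with the driver sketched in
the notes above might look like the following. Everything here is
hypothetical (BLOCK, cuda_gridsize, and activate_array_ongpu are
illustrative names, not confirmed parts of this tree); it only shows
one common way to cap grid dimensions, which the subject line's
"smaller gridsize" presumably refers to:

    #include <math.h>

    #define BLOCK 256

    dim3 cuda_gridsize(size_t n)
    {
        size_t k = (n - 1)/BLOCK + 1;      // blocks needed to cover n
        unsigned int x = k, y = 1;
        if(x > 65535){                     // too wide for one dimension:
            x = ceil(sqrt((double)k));     // spread blocks over a 2-D grid
            y = (n - 1)/(x*BLOCK) + 1;
        }
        return dim3(x, y, 1);
    }

    void activate_array_ongpu(float *x, int n, ACTIVATION a)
    {
        activate_array_kernel<<<cuda_gridsize(n), BLOCK>>>(x, n, a);
    }

A call such as activate_array_ongpu(output_gpu, n, LOGISTIC) would then
activate an n-element device array in place.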