From 68213b835b9f15cb449ad2037a8b51c17a3de07b Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Mon, 14 Mar 2016 22:10:14 +0000
Subject: [PATCH] Add new activation kernels and rename SIGMOID to LOGISTIC

---
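Notes on the gradient kernels: every *_gradient_kernel takes the
*stored output* of the activation rather than the original input, which
is why logistic_gradient_kernel computes (1-x)*x and
tanh_gradient_kernel computes 1-x*x. The LOGGY case follows the same
convention. Writing y = loggy(z) = 2*sigma(z) - 1:

    \sigma(z) = \frac{1}{1 + e^{-z}}, \qquad
    \frac{dy}{dz} = 2\,\sigma(z)\,(1 - \sigma(z)) = 2\,(1 - s)\,s,
    \qquad s = \frac{y + 1}{2}

so loggy_gradient_kernel first recovers s from the stored output and
then evaluates 2*(1-s)*s. PLSE is continuous at its breakpoints:
.125*(-4) + .5 = 0 matches .01*((-4) + 4), and .125*4 + .5 = 1 matches
.01*(4 - 4) + 1.
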
 src/activation_kernels.cu |   56 +++++++++++++++++++++++++++++++++++++++++++++++++-------
 1 file changed, 49 insertions(+), 7 deletions(-)

diff --git a/src/activation_kernels.cu b/src/activation_kernels.cu
index 7703d43..99933c8 100644
--- a/src/activation_kernels.cu
+++ b/src/activation_kernels.cu
@@ -1,34 +1,66 @@
+#include "cuda_runtime.h"
+#include "curand.h"
+#include "cublas_v2.h"
+
 extern "C" {
 #include "activations.h"
 #include "cuda.h"
 }
 
 __device__ float linear_activate_kernel(float x){return x;}
-__device__ float sigmoid_activate_kernel(float x){return 1./(1. + exp(-x));}
+__device__ float logistic_activate_kernel(float x){return 1./(1. + exp(-x));}
+__device__ float loggy_activate_kernel(float x){return 2./(1. + exp(-x)) - 1;} // 2*sigmoid(x) - 1, output in (-1,1)
 __device__ float relu_activate_kernel(float x){return x*(x>0);}
+__device__ float elu_activate_kernel(float x){return (x >= 0)*x + (x < 0)*(exp(x)-1);}
+__device__ float relie_activate_kernel(float x){return (x>0) ? x : .01*x;} // .01 negative slope, matching relie_gradient_kernel below
 __device__ float ramp_activate_kernel(float x){return x*(x>0)+.1*x;}
-//__device__ float ramp_activate_kernel(float x){return 0;}
+__device__ float leaky_activate_kernel(float x){return (x>0) ? x : .1*x;}
 __device__ float tanh_activate_kernel(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
+__device__ float plse_activate_kernel(float x) // piecewise linear sigmoid approximation
+{
+    if(x < -4) return .01 * (x + 4);
+    if(x > 4)  return .01 * (x - 4) + 1;
+    return .125*x + .5;
+}
  
 __device__ float linear_gradient_kernel(float x){return 1;}
-__device__ float sigmoid_gradient_kernel(float x){return (1-x)*x;}
+__device__ float logistic_gradient_kernel(float x){return (1-x)*x;} // x is the stored output sigmoid(z), so d/dz = (1-x)*x
+__device__ float loggy_gradient_kernel(float x)
+{
+    float y = (x+1.)/2.; // recover the sigmoid output from the stored loggy output x
+    return 2*(1-y)*y;
+}
 __device__ float relu_gradient_kernel(float x){return (x>0);}
+__device__ float elu_gradient_kernel(float x){return (x >= 0) + (x < 0)*(x + 1);}
+__device__ float relie_gradient_kernel(float x){return (x>0) ? 1 : .01;}
 __device__ float ramp_gradient_kernel(float x){return (x>0)+.1;}
+__device__ float leaky_gradient_kernel(float x){return (x>0) ? 1 : .1;}
 __device__ float tanh_gradient_kernel(float x){return 1-x*x;}
+__device__ float plse_gradient_kernel(float x){return (x < 0 || x > 1) ? .01 : .125;} // stored output outside [0,1] means the .01 tail regions
 
 __device__ float activate_kernel(float x, ACTIVATION a)
 {
     switch(a){
         case LINEAR:
             return linear_activate_kernel(x);
-        case SIGMOID:
-            return sigmoid_activate_kernel(x);
+        case LOGISTIC:
+            return logistic_activate_kernel(x);
+        case LOGGY:
+            return loggy_activate_kernel(x);
         case RELU:
             return relu_activate_kernel(x);
+        case ELU:
+            return elu_activate_kernel(x);
+        case RELIE:
+            return relie_activate_kernel(x);
         case RAMP:
             return ramp_activate_kernel(x);
+        case LEAKY:
+            return leaky_activate_kernel(x);
         case TANH:
             return tanh_activate_kernel(x);
+        case PLSE:
+            return plse_activate_kernel(x);
     }
     return 0;
 }
@@ -38,14 +70,24 @@
     switch(a){
         case LINEAR:
             return linear_gradient_kernel(x);
-        case SIGMOID:
-            return sigmoid_gradient_kernel(x);
+        case LOGISTIC:
+            return logistic_gradient_kernel(x);
+        case LOGGY:
+            return loggy_gradient_kernel(x);
         case RELU:
             return relu_gradient_kernel(x);
+        case ELU:
+            return elu_gradient_kernel(x);
+        case RELIE:
+            return relie_gradient_kernel(x);
         case RAMP:
             return ramp_gradient_kernel(x);
+        case LEAKY:
+            return leaky_gradient_kernel(x);
         case TANH:
             return tanh_gradient_kernel(x);
+        case PLSE:
+            return plse_gradient_kernel(x);
     }
     return 0;
 }

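A quick standalone harness for exercising the new device functions
outside of darknet might look like the sketch below. The file name
check_activations.cu, the wrapper kernel leaky_forward_backward, and
the test values are illustrative assumptions, not part of this patch;
the leaky functions are re-declared so the file compiles on its own
with nvcc:

    // check_activations.cu: standalone sanity check for the new kernels.
    // The wrapper kernel and test values below are illustrative only.
    #include <cstdio>
    #include <cuda_runtime.h>

    __device__ float leaky_activate_kernel(float x){return (x>0) ? x : .1*x;}
    __device__ float leaky_gradient_kernel(float x){return (x>0) ? 1 : .1;}

    // Apply the activation in place, then compute the gradient from the
    // stored output, mirroring how darknet calls these device functions.
    __global__ void leaky_forward_backward(float *x, float *dx, int n)
    {
        int i = blockIdx.x*blockDim.x + threadIdx.x;
        if(i < n){
            x[i]  = leaky_activate_kernel(x[i]);
            dx[i] = leaky_gradient_kernel(x[i]);
        }
    }

    int main()
    {
        const int n = 4;
        float h_x[4] = {-2.f, -.5f, .5f, 2.f};
        float h_dx[4];
        float *d_x, *d_dx;
        cudaMalloc(&d_x, n*sizeof(float));
        cudaMalloc(&d_dx, n*sizeof(float));
        cudaMemcpy(d_x, h_x, n*sizeof(float), cudaMemcpyHostToDevice);
        leaky_forward_backward<<<1, n>>>(d_x, d_dx, n);
        cudaMemcpy(h_x, d_x, n*sizeof(float), cudaMemcpyDeviceToHost);
        cudaMemcpy(h_dx, d_dx, n*sizeof(float), cudaMemcpyDeviceToHost);
        for(int i = 0; i < n; ++i) printf("y = % .2f   dy/dz = %.2f\n", h_x[i], h_dx[i]);
        cudaFree(d_x); cudaFree(d_dx);
        return 0;
    }

Compile with nvcc check_activations.cu -o check_activations; negative
inputs should come back scaled by .1 with a reported gradient of 0.10.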
--
Gitblit v1.10.0