From db0397cfaaf488364e3d2e1669dfefae2ee6ea73 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Mon, 14 Dec 2015 19:57:10 +0000
Subject: [PATCH] shortcut layers, msr networks

---
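Reviewer note (kept below the --- line so `git am` ignores it): this patch
adds three activations to the GPU path -- ELU, RELIE (slope .01 below zero),
and LEAKY (slope .1 below zero) -- plus their gradients and dispatch cases.
Darknet's gradient kernels receive the stored activation output rather than
the pre-activation input (see logistic_gradient_kernel and
tanh_gradient_kernel below), which is why elu_gradient_kernel needs no exp()
call. A sketch of the ELU case in LaTeX, writing f(x) for the stored output:

    % ELU and its derivative, with the derivative rewritten in terms of
    % the stored output f(x), matching elu_gradient_kernel in the diff below.
    f(x) =
    \begin{cases}
      x         & x \ge 0 \\
      e^{x} - 1 & x < 0
    \end{cases}
    \qquad
    f'(x) =
    \begin{cases}
      1                & x \ge 0 \\
      e^{x} = f(x) + 1 & x < 0
    \end{cases}
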
 src/activation_kernels.cu |   22 ++++++++++++++++++++++
 1 file changed, 22 insertions(+), 0 deletions(-)

diff --git a/src/activation_kernels.cu b/src/activation_kernels.cu
index 32c032c..d5607da 100644
--- a/src/activation_kernels.cu
+++ b/src/activation_kernels.cu
@@ -1,3 +1,7 @@
+#include "cuda_runtime.h"
+#include "curand.h"
+#include "cublas_v2.h"
+
 extern "C" {
 #include "activations.h"
 #include "cuda.h"
@@ -6,7 +10,10 @@
 __device__ float linear_activate_kernel(float x){return x;}
 __device__ float logistic_activate_kernel(float x){return 1./(1. + exp(-x));}
 __device__ float relu_activate_kernel(float x){return x*(x>0);}
+__device__ float elu_activate_kernel(float x){return (x >= 0)*x + (x < 0)*(exp(x)-1);}
+__device__ float relie_activate_kernel(float x){return (x>0) ? x : .01*x;}
 __device__ float ramp_activate_kernel(float x){return x*(x>0)+.1*x;}
+__device__ float leaky_activate_kernel(float x){return (x>0) ? x : .1*x;}
 __device__ float tanh_activate_kernel(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
 __device__ float plse_activate_kernel(float x)
 {
@@ -18,7 +25,10 @@
 __device__ float linear_gradient_kernel(float x){return 1;}
 __device__ float logistic_gradient_kernel(float x){return (1-x)*x;}
 __device__ float relu_gradient_kernel(float x){return (x>0);}
+__device__ float elu_gradient_kernel(float x){return (x >= 0) + (x < 0)*(x + 1);}
+__device__ float relie_gradient_kernel(float x){return (x>0) ? 1 : .01;}
 __device__ float ramp_gradient_kernel(float x){return (x>0)+.1;}
+__device__ float leaky_gradient_kernel(float x){return (x>0) ? 1 : .1;}
 __device__ float tanh_gradient_kernel(float x){return 1-x*x;}
 __device__ float plse_gradient_kernel(float x){return (x < 0 || x > 1) ? .01 : .125;}
 
@@ -31,8 +41,14 @@
             return logistic_activate_kernel(x);
         case RELU:
             return relu_activate_kernel(x);
+        case ELU:
+            return elu_activate_kernel(x);
+        case RELIE:
+            return relie_activate_kernel(x);
         case RAMP:
             return ramp_activate_kernel(x);
+        case LEAKY:
+            return leaky_activate_kernel(x);
         case TANH:
             return tanh_activate_kernel(x);
         case PLSE:
@@ -50,8 +66,14 @@
             return logistic_gradient_kernel(x);
         case RELU:
             return relu_gradient_kernel(x);
+        case ELU:
+            return elu_gradient_kernel(x);
+        case RELIE:
+            return relie_gradient_kernel(x);
         case RAMP:
             return ramp_gradient_kernel(x);
+        case LEAKY:
+            return leaky_gradient_kernel(x);
         case TANH:
             return tanh_gradient_kernel(x);
         case PLSE:

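Reviewer note, appended after the diff: below is a minimal standalone harness
for sanity-checking the new LEAKY kernel. It is not part of the patch; it
re-declares the __device__ function locally (a __device__ function is not
callable from host code), and the file name, array contents, and launch
geometry are illustrative only. Build with something like:
nvcc -o leaky_check leaky_check.cu

    // Hypothetical harness, not from the darknet tree: applies the new
    // leaky activation element-wise on the GPU and prints the results.
    #include <cstdio>
    #include <cuda_runtime.h>

    // Same definition as leaky_activate_kernel in the patch above.
    __device__ float leaky_activate_kernel(float x){return (x>0) ? x : .1f*x;}

    // Element-wise wrapper, mirroring how darknet's array kernels dispatch
    // to the per-element __device__ functions.
    __global__ void leaky_forward(float *x, int n)
    {
        int i = blockIdx.x*blockDim.x + threadIdx.x;
        if(i < n) x[i] = leaky_activate_kernel(x[i]);
    }

    int main()
    {
        const int n = 4;
        float h[n] = {-2.f, -.5f, 0.f, 3.f};
        float *d;
        cudaMalloc(&d, n*sizeof(float));
        cudaMemcpy(d, h, n*sizeof(float), cudaMemcpyHostToDevice);
        leaky_forward<<<1, n>>>(d, n);
        cudaMemcpy(h, d, n*sizeof(float), cudaMemcpyDeviceToHost);
        for(int i = 0; i < n; ++i) printf("%f\n", h[i]); // expect -0.2 -0.05 0 3
        cudaFree(d);
        return 0;
    }
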
--
Gitblit v1.10.0