From d9f1b0b16edeb59281355a855e18a8be343fc33c Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Fri, 08 Aug 2014 19:04:15 +0000
Subject: [PATCH] probably how maxpool layers should be

---
 src/activations.cl |   23 ++++++++++++++---------
 1 file changed, 14 insertions(+), 9 deletions(-)

diff --git a/src/activations.cl b/src/activations.cl
index 19428b1..65131c5 100644
--- a/src/activations.cl
+++ b/src/activations.cl
@@ -2,27 +2,32 @@
     SIGMOID, RELU, LINEAR, RAMP, TANH
 }ACTIVATION;
 
-float activate(float x, ACTIVATION a, float dropout)
+float linear_activate(float x){return x;}
+float sigmoid_activate(float x){return 1./(1. + exp(-x));}
+float relu_activate(float x){return x*(x>0);}
+float ramp_activate(float x){return x*(x>0)+.1*x;}
+float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
+
+float activate(float x, ACTIVATION a)
 {
-    //if((float)rand()/RAND_MAX < dropout) return 0;
     switch(a){
         case LINEAR:
-            return linear_activate(x)/(1-dropout);
+            return linear_activate(x);
         case SIGMOID:
-            return sigmoid_activate(x)/(1-dropout);
+            return sigmoid_activate(x);
         case RELU:
-            return relu_activate(x)/(1-dropout);
+            return relu_activate(x);
         case RAMP:
-            return ramp_activate(x)/(1-dropout);
+            return ramp_activate(x);
         case TANH:
-            return tanh_activate(x)/(1-dropout);
+            return tanh_activate(x);
     }
     return 0;
 }
 
 __kernel void activate_array(__global float *x,
-    const int n, const ACTIVATION a, const float dropout)
+    const int n, const ACTIVATION a)
 {
     int i = get_global_id(0);
-    x[i] = activate(x[i], a, dropout);
+    x[i] = activate(x[i], a);
 }
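
For reference, the host-side launch loses the dropout argument along with the kernel. The snippet below is a minimal, hypothetical sketch in plain OpenCL host C, not the actual darknet host code: it assumes `kernel` was built from this activations.cl via clCreateKernel(program, "activate_array", &err), that `x_gpu` is a cl_mem buffer holding n floats, and that the host-side activation constant uses the same ordering as the ACTIVATION enum above (SIGMOID, RELU, LINEAR, RAMP, TANH).

    #include <CL/cl.h>

    /* Hypothetical helper: enqueue the updated activate_array kernel.
     * Error checking is omitted to keep the sketch short. */
    void activate_array_on_gpu(cl_command_queue queue, cl_kernel kernel,
                               cl_mem x_gpu, cl_int n, cl_int activation)
    {
        /* Only three arguments remain now that dropout is gone. */
        clSetKernelArg(kernel, 0, sizeof(cl_mem), &x_gpu);
        clSetKernelArg(kernel, 1, sizeof(cl_int), &n);
        clSetKernelArg(kernel, 2, sizeof(cl_int), &activation); /* ACTIVATION passed as an int */

        /* One work-item per element; activate_array indexes with get_global_id(0). */
        size_t global = (size_t)n;
        clEnqueueNDRangeKernel(queue, kernel, 1, NULL, &global, NULL, 0, NULL, NULL);
    }

Because the kernel writes x[i] unconditionally, the global work size here is exactly n; if it were rounded up to a work-group multiple, activate_array would also need an if (i < n) guard.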

--
Gitblit v1.10.0