From e36182cd8c5dd5c6d0aa1f77cf5cdca87e8bb1f0 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Fri, 21 Nov 2014 23:35:19 +0000
Subject: [PATCH] cleaned up data parsing a lot. probably nothing broken?

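In src/activations.cl this splits the per-activation math into small
helper functions (linear/sigmoid/relu/ramp/tanh, each with a matching
gradient), drops the dropout scaling from activate(), and adds a
gradient() dispatcher plus a gradient_array kernel that scales delta[]
in place. activate_array loses its dropout parameter as well.

Neither kernel checks i against n, so the global work size has to match
the array length exactly. A rough host-side sketch of launching the new
gradient_array kernel with the plain OpenCL C API follows; the queue,
kernel, and buffer handles are placeholders, not the actual darknet
host code, and the ACTIVATION value is passed as a plain int here:

    #include <CL/cl.h>

    /* Hypothetical helper: enqueue gradient_array over n elements.
     * queue, kernel, x_gpu and delta_gpu are assumed to be created
     * elsewhere (clCreateCommandQueue/clCreateKernel/clCreateBuffer). */
    static cl_int launch_gradient_array(cl_command_queue queue, cl_kernel kernel,
                                        cl_mem x_gpu, cl_mem delta_gpu,
                                        cl_int n, cl_int activation)
    {
        cl_int err = CL_SUCCESS;
        err |= clSetKernelArg(kernel, 0, sizeof(cl_mem), &x_gpu);
        err |= clSetKernelArg(kernel, 1, sizeof(cl_int), &n);
        err |= clSetKernelArg(kernel, 2, sizeof(cl_int), &activation);
        err |= clSetKernelArg(kernel, 3, sizeof(cl_mem), &delta_gpu);
        size_t global = (size_t)n;   /* one work-item per array element */
        err |= clEnqueueNDRangeKernel(queue, kernel, 1, NULL, &global, NULL,
                                      0, NULL, NULL);
        return err;
    }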
---
 src/activations.cl |   54 ++++++++++++++++++++++++++++++++++++++++++++----------
 1 file changed, 44 insertions(+), 10 deletions(-)

diff --git a/src/activations.cl b/src/activations.cl
index 19428b1..da06e8a 100644
--- a/src/activations.cl
+++ b/src/activations.cl
@@ -2,27 +2,61 @@
     SIGMOID, RELU, LINEAR, RAMP, TANH
 }ACTIVATION;
 
-float activate(float x, ACTIVATION a, float dropout)
+float linear_activate(float x){return x;}
+float sigmoid_activate(float x){return 1./(1. + exp(-x));}
+float relu_activate(float x){return x*(x>0);}
+float ramp_activate(float x){return x*(x>0)+.1*x;}
+float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
+
+float linear_gradient(float x){return 1;}
+float sigmoid_gradient(float x){return (1-x)*x;}
+float relu_gradient(float x){return (x>0);}
+float ramp_gradient(float x){return (x>0)+.1;}
+float tanh_gradient(float x){return 1-x*x;}
+
+float activate(float x, ACTIVATION a)
 {
-    //if((float)rand()/RAND_MAX < dropout) return 0;
     switch(a){
         case LINEAR:
-            return linear_activate(x)/(1-dropout);
+            return linear_activate(x);
         case SIGMOID:
-            return sigmoid_activate(x)/(1-dropout);
+            return sigmoid_activate(x);
         case RELU:
-            return relu_activate(x)/(1-dropout);
+            return relu_activate(x);
         case RAMP:
-            return ramp_activate(x)/(1-dropout);
+            return ramp_activate(x);
         case TANH:
-            return tanh_activate(x)/(1-dropout);
+            return tanh_activate(x);
     }
     return 0;
 }
 
-__kernel void activate_array(__global float *x,
-    const int n, const ACTIVATION a, const float dropout)
+float gradient(float x, ACTIVATION a)
+{
+    switch(a){
+        case LINEAR:
+            return linear_gradient(x);
+        case SIGMOID:
+            return sigmoid_gradient(x);
+        case RELU:
+            return relu_gradient(x);
+        case RAMP:
+            return ramp_gradient(x);
+        case TANH:
+            return tanh_gradient(x);
+    }
+    return 0;
+}
+
+__kernel void activate_array(__global float *x, int n, ACTIVATION a)
 {
     int i = get_global_id(0);
-    x[i] = activate(x[i], a, dropout);
+    x[i] = activate(x[i], a);
 }
+
+__kernel void gradient_array(__global float *x, int n, ACTIVATION a, __global float *delta)
+{
+    int i = get_global_id(0);
+    delta[i] *= gradient(x[i], a);
+}
+

--
Gitblit v1.10.0