From 76ee68f96d864a27312c9aa09856ddda559a5cd9 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Thu, 28 Aug 2014 02:11:46 +0000
Subject: [PATCH] Trying some stuff w/ dropout

---
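Notes: the *_gradient functions below take the activation's stored forward
output rather than its pre-activation input. For the sigmoid y = 1/(1+exp(-x)),
dy/dx = y*(1-y), so sigmoid_gradient(y) returns (1-y)*y, and tanh_gradient(y)
= 1-y*y works the same way. RELU and RAMP outputs share the sign of their
inputs, so testing x>0 on the stored output is equivalent.

For reference, a minimal host-side sketch of launching gradient_array. This
is illustration only, not part of the patch: the function name
backward_activation_gpu, the variable names, and passing the ACTIVATION enum
as a cl_int are all assumptions.

    #include <CL/cl.h>

    /* Hypothetical host wrapper; assumes `program` was built from
     * src/activations.cl and that x_gpu/delta_gpu each hold n floats. */
    void backward_activation_gpu(cl_program program, cl_command_queue queue,
                                 cl_mem x_gpu, cl_mem delta_gpu,
                                 cl_int n, cl_int activation)
    {
        cl_int err;
        cl_kernel kernel = clCreateKernel(program, "gradient_array", &err);

        clSetKernelArg(kernel, 0, sizeof(cl_mem), &x_gpu);      /* __global float *x     */
        clSetKernelArg(kernel, 1, sizeof(cl_int), &n);          /* int n                 */
        clSetKernelArg(kernel, 2, sizeof(cl_int), &activation); /* ACTIVATION a          */
        clSetKernelArg(kernel, 3, sizeof(cl_mem), &delta_gpu);  /* __global float *delta */

        /* One work-item per element; the kernel's i<n check covers any
         * rounding up of the global size. */
        size_t global = n;
        clEnqueueNDRangeKernel(queue, kernel, 1, 0, &global, 0, 0, 0, 0);
        clReleaseKernel(kernel);
    }
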
 src/activations.cl |   33 +++++++++++++++++++++++++++++++--
 1 file changed, 31 insertions(+), 2 deletions(-)

diff --git a/src/activations.cl b/src/activations.cl
index 65131c5..da06e8a 100644
--- a/src/activations.cl
+++ b/src/activations.cl
@@ -8,6 +8,12 @@
 float ramp_activate(float x){return x*(x>0)+.1*x;}
 float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
 
+float linear_gradient(float x){return 1;}
+float sigmoid_gradient(float x){return (1-x)*x;} // x is the stored forward output y, dy/dx = y*(1-y)
+float relu_gradient(float x){return (x>0);} // output shares the sign of the input, so x>0 works on either
+float ramp_gradient(float x){return (x>0)+.1f;}
+float tanh_gradient(float x){return 1-x*x;} // x is the stored forward output y, dy/dx = 1-y*y
+
 float activate(float x, ACTIVATION a)
 {
     switch(a){
@@ -25,9 +31,32 @@
     return 0;
 }
 
-__kernel void activate_array(__global float *x,
-    const int n, const ACTIVATION a)
+float gradient(float x, ACTIVATION a)
+{
+    switch(a){
+        case LINEAR:
+            return linear_gradient(x);
+        case SIGMOID:
+            return sigmoid_gradient(x);
+        case RELU:
+            return relu_gradient(x);
+        case RAMP:
+            return ramp_gradient(x);
+        case TANH:
+            return tanh_gradient(x);
+    }
+    return 0;
+}
+
+__kernel void activate_array(__global float *x, int n, ACTIVATION a)
 {
     int i = get_global_id(0);
     x[i] = activate(x[i], a);
 }
+
+__kernel void gradient_array(__global float *x, int n, ACTIVATION a, __global float *delta)
+{
+    int i = get_global_id(0);
+    if(i < n) delta[i] *= gradient(x[i], a); // bounds check in case the global size was rounded up past n
+}
+

--
Gitblit v1.10.0