From 655f636a42d6e1d4518b712cfac6d973424de693 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Sun, 08 Mar 2015 18:25:28 +0000
Subject: [PATCH] detection layer fixed

---
 src/activations.h |   19 +++++++++++++++----
 1 file changed, 15 insertions(+), 4 deletions(-)

diff --git a/src/activations.h b/src/activations.h
index c406c18..337e5f1 100644
--- a/src/activations.h
+++ b/src/activations.h
@@ -1,4 +1,4 @@
-#include "opencl.h"
+#include "cuda.h"
 #ifndef ACTIVATIONS_H
 #define ACTIVATIONS_H
 
@@ -14,10 +14,21 @@
 void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta);
 void activate_array(float *x, const int n, const ACTIVATION a);
 #ifdef GPU
-cl_kernel get_activation_kernel();
-void activate_array_ongpu(cl_mem x, int n, ACTIVATION a);
-void gradient_array_ongpu(cl_mem x, int n, ACTIVATION a, cl_mem delta);
+void activate_array_ongpu(float *x, int n, ACTIVATION a);
+void gradient_array_ongpu(float *x, int n, ACTIVATION a, float *delta);
 #endif
 
+static inline float linear_activate(float x){return x;}
+static inline float sigmoid_activate(float x){return 1./(1. + exp(-x));}
+static inline float relu_activate(float x){return x*(x>0);}
+static inline float ramp_activate(float x){return x*(x>0)+.1*x;}
+static inline float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
+
+static inline float linear_gradient(float x){return 1;}
+static inline float sigmoid_gradient(float x){return (1-x)*x;}
+static inline float relu_gradient(float x){return (x>0);}
+static inline float ramp_gradient(float x){return (x>0)+.1;}
+static inline float tanh_gradient(float x){return 1-x*x;}
+
 #endif
 

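Note (not part of the patch): a minimal sketch of how the new inline helpers could back the
activate_array()/gradient_array() pair declared in this header. The ACTIVATION enum values and
the *_sketch function names below are assumptions for illustration, and callers of the header
would also need <math.h> for exp(). The *_gradient helpers are written in terms of the
already-activated output, which is why sigmoid_gradient returns (1-x)*x rather than the usual
derivative of the raw input.

    /* Sketch only: enum values and dispatcher names are assumed, not taken
     * from the patch.  Gradients are evaluated at the activated output y,
     * e.g. sigmoid_gradient(y) = (1 - y) * y. */
    #include <math.h>

    typedef enum { LINEAR, SIGMOID, RELU, RAMP, TANH } ACTIVATION;

    static inline float sigmoid_activate(float x){ return 1.f/(1.f + expf(-x)); }
    static inline float sigmoid_gradient(float y){ return (1 - y) * y; }

    /* Elementwise activation over a buffer, mirroring activate_array(). */
    static void activate_array_sketch(float *x, int n, ACTIVATION a)
    {
        for (int i = 0; i < n; ++i) {
            switch (a) {
                case SIGMOID: x[i] = sigmoid_activate(x[i]); break;
                case RELU:    x[i] = x[i] * (x[i] > 0);      break;
                default:      break; /* LINEAR: identity */
            }
        }
    }

    /* Chain rule: scale the incoming delta by the local gradient of the
     * activation, evaluated at the already-activated value x[i]. */
    static void gradient_array_sketch(const float *x, int n, ACTIVATION a, float *delta)
    {
        for (int i = 0; i < n; ++i) {
            switch (a) {
                case SIGMOID: delta[i] *= sigmoid_gradient(x[i]); break;
                case RELU:    delta[i] *= (x[i] > 0);             break;
                default:      break;
            }
        }
    }

Expressing each gradient as a function of the stored activation output means the backward pass
can reuse the forward-pass buffer and avoid recomputing exp(), which appears to be the intent of
the (1-x)*x and 1-x*x forms above.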
--
Gitblit v1.10.0