From f047cfff99e00e28c02eb59b6d32386c122f9af6 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Sun, 08 Mar 2015 18:31:12 +0000
Subject: [PATCH] Rename sigmoid to logistic

---
 src/activations.h |   31 ++++++++++++++++++++++++++++---
 1 file changed, 28 insertions(+), 3 deletions(-)

diff --git a/src/activations.h b/src/activations.h
index 15d96d3..0cb81af 100644
--- a/src/activations.h
+++ b/src/activations.h
@@ -1,14 +1,39 @@
 #ifndef ACTIVATIONS_H
 #define ACTIVATIONS_H
+#include "cuda.h"
+#include <math.h>
 
 typedef enum{
-    SIGMOID, RELU, IDENTITY, RAMP
+    LOGISTIC, RELU, LINEAR, RAMP, TANH
 }ACTIVATION;
 
 ACTIVATION get_activation(char *s);
 
-double activate(double x, ACTIVATION a);
-double gradient(double x, ACTIVATION a);
+char *get_activation_string(ACTIVATION a);
+float activate(float x, ACTIVATION a);
+float gradient(float x, ACTIVATION a);
+void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta);
+void activate_array(float *x, const int n, const ACTIVATION a);
+#ifdef GPU
+void activate_array_ongpu(float *x, int n, ACTIVATION a);
+void gradient_array_ongpu(float *x, int n, ACTIVATION a, float *delta);
+#endif
+
+/* Element-wise activations. */
+static inline float linear_activate(float x){return x;}
+static inline float logistic_activate(float x){return 1.f/(1.f + expf(-x));}
+static inline float relu_activate(float x){return x*(x>0);}
+static inline float ramp_activate(float x){return x*(x>0)+.1f*x;}
+static inline float tanh_activate(float x){return (expf(2*x)-1)/(expf(2*x)+1);}
+
+/* Gradients, written in terms of the activation output y = f(x):
+ * callers pass the already-activated value, e.g. y*(1-y) for the
+ * logistic and 1-y*y for tanh. */
+static inline float linear_gradient(float x){return 1;}
+static inline float logistic_gradient(float x){return (1-x)*x;}
+static inline float relu_gradient(float x){return (x>0);}
+static inline float ramp_gradient(float x){return (x>0)+.1f;}
+static inline float tanh_gradient(float x){return 1-x*x;}
 
 #endif
 

--
Gitblit v1.10.0
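
Note: the array helpers and the activate/gradient dispatchers are only
declared in this header. A minimal sketch of a matching activations.c,
assuming per-element switch dispatch over the ACTIVATION enum (the switch
bodies are illustrative, not part of this patch):

#include "activations.h"

/* Dispatch one value through the selected activation. */
float activate(float x, ACTIVATION a)
{
    switch(a){
        case LINEAR:   return linear_activate(x);
        case LOGISTIC: return logistic_activate(x);
        case RELU:     return relu_activate(x);
        case RAMP:     return ramp_activate(x);
        case TANH:     return tanh_activate(x);
    }
    return 0;
}

/* Dispatch one already-activated value through the matching gradient. */
float gradient(float x, ACTIVATION a)
{
    switch(a){
        case LINEAR:   return linear_gradient(x);
        case LOGISTIC: return logistic_gradient(x);
        case RELU:     return relu_gradient(x);
        case RAMP:     return ramp_gradient(x);
        case TANH:     return tanh_gradient(x);
    }
    return 0;
}

/* Apply an activation in place over a buffer of n floats. */
void activate_array(float *x, const int n, const ACTIVATION a)
{
    int i;
    for(i = 0; i < n; ++i) x[i] = activate(x[i], a);
}

/* Scale each delta by the gradient at the already-activated output x. */
void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta)
{
    int i;
    for(i = 0; i < n; ++i) delta[i] *= gradient(x[i], a);
}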
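Usage note: the *_gradient helpers take the activated output, not the raw
input, so backprop evaluates the derivative without recomputing exp(). A
small standalone check (the sample input 0.5 is illustrative):

#include <stdio.h>
#include "activations.h"

int main(void)
{
    float x = 0.5f;
    float y = logistic_activate(x);    /* 1/(1+e^-0.5) ~= 0.6225 */
    float dydx = logistic_gradient(y); /* y*(1-y)      ~= 0.2350 */
    printf("y = %f, dy/dx = %f\n", y, dydx);
    return 0;
}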