From 5ef74c2031a040f30a670dc7d60790fc6a9ec720 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Fri, 02 May 2014 22:20:34 +0000
Subject: [PATCH] Slowly refactoring and pushing to GPU
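
Collapse the per-function activation/gradient pairs into single
switch-based activate() and gradient() routines over float, add TANH,
rename IDENTITY to LINEAR, add get_activation_string(), and introduce
activate_array()/gradient_array() so a whole layer can be activated or
back-propagated in one call, ahead of the GPU port.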

---
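Notes (not part of the commit): a minimal sketch of how the new array
API is meant to be driven from a layer. It assumes an activations.h
header declaring ACTIVATION, activate(), gradient(), get_activation(),
activate_array() and gradient_array() as defined in this patch; the
demo file name and compile line are hypothetical.

#include <stdio.h>
#include "activations.h"   /* assumed header exposing the functions below */

int main()
{
    float out[3]   = {-1.5f, 0.f, 2.f};   /* pretend layer outputs */
    float delta[3] = {1.f, 1.f, 1.f};     /* upstream gradients    */
    ACTIVATION a = get_activation("tanh");

    activate_array(out, 3, a);            /* out[i] = tanh(out[i])         */
    gradient_array(out, 3, a, delta);     /* delta[i] *= 1 - out[i]*out[i] */

    /* Finite-difference check of the convention that gradient() takes the
     * activation output rather than its input. */
    float x = .7f, eps = 1e-3f;
    float numeric  = (activate(x + eps, a) - activate(x - eps, a)) / (2 * eps);
    float analytic = gradient(activate(x, a), a);
    printf("numeric %f vs analytic %f\n", numeric, analytic);
    return 0;
}

Compile along the lines of: gcc demo.c activations.c -lm. Routing every
layer through activate_array()/gradient_array() leaves one entry point
per direction, which is the shape a CUDA kernel can later take over.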
 src/activations.c |   95 ++++++++++++++++++++++++++++----------------
 1 file changed, 60 insertions(+), 35 deletions(-)

diff --git a/src/activations.c b/src/activations.c
index a255f0f..24868a3 100644
--- a/src/activations.c
+++ b/src/activations.c
@@ -4,32 +4,74 @@
 #include <stdio.h>
 #include <string.h>
 
+/* Printable name for an ACTIVATION; unknown values fall back to "relu". */
+char *get_activation_string(ACTIVATION a)
+{
+    switch(a){
+        case SIGMOID:
+            return "sigmoid";
+        case RELU:
+            return "relu";
+        case RAMP:
+            return "ramp";
+        case LINEAR:
+            return "linear";
+        case TANH:
+            return "tanh";
+        default:
+            break;
+    }
+    return "relu";
+}
+
 ACTIVATION get_activation(char *s)
 {
     if (strcmp(s, "sigmoid")==0) return SIGMOID;
     if (strcmp(s, "relu")==0) return RELU;
-    if (strcmp(s, "identity")==0) return IDENTITY;
+    if (strcmp(s, "linear")==0) return LINEAR;
     if (strcmp(s, "ramp")==0) return RAMP;
+    if (strcmp(s, "tanh")==0) return TANH;
     fprintf(stderr, "Couldn't find activation function %s, going with ReLU\n", s);
     return RELU;
 }
 
-double activate(double x, ACTIVATION a){
+float linear_activate(float x){return x;}
+float sigmoid_activate(float x){return 1./(1. + exp(-x));}
+float relu_activate(float x){return x*(x>0);}
+float ramp_activate(float x){return x*(x>0)+.1*x;}
+float tanh_activate(float x){return 2./(1. + exp(-2*x)) - 1;}
+
+float activate(float x, ACTIVATION a){
     switch(a){
-        case IDENTITY:
-            return x;
+        case LINEAR:
+            return linear_activate(x);
         case SIGMOID:
-            return 1./(1.+exp(-x));
+            return sigmoid_activate(x);
         case RELU:
-            return x*(x>0);
+            return relu_activate(x);
         case RAMP:
-            return x*(x>0) + .1*x;
+            return ramp_activate(x);
+        case TANH:
+            return tanh_activate(x);
     }
     return 0;
 }
-double gradient(double x, ACTIVATION a){
+
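+/* Apply an activation in place over n values; one call site that a GPU kernel can later replace. */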
+void activate_array(float *x, const int n, const ACTIVATION a)
+{
+    int i;
+    for(i = 0; i < n; ++i){
+        x[i] = activate(x[i], a);
+    }
+}
+
+
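+/* gradient() takes the activation *output* x, not the pre-activation input;
+ * this reuses the forward value, e.g. sigmoid: x*(1-x), tanh: 1-x*x. */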
+float gradient(float x, ACTIVATION a){
     switch(a){
-        case IDENTITY:
+        case LINEAR:
             return 1;
         case SIGMOID:
             return (1.-x)*x;
@@ -37,35 +79,18 @@
             return (x>0);
         case RAMP:
             return (x>0) + .1;
+        case TANH:
+            return 1-x*x;
     }
     return 0;
 }
 
-double identity_activation(double x)
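+/* Scale each delta by the activation gradient at the corresponding output. */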
+void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta)
 {
-    return x;
-}
-double identity_gradient(double x)
-{
-    return 1;
-}
-
-double relu_activation(double x)
-{
-    return x*(x>0);
-}
-double relu_gradient(double x)
-{
-    return (x>0);
-}
-
-double sigmoid_activation(double x)
-{
-    return 1./(1.+exp(-x));
-}
-
-double sigmoid_gradient(double x)
-{
-    return x*(1.-x);
-}
+    int i;
+    for(i = 0; i < n; ++i){
+        delta[i] *= gradient(x[i], a);
+    }
+}
 

--
Gitblit v1.10.0