From ace5aeb0f59fdceb99e607af9780added20da37c Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Fri, 24 Jan 2014 22:51:17 +0000
Subject: [PATCH] MNIST connected network showing off matrices

---
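A note on the new interface: activate() and gradient() replace the
per-function pairs, dispatching on the ACTIVATION enum. Below is a minimal
sketch of a caller (a hypothetical test harness, not part of this patch; it
assumes the enum and both prototypes are visible via activations.h). Note
that gradient() is written in terms of the activation's *output*, as the
SIGMOID and TANH cases imply:

    #include <stdio.h>
    #include "activations.h"

    int main()
    {
        double x = 0.5;
        ACTIVATION fns[] = {LINEAR, SIGMOID, RELU, RAMP, TANH};
        int i;
        for(i = 0; i < 5; ++i){
            double y  = activate(x, fns[i]);   /* forward value f(x)  */
            double dy = gradient(y, fns[i]);   /* slope f'(x), computed from y */
            printf("activation %d: f(%g)=%g, f'(%g)=%g\n", fns[i], x, y, x, dy);
        }
        return 0;
    }
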
 src/activations.c |   57 +++++++++++++++++++++++++++++++--------------------------
 1 file changed, 31 insertions(+), 26 deletions(-)

diff --git a/src/activations.c b/src/activations.c
index a128029..b8bb79d 100644
--- a/src/activations.c
+++ b/src/activations.c
@@ -8,36 +8,41 @@
 {
     if (strcmp(s, "sigmoid")==0) return SIGMOID;
     if (strcmp(s, "relu")==0) return RELU;
-    if (strcmp(s, "identity")==0) return IDENTITY;
+    if (strcmp(s, "linear")==0) return LINEAR;
+    if (strcmp(s, "ramp")==0) return RAMP;
+    if (strcmp(s, "tanh")==0) return TANH;
     fprintf(stderr, "Couldn't find activation function %s, going with ReLU\n", s);
     return RELU;
 }
 
-double identity_activation(double x)
-{
-    return x;
+double activate(double x, ACTIVATION a){
+    switch(a){
+        case LINEAR:
+            return x;
+        case SIGMOID:
+            return 1./(1.+exp(-x));
+        case RELU:
+            return x*(x>0); /* max(0, x) without a branch */
+        case RAMP:
+            return x*(x>0) + .1*x; /* leaky: slope 1.1 above zero, .1 below */
+        case TANH:
+            return (exp(2*x)-1)/(exp(2*x)+1);
+    }
+    return 0;
 }
-double identity_gradient(double x)
-{
-    return 1;
-}
-
-double relu_activation(double x)
-{
-    return x*(x>0);
-}
-double relu_gradient(double x)
-{
-    return (x>=0);
-}
-
-double sigmoid_activation(double x)
-{
-    return 1./(1.+exp(-x));
-}
-
-double sigmoid_gradient(double x)
-{
-    return x*(1.-x);
+double gradient(double x, ACTIVATION a){
+    switch(a){
+        case LINEAR:
+            return 1;
+        case SIGMOID:
+            return (1.-x)*x; /* x is the sigmoid output */
+        case RELU:
+            return (x>0);
+        case RAMP:
+            return (x>0) + .1;
+        case TANH:
+            return 1-x*x; /* x is the tanh output */
+    }
+    return 0;
 }
 

--
Gitblit v1.10.0