From dbdd31ee211fe8b1ac7e93ceadf7b34b8d304f34 Mon Sep 17 00:00:00 2001
From: Roland Singer <roland.singer@desertbit.com>
Date: Wed, 22 Aug 2018 11:56:41 +0000
Subject: [PATCH] added LHTAN activation support and a LINEAR/LEAKY fast path in activate_array

---
 src/activations.c |   19 ++++++++++++++++---
 1 file changed, 16 insertions(+), 3 deletions(-)

diff --git a/src/activations.c b/src/activations.c
index 6ab4963..eab4e23 100644
--- a/src/activations.c
+++ b/src/activations.c
@@ -32,6 +32,8 @@
             return "stair";
         case HARDTAN:
             return "hardtan";
+        case LHTAN:
+            return "lhtan";
         default:
             break;
     }
@@ -47,6 +49,7 @@
     if (strcmp(s, "relie")==0) return RELIE;
     if (strcmp(s, "plse")==0) return PLSE;
     if (strcmp(s, "hardtan")==0) return HARDTAN;
+    if (strcmp(s, "lhtan")==0) return LHTAN;
     if (strcmp(s, "linear")==0) return LINEAR;
     if (strcmp(s, "ramp")==0) return RAMP;
     if (strcmp(s, "leaky")==0) return LEAKY;
@@ -83,6 +86,8 @@
             return stair_activate(x);
         case HARDTAN:
             return hardtan_activate(x);
+        case LHTAN:
+            return lhtan_activate(x);
     }
     return 0;
 }
@@ -90,8 +95,16 @@
 void activate_array(float *x, const int n, const ACTIVATION a)
 {
     int i;
-    for(i = 0; i < n; ++i){
-        x[i] = activate(x[i], a);
+    if (a == LINEAR) {}
+    else if (a == LEAKY) {
+        for (i = 0; i < n; ++i) {
+            x[i] = leaky_activate(x[i]);
+        }
+    }
+    else {
+        for (i = 0; i < n; ++i) {
+            x[i] = activate(x[i], a);
+        }
     }
 }
 
@@ -122,6 +135,8 @@
             return stair_gradient(x);
         case HARDTAN:
             return hardtan_gradient(x);
+        case LHTAN:
+            return lhtan_gradient(x);
     }
     return 0;
 }
@@ -132,5 +147,5 @@
     for(i = 0; i < n; ++i){
         delta[i] *= gradient(x[i], a);
     }
-} 
+}
 

--
Gitblit v1.10.0