From dbdd31ee211fe8b1ac7e93ceadf7b34b8d304f34 Mon Sep 17 00:00:00 2001
From: Roland Singer <roland.singer@desertbit.com>
Date: Wed, 22 Aug 2018 11:56:41 +0000
Subject: [PATCH] add LHTAN activation to activations.h and fix relie_activate
---
src/activations.h | 16 ++++++++++++++--
 1 file changed, 14 insertions(+), 2 deletions(-)
diff --git a/src/activations.h b/src/activations.h
index fed2908..1c36ff5 100644
--- a/src/activations.h
+++ b/src/activations.h
@@ -4,7 +4,7 @@
 #include "math.h"
 
 typedef enum{
-    LOGISTIC, RELU, RELIE, LINEAR, RAMP, TANH, PLSE, LEAKY, ELU, LOGGY, STAIR, HARDTAN
+    LOGISTIC, RELU, RELIE, LINEAR, RAMP, TANH, PLSE, LEAKY, ELU, LOGGY, STAIR, HARDTAN, LHTAN
 }ACTIVATION;
 
 ACTIVATION get_activation(char *s);
@@ -36,7 +36,7 @@
 static inline float loggy_activate(float x){return 2./(1. + exp(-x)) - 1;}
 static inline float relu_activate(float x){return x*(x>0);}
 static inline float elu_activate(float x){return (x >= 0)*x + (x < 0)*(exp(x)-1);}
-static inline float relie_activate(float x){return x*(x>0);}
+static inline float relie_activate(float x){return (x>0) ? x : .01*x;}
 static inline float ramp_activate(float x){return x*(x>0)+.1*x;}
 static inline float leaky_activate(float x){return (x>0) ? x : .1*x;}
 static inline float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
@@ -47,6 +47,18 @@
     return .125*x + .5;
 }
 
+static inline float lhtan_activate(float x)
+{
+    if(x < 0) return .001*x;
+    if(x > 1) return .001*(x-1) + 1;
+    return x;
+}
+static inline float lhtan_gradient(float x)
+{
+    if(x > 0 && x < 1) return 1;
+    return .001;
+}
+
 static inline float hardtan_gradient(float x)
 {
     if (x > -1 && x < 1) return 1;
--
Gitblit v1.10.0
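
For readers evaluating the patch on its own, here is a minimal standalone C
sketch (not part of the patch itself) that exercises the new LHTAN activation
and its gradient exactly as defined in the hunk above. The main() driver and
the sample points are illustrative additions, not darknet code.

#include <stdio.h>

/* As in the patch: LHTAN behaves like hardtan on [0,1] but keeps a small
 * .001 slope outside that range, so the gradient never goes fully to zero. */
static inline float lhtan_activate(float x)
{
    if(x < 0) return .001*x;
    if(x > 1) return .001*(x-1) + 1;
    return x;
}
static inline float lhtan_gradient(float x)
{
    if(x > 0 && x < 1) return 1;
    return .001;
}

int main(void)
{
    /* Sample one point below, inside, and above the linear region. */
    float xs[] = {-2.0f, 0.5f, 3.0f};
    for(int i = 0; i < 3; ++i){
        printf("x=% .2f  lhtan=% .6f  grad=%.3f\n",
               xs[i], lhtan_activate(xs[i]), lhtan_gradient(xs[i]));
    }
    return 0;
}

The relie_activate change in the second hunk follows the same idea: the old
body duplicated relu_activate, while the new one is a leaky ReLU with a .01
slope below zero.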