From 0f645836f193e75c4c3b718369e6fab15b5d19c5 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Wed, 11 Feb 2015 03:41:03 +0000
Subject: [PATCH] Detection is back, baby!
---
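Notes:

gradient() now dispatches to one *_gradient helper per activation, mirroring
the *_activate functions. Going by the formulas, each helper takes the
activation's OUTPUT rather than its input: sigmoid_gradient returns (1-x)*x
and tanh_gradient returns 1-x*x, which are the sigmoid and tanh derivatives
expressed in terms of the output value. The sketch below is a standalone
sanity check of that reading for the sigmoid case; it is not part of the
patch, and sigmoid_activate's logistic form is assumed since this diff
doesn't show it:

/* check.c -- compare the analytic gradient, fed the activation output,
 * against a central-difference estimate of d sigmoid / dx.
 * Build: cc check.c -lm */
#include <math.h>
#include <stdio.h>

static float sigmoid_activate(float x){return 1./(1.+exp(-x));} /* assumed form */
static float sigmoid_gradient(float x){return (1-x)*x;}         /* from the hunk below */

int main()
{
    float x = 0.5;
    float y = sigmoid_activate(x);   /* forward-pass output */
    float eps = 1e-3;
    float numeric = (sigmoid_activate(x+eps) - sigmoid_activate(x-eps))/(2*eps);
    printf("analytic %f  numeric %f\n", sigmoid_gradient(y), numeric);
    return 0;
}

Feeding the helper the pre-activation input instead would give 0.25 here,
not the ~0.235 the numeric derivative reports, so callers presumably pass
the stored output.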
 src/activations.c | 25 +++++++++++++++++--------
 1 file changed, 17 insertions(+), 8 deletions(-)
diff --git a/src/activations.c b/src/activations.c
index 24868a3..48dce87 100644
--- a/src/activations.c
+++ b/src/activations.c
@@ -2,6 +2,7 @@
 
 #include <math.h>
 #include <stdio.h>
+#include <stdlib.h>
 #include <string.h>
 
 char *get_activation_string(ACTIVATION a)
@@ -39,8 +40,16 @@
 float relu_activate(float x){return x*(x>0);}
 float ramp_activate(float x){return x*(x>0)+.1*x;}
 float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
+//float tanh_activate(float x){return x - (x*x*x)/3;}
 
-float activate(float x, ACTIVATION a){
+float linear_gradient(float x){return 1;}
+float sigmoid_gradient(float x){return (1-x)*x;}
+float relu_gradient(float x){return (x>0);}
+float ramp_gradient(float x){return (x>0)+.1;}
+float tanh_gradient(float x){return 1-x*x;}
+
+float activate(float x, ACTIVATION a)
+{
     switch(a){
         case LINEAR:
             return linear_activate(x);
@@ -64,19 +73,19 @@
     }
 }
 
-
-float gradient(float x, ACTIVATION a){
+float gradient(float x, ACTIVATION a)
+{
     switch(a){
         case LINEAR:
-            return 1;
+            return linear_gradient(x);
         case SIGMOID:
-            return (1.-x)*x;
+            return sigmoid_gradient(x);
         case RELU:
-            return (x>0);
+            return relu_gradient(x);
         case RAMP:
-            return (x>0) + .1;
+            return ramp_gradient(x);
         case TANH:
-            return 1-x*x;
+            return tanh_gradient(x);
     }
     return 0;
 }
--
Gitblit v1.10.0