From 655f636a42d6e1d4518b712cfac6d973424de693 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Sun, 08 Mar 2015 18:25:28 +0000
Subject: [PATCH] detection layer fixed
---
src/activations.c | 44 ++++++++++++++++++++++++++++++++------------
1 file changed, 32 insertions(+), 12 deletions(-)
diff --git a/src/activations.c b/src/activations.c
index c81d6aa..4689046 100644
--- a/src/activations.c
+++ b/src/activations.c
@@ -2,6 +2,7 @@
#include <math.h>
#include <stdio.h>
+#include <stdlib.h>
#include <string.h>
char *get_activation_string(ACTIVATION a)
@@ -34,34 +35,53 @@
return RELU;
}
-float activate(float x, ACTIVATION a){
+float activate(float x, ACTIVATION a)
+{
switch(a){
case LINEAR:
- return x;
+ return linear_activate(x);
case SIGMOID:
- return 1./(1.+exp(-x));
+ return sigmoid_activate(x);
case RELU:
- return x*(x>0);
+ return relu_activate(x);
case RAMP:
- return x*(x>0) + .1*x;
+ return ramp_activate(x);
case TANH:
- return (exp(2*x)-1)/(exp(2*x)+1);
+ return tanh_activate(x);
}
return 0;
}
-float gradient(float x, ACTIVATION a){
+
+void activate_array(float *x, const int n, const ACTIVATION a)
+{
+ int i;
+ for(i = 0; i < n; ++i){
+ x[i] = activate(x[i], a);
+ }
+}
+
+float gradient(float x, ACTIVATION a)
+{
switch(a){
case LINEAR:
- return 1;
+ return linear_gradient(x);
case SIGMOID:
- return (1.-x)*x;
+ return sigmoid_gradient(x);
case RELU:
- return (x>0);
+ return relu_gradient(x);
case RAMP:
- return (x>0) + .1;
+ return ramp_gradient(x);
case TANH:
- return 1-x*x;
+ return tanh_gradient(x);
}
return 0;
}
+void gradient_array(const float *x, const int n, const ACTIVATION a, float *delta)
+{
+ int i;
+ for(i = 0; i < n; ++i){
+ delta[i] *= gradient(x[i], a);
+ }
+}
+
--
Gitblit v1.10.0
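
Note on the helpers referenced above: the refactored activate()/gradient() switches delegate to per-activation functions (linear_activate, sigmoid_activate, relu_activate, ramp_activate, tanh_activate and their *_gradient counterparts) that are not defined in this patch; they presumably live in activations.h. A minimal sketch of how they could be written, using the exact expressions the patch removes (the static-inline form and the header placement are assumptions, not shown in the diff):

    /* sketch of activations.h helpers -- assumed, not part of this patch */
    #include <math.h>

    static inline float linear_activate(float x){return x;}
    static inline float sigmoid_activate(float x){return 1./(1. + exp(-x));}
    static inline float relu_activate(float x){return x*(x>0);}
    static inline float ramp_activate(float x){return x*(x>0) + .1*x;}
    static inline float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}

    /* Gradients keep the old forms, which appear to take the activation
     * output as their argument: (1.-x)*x is sigmoid'(net) when x is the
     * sigmoid output, and 1-x*x is tanh'(net) when x is the tanh output. */
    static inline float linear_gradient(float x){return 1;}
    static inline float sigmoid_gradient(float x){return (1.-x)*x;}
    static inline float relu_gradient(float x){return (x>0);}
    static inline float ramp_gradient(float x){return (x>0) + .1;}
    static inline float tanh_gradient(float x){return 1-x*x;}

The new activate_array() and gradient_array() wrappers apply these element-wise over a buffer: a layer's forward pass would call something like activate_array(output, n, a) on its outputs, and its backward pass gradient_array(output, n, a, delta) to scale delta in place by the local gradient (the buffer names here are illustrative, not taken from the patch).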