From 0f645836f193e75c4c3b718369e6fab15b5d19c5 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Wed, 11 Feb 2015 03:41:03 +0000
Subject: [PATCH] Detection is back, baby!

---
 src/softmax_layer.c | 43 ++++++++++++++++++++++++++++++-------------
 1 file changed, 30 insertions(+), 13 deletions(-)

diff --git a/src/softmax_layer.c b/src/softmax_layer.c
index b213e5b..aa5ab06 100644
--- a/src/softmax_layer.c
+++ b/src/softmax_layer.c
@@ -1,34 +1,51 @@
#include "softmax_layer.h"
+#include "blas.h"
+#include "cuda.h"
+#include <float.h>
#include <math.h>
#include <stdlib.h>
#include <stdio.h>
-softmax_layer *make_softmax_layer(int inputs)
+softmax_layer *make_softmax_layer(int batch, int inputs)
{
fprintf(stderr, "Softmax Layer: %d inputs\n", inputs);
softmax_layer *layer = calloc(1, sizeof(softmax_layer));
+ layer->batch = batch;
layer->inputs = inputs;
- layer->output = calloc(inputs, sizeof(double));
- layer->delta = calloc(inputs, sizeof(double));
+ layer->output = calloc(inputs*batch, sizeof(float));
+ layer->delta = calloc(inputs*batch, sizeof(float));
+ layer->jacobian = calloc(inputs*inputs*batch, sizeof(float));
+ #ifdef GPU
+ layer->output_gpu = cuda_make_array(layer->output, inputs*batch);
+ layer->delta_gpu = cuda_make_array(layer->delta, inputs*batch);
+ #endif
return layer;
}
-void forward_softmax_layer(const softmax_layer layer, double *input)
+void forward_softmax_layer(const softmax_layer layer, float *input)
{
- int i;
- double sum = 0;
- for(i = 0; i < layer.inputs; ++i){
- sum += exp(input[i]);
- }
- for(i = 0; i < layer.inputs; ++i){
- layer.output[i] = exp(input[i])/sum;
+ int i,b;
+ for(b = 0; b < layer.batch; ++b){
+ float sum = 0;
+ float largest = -FLT_MAX;
+ for(i = 0; i < layer.inputs; ++i){
+ if(input[i+b*layer.inputs] > largest) largest = input[i+b*layer.inputs];
+ }
+ for(i = 0; i < layer.inputs; ++i){
+ sum += exp(input[i+b*layer.inputs]-largest);
+ }
+ if(sum) sum = largest+log(sum);
+ else sum = largest-100;
+ for(i = 0; i < layer.inputs; ++i){
+ layer.output[i+b*layer.inputs] = exp(input[i+b*layer.inputs]-sum);
+ }
}
}
-void backward_softmax_layer(const softmax_layer layer, double *input, double *delta)
+void backward_softmax_layer(const softmax_layer layer, float *delta)
{
int i;
- for(i = 0; i < layer.inputs; ++i){
+ for(i = 0; i < layer.inputs*layer.batch; ++i){
delta[i] = layer.delta[i];
}
}
--
Gitblit v1.10.0
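
The new forward_softmax_layer() above avoids overflow by subtracting the largest input of each batch row before exponentiating (element i of row b lives at input[i + b*layer.inputs]), then normalizes through a log-sum-exp term, falling back to largest-100 when the sum of shifted exponentials underflows to zero. A minimal standalone sketch of that same trick for a single vector follows; the function name stable_softmax and the non-batched layout are illustrative assumptions, not part of this patch.

#include <float.h>
#include <math.h>
#include <stdio.h>

/* Illustrative sketch only (not from the patch): numerically stable softmax
 * over one vector of n inputs, using the same max-subtraction / log-sum-exp
 * shift as the patched forward_softmax_layer(). */
void stable_softmax(const float *input, float *output, int n)
{
    int i;
    float largest = -FLT_MAX;
    for(i = 0; i < n; ++i){
        if(input[i] > largest) largest = input[i];
    }
    float sum = 0;
    for(i = 0; i < n; ++i){
        sum += exp(input[i] - largest);   /* shifted, so exp() cannot overflow */
    }
    /* exp(x - (largest + log(sum))) == exp(x - largest) / sum */
    float shift = sum ? largest + log(sum) : largest - 100;
    for(i = 0; i < n; ++i){
        output[i] = exp(input[i] - shift);
    }
}

int main(void)
{
    /* Inputs large enough that a naive exp(input[i]) would overflow. */
    float in[3] = {1000, 1001, 1002};
    float out[3];
    int i;
    stable_softmax(in, out, 3);
    for(i = 0; i < 3; ++i) printf("%f\n", out[i]);   /* ~0.090, 0.245, 0.665 */
    return 0;
}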