From 153705226d8ca746478b69eeac9bc854766daa11 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Tue, 27 Jan 2015 21:31:06 +0000
Subject: [PATCH] Bias updates bug fix

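Make the softmax forward pass numerically stable. Initialize the
running maximum to -FLT_MAX instead of 0 so that all-negative inputs
are handled correctly, and compute the normalizer in log space via
log-sum-exp: log(sum_i exp(x_i)) = largest + log(sum_i exp(x_i - largest)).
Also allocate the softmax Jacobian buffer and GPU copies of the output
and delta arrays, drop the unused input parameter from
backward_softmax_layer, and delete the old unstable implementation and
leftover debug printfs.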
---
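Note (not part of the commit message): below is a minimal standalone
sketch of the log-sum-exp stabilization used in forward_softmax_layer,
assuming only the C standard library; the name stable_softmax() is
illustrative, not part of the patch. With large inputs such as
x = {1000, 1001, 1002}, the naive exp(x[i])/sum form overflows to
inf/inf = NaN, while the shifted form stays finite.

#include <float.h>
#include <math.h>
#include <stdio.h>

/* Numerically stable softmax over n inputs: shift by the maximum,
 * accumulate the normalizer in log space, then exponentiate. */
static void stable_softmax(const float *input, int n, float *output)
{
    float largest = -FLT_MAX;            /* not 0: inputs may all be negative */
    for(int i = 0; i < n; ++i){
        if(input[i] > largest) largest = input[i];
    }
    float sum = 0;
    for(int i = 0; i < n; ++i){
        sum += expf(input[i] - largest); /* exponent <= 0, cannot overflow */
    }
    /* log-sum-exp: log(sum_i exp(x_i)) = largest + log(sum_i exp(x_i - largest)) */
    float lse = largest + logf(sum);
    for(int i = 0; i < n; ++i){
        output[i] = expf(input[i] - lse);
    }
}

int main(void)
{
    float x[3] = {1000.f, 1001.f, 1002.f}; /* naive exp() overflows here */
    float y[3];
    stable_softmax(x, 3, y);
    printf("%f %f %f\n", y[0], y[1], y[2]); /* ~0.090031 0.244728 0.665241 */
    return 0;
}
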
 src/softmax_layer.c |   27 ++++++++++-----------------
 1 file changed, 10 insertions(+), 17 deletions(-)

diff --git a/src/softmax_layer.c b/src/softmax_layer.c
index 1268423..aa5ab06 100644
--- a/src/softmax_layer.c
+++ b/src/softmax_layer.c
@@ -1,4 +1,7 @@
 #include "softmax_layer.h"
+#include "blas.h"
+#include "cuda.h"
+#include <float.h>
 #include <math.h>
 #include <stdlib.h>
 #include <stdio.h>
@@ -11,36 +14,26 @@
     layer->inputs = inputs;
     layer->output = calloc(inputs*batch, sizeof(float));
     layer->delta = calloc(inputs*batch, sizeof(float));
+    layer->jacobian = calloc(inputs*inputs*batch, sizeof(float));
+    #ifdef GPU
+    layer->output_gpu = cuda_make_array(layer->output, inputs*batch);
+    layer->delta_gpu = cuda_make_array(layer->delta, inputs*batch);
+    #endif
     return layer;
 }
 
-/* UNSTABLE!
-void forward_softmax_layer(const softmax_layer layer, float *input)
-{
-    int i;
-    float sum = 0;
-    for(i = 0; i < layer.inputs; ++i){
-        sum += exp(input[i]);
-    }
-    for(i = 0; i < layer.inputs; ++i){
-        layer.output[i] = exp(input[i])/sum;
-    }
-}
-*/
 void forward_softmax_layer(const softmax_layer layer, float *input)
 {
     int i,b;
     for(b = 0; b < layer.batch; ++b){
         float sum = 0;
-        float largest = 0;
+        float largest = -FLT_MAX;
         for(i = 0; i < layer.inputs; ++i){
             if(input[i+b*layer.inputs] > largest) largest = input[i+b*layer.inputs];
         }
         for(i = 0; i < layer.inputs; ++i){
             sum += exp(input[i+b*layer.inputs]-largest);
-            //printf("%f, ", input[i]);
         }
-        //printf("\n");
         if(sum) sum = largest+log(sum);
         else sum = largest-100;
         for(i = 0; i < layer.inputs; ++i){
@@ -49,7 +42,7 @@
     }
 }
 
-void backward_softmax_layer(const softmax_layer layer, float *input, float *delta)
+void backward_softmax_layer(const softmax_layer layer, float *delta)
 {
     int i;
     for(i = 0; i < layer.inputs*layer.batch; ++i){

--
Gitblit v1.10.0