From b32a287e38f4c6a41828f18b4669dec9f3af4943 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Thu, 17 Jul 2014 17:17:52 +0000
Subject: [PATCH] Merge branch 'master' of pjreddie.com:jnet

---
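The connected layer now handles minibatches: forward_connected_layer copies
the bias vector into each of the layer.batch rows of layer.output rather than
only the first; the backward pass indexes bias_updates by output neuron
(i % layer.outputs, matching the batch-major layout of layer.output and
layer.delta) instead of by batch row; and the two trailing gemm calls are
rewritten so that weight_updates accumulates input^T * delta while the error
propagated to the previous layer becomes delta * weights^T. A dimension-check
sketch follows the diff.
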
 src/connected_layer.c |   19 +++++++++++--------
 1 file changed, 11 insertions(+), 8 deletions(-)

diff --git a/src/connected_layer.c b/src/connected_layer.c
index 72cb3fb..bebf2d9 100644
--- a/src/connected_layer.c
+++ b/src/connected_layer.c
@@ -57,8 +57,11 @@
 
 void forward_connected_layer(connected_layer layer, float *input, int train)
 {
+    int i;
     if(!train) layer.dropout = 0;
-    memcpy(layer.output, layer.biases, layer.outputs*sizeof(float));
+    for(i = 0; i < layer.batch; ++i){
+        memcpy(layer.output+i*layer.outputs, layer.biases, layer.outputs*sizeof(float));
+    }
     int m = layer.batch;
     int k = layer.inputs;
     int n = layer.outputs;
@@ -74,7 +77,7 @@
     int i;
     for(i = 0; i < layer.outputs*layer.batch; ++i){
         layer.delta[i] *= gradient(layer.output[i], layer.activation);
-        layer.bias_updates[i%layer.batch] += layer.delta[i];
+        layer.bias_updates[i%layer.outputs] += layer.delta[i];
     }
     int m = layer.inputs;
     int k = layer.batch;
@@ -82,16 +85,16 @@
     float *a = input;
     float *b = layer.delta;
     float *c = layer.weight_updates;
-    gemm(0,0,m,n,k,1,a,k,b,n,1,c,n);
+    gemm(1,0,m,n,k,1,a,m,b,n,1,c,n);
 
-    m = layer.inputs;
+    m = layer.batch;
     k = layer.outputs;
-    n = layer.batch;
+    n = layer.inputs;
 
-    a = layer.weights;
-    b = layer.delta;
+    a = layer.delta;
+    b = layer.weights;
     c = delta;
 
-    if(c) gemm(0,0,m,n,k,1,a,k,b,n,0,c,n);
+    if(c) gemm(0,1,m,n,k,1,a,k,b,k,0,c,n);
 }
 

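For reference, here is a minimal sketch of the row-major
gemm(TA, TB, M, N, K, ALPHA, A, lda, B, ldb, BETA, C, ldc) convention the
calls above assume, where TA/TB transpose their operand. The naive triple
loop and the toy shapes (batch=2, inputs=3, outputs=4) are illustrative
assumptions, not jnet's implementation; only the bookkeeping matters.

#include <stdio.h>
#include <string.h>

/* Naive row-major gemm matching the assumed darknet-style signature:
 * C = ALPHA * op(A) * op(B) + BETA * C, op() transposing when TA/TB is set. */
static void gemm(int TA, int TB, int M, int N, int K, float ALPHA,
                 const float *A, int lda, const float *B, int ldb,
                 float BETA, float *C, int ldc)
{
    for (int i = 0; i < M; ++i) {
        for (int j = 0; j < N; ++j) {
            float sum = 0;
            for (int k = 0; k < K; ++k) {
                float a = TA ? A[k*lda + i] : A[i*lda + k];
                float b = TB ? B[j*ldb + k] : B[k*ldb + j];
                sum += a * b;
            }
            C[i*ldc + j] = ALPHA*sum + BETA*C[i*ldc + j];
        }
    }
}

int main(void)
{
    enum { BATCH = 2, INPUTS = 3, OUTPUTS = 4 };         /* toy sizes */
    float input[BATCH*INPUTS]     = {1,2,3, 4,5,6};      /* batch x inputs   */
    float delta[BATCH*OUTPUTS]    = {1,0,1,0, 0,1,0,1};  /* batch x outputs  */
    float weights[INPUTS*OUTPUTS] = {0};                 /* inputs x outputs */
    float biases[OUTPUTS]         = {0.1f, 0.2f, 0.3f, 0.4f};
    float output[BATCH*OUTPUTS];
    float bias_updates[OUTPUTS]          = {0};
    float weight_updates[INPUTS*OUTPUTS] = {0};
    float prev_delta[BATCH*INPUTS]       = {0};
    int i;

    /* forward: replicate the bias row into every batch row, as the
     * new per-batch memcpy loop does */
    for (i = 0; i < BATCH; ++i)
        memcpy(output + i*OUTPUTS, biases, OUTPUTS*sizeof(float));

    /* backward, biases: i % OUTPUTS selects the output neuron, not the
     * batch row, because output/delta are laid out batch-major */
    for (i = 0; i < BATCH*OUTPUTS; ++i)
        bias_updates[i % OUTPUTS] += delta[i];

    /* backward, weights: weight_updates += input^T * delta,
     * (inputs x batch)(batch x outputs) -> inputs x outputs */
    gemm(1, 0, INPUTS, OUTPUTS, BATCH, 1,
         input, INPUTS, delta, OUTPUTS, 1, weight_updates, OUTPUTS);

    /* backward, propagated error: prev_delta = delta * weights^T,
     * (batch x outputs)(outputs x inputs) -> batch x inputs */
    gemm(0, 1, BATCH, INPUTS, OUTPUTS, 1,
         delta, OUTPUTS, weights, OUTPUTS, 0, prev_delta, INPUTS);

    for (i = 0; i < OUTPUTS; ++i)
        printf("bias_updates[%d] = %g\n", i, bias_updates[i]);
    return 0;
}

Compiled with e.g. cc -std=c99, this prints the per-neuron bias gradients;
the point is that all three shapes line up so the whole minibatch is handled
without any per-example loop around gemm.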
--
Gitblit v1.10.0