From cb1f33c6ae840e8dc0f43518daf76e6ed01034f0 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Mon, 08 Dec 2014 19:48:57 +0000
Subject: [PATCH] Add dropout layer support to GPU forward/output/delta paths; pass prev_input to backward_convolutional_layer_gpu

---
 src/network_gpu.c |   17 ++++++++++++++++-
 1 files changed, 16 insertions(+), 1 deletions(-)

diff --git a/src/network_gpu.c b/src/network_gpu.c
index 7302664..fe53d0c 100644
--- a/src/network_gpu.c
+++ b/src/network_gpu.c
@@ -22,7 +22,9 @@
 {
     //printf("start\n");
     int i;
+   // printf("Truth: %f\n", cl_checksum(truth, 1000*net.batch));
     for(i = 0; i < net.n; ++i){
+        //printf("Truth %i: %f\n", i, cl_checksum(truth, 1000*net.batch));
         //clock_t time = clock();
         if(net.types[i] == CONVOLUTIONAL){
             convolutional_layer layer = *(convolutional_layer *)net.layers[i];
@@ -48,6 +50,11 @@
             forward_softmax_layer_gpu(layer, input);
             input = layer.output_cl;
         }
+        else if(net.types[i] == DROPOUT){
+            if(!train) continue;
+            dropout_layer layer = *(dropout_layer *)net.layers[i];
+            forward_dropout_layer_gpu(layer, input);
+        }
         //printf("%d %f\n", i, sec(clock()-time));
         /*
            else if(net.types[i] == CROP){
@@ -80,7 +87,7 @@
         }
         if(net.types[i] == CONVOLUTIONAL){
             convolutional_layer layer = *(convolutional_layer *)net.layers[i];
-            backward_convolutional_layer_gpu(layer, prev_delta);
+            backward_convolutional_layer_gpu(layer, prev_input, prev_delta);
         }
         else if(net.types[i] == COST){
             cost_layer layer = *(cost_layer *)net.layers[i];
@@ -134,6 +141,8 @@
     else if(net.types[i] == SOFTMAX){
         softmax_layer layer = *(softmax_layer *)net.layers[i];
         return layer.output_cl;
+    } else if(net.types[i] == DROPOUT){
+        return get_network_output_cl_layer(net, i-1);
     }
     return 0;
 }
@@ -155,6 +164,8 @@
     else if(net.types[i] == SOFTMAX){
         softmax_layer layer = *(softmax_layer *)net.layers[i];
         return layer.delta_cl;
+    } else if(net.types[i] == DROPOUT){
+        return get_network_delta_cl_layer(net, i-1);
     }
     return 0;
 }
@@ -173,14 +184,18 @@
     }
     //printf("trans %f\n", sec(clock()-time));
     //time = clock();
+
     forward_network_gpu(net, *net.input_cl, *net.truth_cl, 1);
+
     //printf("forw %f\n", sec(clock()-time));
     //time = clock();
     backward_network_gpu(net, *net.input_cl);
     //printf("back %f\n", sec(clock()-time));
     //time = clock();
+
     update_network_gpu(net);
     float error = get_network_cost(net);
+
     //printf("updt %f\n", sec(clock()-time));
     //time = clock();
     return error;

--
Gitblit v1.10.0