From cf0300ea55538d4ca139d68cd24b0ee452cce015 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Sat, 28 Mar 2015 00:32:01 +0000
Subject: [PATCH] dropout probably ok

---
 src/dropout_layer_kernels.cu |   11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/src/dropout_layer_kernels.cu b/src/dropout_layer_kernels.cu
index 4561d89..3dfa641 100644
--- a/src/dropout_layer_kernels.cu
+++ b/src/dropout_layer_kernels.cu
@@ -11,17 +11,24 @@
     if(id < size) input[id] = (rand[id] < prob) ? 0 : input[id]*scale;
 }
 
-extern "C" void forward_dropout_layer_gpu(dropout_layer layer, network_state state)
+void forward_dropout_layer_gpu(dropout_layer layer, network_state state)
 {
     if (!state.train) return;
     int size = layer.inputs*layer.batch;
     cuda_random(layer.rand_gpu, size);
+    /*
+    int i;
+    for(i = 0; i < size; ++i){
+        layer.rand[i] = rand_uniform();
+    }
+    cuda_push_array(layer.rand_gpu, layer.rand, size);
+    */
 
     yoloswag420blazeit360noscope<<<cuda_gridsize(size), BLOCK>>>(state.input, size, layer.rand_gpu, layer.probability, layer.scale);
     check_error(cudaPeekAtLastError());
 }
 
-extern "C" void backward_dropout_layer_gpu(dropout_layer layer, network_state state)
+void backward_dropout_layer_gpu(dropout_layer layer, network_state state)
 {
     if(!state.delta) return;
     int size = layer.inputs*layer.batch;

--
Gitblit v1.10.0