From 08c7cf9c88befd845f00c00d85e40a9eead4b1b3 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Sun, 19 Jun 2016 21:28:15 +0000
Subject: [PATCH] no mean on input binarization
---
src/normalization_layer.c | 25 +++++++++++++------------
1 files changed, 13 insertions(+), 12 deletions(-)
diff --git a/src/normalization_layer.c b/src/normalization_layer.c
index dce2fcc..0551337 100644
--- a/src/normalization_layer.c
+++ b/src/normalization_layer.c
@@ -22,10 +22,10 @@
layer.inputs = w*h*c;
layer.outputs = layer.inputs;
#ifdef GPU
- layer.output_gpu = cuda_make_array(0, h * w * c * batch);
- layer.delta_gpu = cuda_make_array(0, h * w * c * batch);
- layer.squared_gpu = cuda_make_array(0, h * w * c * batch);
- layer.norms_gpu = cuda_make_array(0, h * w * c * batch);
+ layer.output_gpu = cuda_make_array(layer.output, h * w * c * batch);
+ layer.delta_gpu = cuda_make_array(layer.delta, h * w * c * batch);
+ layer.squared_gpu = cuda_make_array(layer.squared, h * w * c * batch);
+ layer.norms_gpu = cuda_make_array(layer.norms, h * w * c * batch);
#endif
return layer;
}
@@ -40,19 +40,19 @@
layer->out_w = w;
layer->inputs = w*h*c;
layer->outputs = layer->inputs;
- layer->output = realloc(layer->output, h * w * layer->c * layer->batch * sizeof(float));
- layer->delta = realloc(layer->delta, h * w * layer->c * layer->batch * sizeof(float));
- layer->squared = realloc(layer->squared, h * w * layer->c * layer->batch * sizeof(float));
- layer->norms = realloc(layer->norms, h * w * layer->c * layer->batch * sizeof(float));
+ layer->output = realloc(layer->output, h * w * c * batch * sizeof(float));
+ layer->delta = realloc(layer->delta, h * w * c * batch * sizeof(float));
+ layer->squared = realloc(layer->squared, h * w * c * batch * sizeof(float));
+ layer->norms = realloc(layer->norms, h * w * c * batch * sizeof(float));
#ifdef GPU
cuda_free(layer->output_gpu);
cuda_free(layer->delta_gpu);
cuda_free(layer->squared_gpu);
cuda_free(layer->norms_gpu);
- layer->output_gpu = cuda_make_array(0, h * w * c * batch);
- layer->delta_gpu = cuda_make_array(0, h * w * c * batch);
- layer->squared_gpu = cuda_make_array(0, h * w * c * batch);
- layer->norms_gpu = cuda_make_array(0, h * w * c * batch);
+ layer->output_gpu = cuda_make_array(layer->output, h * w * c * batch);
+ layer->delta_gpu = cuda_make_array(layer->delta, h * w * c * batch);
+ layer->squared_gpu = cuda_make_array(layer->squared, h * w * c * batch);
+ layer->norms_gpu = cuda_make_array(layer->norms, h * w * c * batch);
#endif
}
@@ -90,6 +90,7 @@
void backward_normalization_layer(const layer layer, network_state state)
{
// TODO This is approximate ;-)
+     // Also this should add into delta instead of overwriting.
int w = layer.w;
int h = layer.h;
--
Gitblit v1.10.0