From f1ad9a0487dc60c01e56bacb2a67de1e1006b4b7 Mon Sep 17 00:00:00 2001
From: AlexeyAB <alexeyab84@gmail.com>
Date: Thu, 05 Apr 2018 22:31:04 +0000
Subject: [PATCH] Compile fix
---
src/network.c | 47 ++++++++++++++++++++++++++++++++++++++++++++++-
1 file changed, 46 insertions(+), 1 deletion(-)
diff --git a/src/network.c b/src/network.c
index 8619158..438829a 100644
--- a/src/network.c
+++ b/src/network.c
@@ -374,10 +374,14 @@
resize_maxpool_layer(&l, w, h);
}else if(l.type == REGION){
resize_region_layer(&l, w, h);
+ }else if (l.type == YOLO) {
+ resize_yolo_layer(&l, w, h);
}else if(l.type == ROUTE){
resize_route_layer(&l, net);
}else if (l.type == SHORTCUT) {
resize_shortcut_layer(&l, w, h);
+ }else if (l.type == UPSAMPLE) {
+ resize_upsample_layer(&l, w, h);
}else if(l.type == REORG){
resize_reorg_layer(&l, w, h);
}else if(l.type == AVGPOOL){
@@ -544,7 +548,9 @@
dets[j].classes = l.classes;
dets[j].bbox = boxes[j];
dets[j].objectness = 1;
- for (i = 0; i < l.classes; ++i) dets[j].prob[i] = probs[j][i];
+ for (i = 0; i < l.classes; ++i) {
+ dets[j].prob[i] = probs[j][i];
+ }
}
free(boxes);
@@ -742,3 +748,42 @@
free(net.workspace);
#endif
}
+
+
+void fuse_conv_batchnorm(network net) // Fold batch-norm statistics into conv weights/biases so inference skips the BN step.
+{
+ int j;
+ for (j = 0; j < net.n; ++j) { // walk every layer in the network
+ layer *l = &net.layers[j];
+
+ if (l->type == CONVOLUTIONAL) { // only convolutional layers are fused; all others pass through untouched
+ //printf(" Merges Convolutional-%d and batch_norm \n", j);
+
+ if (l->batch_normalize) { // fuse only layers that still carry BN parameters
+ int f;
+ for (f = 0; f < l->n; ++f) // one scale/mean/variance per output filter
+ {
+ l->biases[f] = l->biases[f] - l->scales[f] * l->rolling_mean[f] / (sqrtf(l->rolling_variance[f]) + .000001f); // b' = b - s*mu/(sqrt(var)+eps); NOTE(review): eps is added outside the sqrt, unlike canonical BN (sqrt(var+eps)) -- confirm intended
+
+ const size_t filter_size = l->size*l->size*l->c; // weights per filter: k*k*channels
+ int i;
+ for (i = 0; i < filter_size; ++i) { // NOTE(review): signed i compared to size_t bound; harmless for realistic filter sizes but triggers -Wsign-compare
+ int w_index = f*filter_size + i;
+
+ l->weights[w_index] = l->weights[w_index] * l->scales[f] / (sqrtf(l->rolling_variance[f]) + .000001f); // w' = w * s/(sqrt(var)+eps)
+ }
+ }
+
+ l->batch_normalize = 0; // mark as fused so the forward pass skips the batch-norm step
+#ifdef GPU
+ if (gpu_index >= 0) {
+ push_convolutional_layer(*l); // re-upload fused weights/biases to the GPU copy of the layer
+ }
+#endif
+ }
+ }
+ else {
+ //printf(" Fusion skip layer type: %d \n", l->type);
+ }
+ }
+}
--
Gitblit v1.10.0