From 8bcdee86585f496afe1a8a38d608ea0504a11243 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Tue, 01 Sep 2015 18:22:03 +0000
Subject: [PATCH] Some bug fixes, random stuff

Zero a layer's delta on the GPU only when l.delta_gpu (the device-side
buffer) is set; the check previously tested the host-side l.delta
pointer. Also dispatch REGION layers to their forward/backward GPU
implementations, and pull in <assert.h> and region_layer.h.
---
src/network_kernels.cu | 8 +++++++-
 1 file changed, 7 insertions(+), 1 deletion(-)
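
Note (kept above the diff so it is not applied): the delta check fixed
below matters because l.delta is the host-side buffer while l.delta_gpu
is the device-side one, and scal_ongpu operates on device memory, so the
guard has to test the device pointer. A minimal standalone sketch of
that guard, using plain CUDA runtime calls in place of darknet's
scal_ongpu; zero_delta_gpu and the cudaMemset route are illustrative
assumptions, not code from this commit:

    #include <assert.h>
    #include <cuda_runtime.h>

    /* Zero a layer's device-side delta buffer, but only if it was
     * allocated. scal_ongpu(outputs*batch, 0, delta_gpu, 1) has the same
     * intent; cudaMemset keeps the sketch self-contained. */
    void zero_delta_gpu(float *delta_gpu, int outputs, int batch)
    {
        if(!delta_gpu) return;                       /* no device buffer: nothing to clear */
        size_t n = (size_t)outputs * (size_t)batch;  /* one delta per output per batch item */
        cudaError_t err = cudaMemset(delta_gpu, 0, n * sizeof(float));
        assert(err == cudaSuccess);                  /* assert.h is newly included above */
    }
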
diff --git a/src/network_kernels.cu b/src/network_kernels.cu
index 6562590..a73ddd9 100644
--- a/src/network_kernels.cu
+++ b/src/network_kernels.cu
@@ -1,6 +1,7 @@
extern "C" {
#include <stdio.h>
#include <time.h>
+#include <assert.h>
#include "network.h"
#include "image.h"
@@ -12,6 +13,7 @@
#include "crop_layer.h"
#include "connected_layer.h"
#include "detection_layer.h"
+#include "region_layer.h"
#include "convolutional_layer.h"
#include "deconvolutional_layer.h"
#include "maxpool_layer.h"
@@ -33,7 +35,7 @@
int i;
for(i = 0; i < net.n; ++i){
layer l = net.layers[i];
- if(l.delta){
+ if(l.delta_gpu){
scal_ongpu(l.outputs * l.batch, 0, l.delta_gpu, 1);
}
if(l.type == CONVOLUTIONAL){
@@ -42,6 +44,8 @@
forward_deconvolutional_layer_gpu(l, state);
} else if(l.type == DETECTION){
forward_detection_layer_gpu(l, state);
+ } else if(l.type == REGION){
+ forward_region_layer_gpu(l, state);
} else if(l.type == CONNECTED){
forward_connected_layer_gpu(l, state);
} else if(l.type == CROP){
@@ -92,6 +96,8 @@
backward_dropout_layer_gpu(l, state);
} else if(l.type == DETECTION){
backward_detection_layer_gpu(l, state);
+ } else if(l.type == REGION){
+ backward_region_layer_gpu(l, state);
} else if(l.type == NORMALIZATION){
backward_normalization_layer_gpu(l, state);
} else if(l.type == SOFTMAX){
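
Note on the REGION branches: they assume region_layer.h (newly included
above) declares GPU entry points matching the existing call convention.
A sketch of the prototypes implied by the call sites; the parameter
types are an inference, and the real header may differ:

    /* Prototypes inferred from the call sites added in this patch; the
     * actual region_layer.h declarations are authoritative. */
    void forward_region_layer_gpu(const layer l, network_state state);
    void backward_region_layer_gpu(const layer l, network_state state);

Both dispatch chains need the new branch: a layer type missing from the
forward or backward if/else chain above would simply be skipped at that
stage.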
--
Gitblit v1.10.0