From 75db98db253adf7fbde293f102ab095b02402f9e Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Fri, 10 Jul 2015 23:38:30 +0000
Subject: [PATCH] normalization layer
---
src/network_kernels.cu | 20 ++++++--------------
 1 file changed, 6 insertions(+), 14 deletions(-)
diff --git a/src/network_kernels.cu b/src/network_kernels.cu
index 36f5594..9cc8be8 100644
--- a/src/network_kernels.cu
+++ b/src/network_kernels.cu
@@ -15,6 +15,7 @@
#include "convolutional_layer.h"
#include "deconvolutional_layer.h"
#include "maxpool_layer.h"
+#include "normalization_layer.h"
#include "cost_layer.h"
#include "softmax_layer.h"
#include "dropout_layer.h"
@@ -44,6 +45,8 @@
forward_cost_layer_gpu(l, state);
} else if(l.type == SOFTMAX){
forward_softmax_layer_gpu(l, state);
+ } else if(l.type == NORMALIZATION){
+ forward_normalization_layer_gpu(l, state);
} else if(l.type == MAXPOOL){
forward_maxpool_layer_gpu(l, state);
} else if(l.type == DROPOUT){
@@ -80,6 +83,8 @@
backward_dropout_layer_gpu(l, state);
} else if(l.type == DETECTION){
backward_detection_layer_gpu(l, state);
+ } else if(l.type == NORMALIZATION){
+ backward_normalization_layer_gpu(l, state);
} else if(l.type == SOFTMAX){
if(i != 0) backward_softmax_layer_gpu(l, state);
} else if(l.type == CONNECTED){
@@ -136,20 +141,7 @@
{
layer l = net.layers[i];
cuda_pull_array(l.output_gpu, l.output, l.outputs*l.batch);
- if(l.type == CONVOLUTIONAL){
- return l.output;
- } else if(l.type == DECONVOLUTIONAL){
- return l.output;
- } else if(l.type == CONNECTED){
- return l.output;
- } else if(l.type == DETECTION){
- return l.output;
- } else if(l.type == MAXPOOL){
- return l.output;
- } else if(l.type == SOFTMAX){
- return l.output;
- }
- return 0;
+ return l.output;
}
float *get_network_output_gpu(network net)
--
Gitblit v1.10.0