From f047cfff99e00e28c02eb59b6d32386c122f9af6 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Sun, 08 Mar 2015 18:31:12 +0000
Subject: [PATCH] added detection layer support to GPU network kernels
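
Wire the detection layer into the CUDA code paths in network_kernels.cu:

- include detection_layer.h and forward-declare get_network_output_gpu()
- forward_network_gpu() dispatches DETECTION layers to
  forward_detection_layer_gpu(), passing the ground truth
- backward_network_gpu() gains a truth argument (the call site below is
  updated to pass *net.truth_gpu) and dispatches to
  backward_detection_layer_gpu()
- the per-layer output/delta getters return the detection layer's
  output_gpu/delta_gpu buffers
- the host-side output getter pulls the detection layer's output back
  from the GPU, sized with get_detection_layer_output_size()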
---
src/network_kernels.cu | 33 +++++++++++++++++++++++++++++++--
1 files changed, 31 insertions(+), 2 deletions(-)
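
Note: the signature of backward_network_gpu() changes here; if it is
declared or called anywhere outside this file, those sites would also
need the extra truth argument, e.g.
backward_network_gpu(net, *net.input_gpu, *net.truth_gpu).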
diff --git a/src/network_kernels.cu b/src/network_kernels.cu
index 1f3f2e0..928c7f9 100644
--- a/src/network_kernels.cu
+++ b/src/network_kernels.cu
@@ -9,6 +9,7 @@
#include "crop_layer.h"
#include "connected_layer.h"
+#include "detection_layer.h"
#include "convolutional_layer.h"
#include "deconvolutional_layer.h"
#include "maxpool_layer.h"
@@ -21,6 +22,7 @@
extern "C" float * get_network_output_gpu_layer(network net, int i);
extern "C" float * get_network_delta_gpu_layer(network net, int i);
+float *get_network_output_gpu(network net);
void forward_network_gpu(network net, float * input, float * truth, int train)
{
@@ -46,6 +48,11 @@
forward_connected_layer_gpu(layer, input);
input = layer.output_gpu;
}
+ else if(net.types[i] == DETECTION){
+ detection_layer layer = *(detection_layer *)net.layers[i];
+ forward_detection_layer_gpu(layer, input, truth);
+ input = layer.output_gpu;
+ }
else if(net.types[i] == MAXPOOL){
maxpool_layer layer = *(maxpool_layer *)net.layers[i];
forward_maxpool_layer_gpu(layer, input);
@@ -72,7 +79,7 @@
}
}
-void backward_network_gpu(network net, float * input)
+void backward_network_gpu(network net, float * input, float *truth)
{
int i;
float * prev_input;
@@ -102,6 +109,10 @@
connected_layer layer = *(connected_layer *)net.layers[i];
backward_connected_layer_gpu(layer, prev_input, prev_delta);
}
+ else if(net.types[i] == DETECTION){
+ detection_layer layer = *(detection_layer *)net.layers[i];
+ backward_detection_layer_gpu(layer, prev_input, prev_delta);
+ }
else if(net.types[i] == MAXPOOL){
maxpool_layer layer = *(maxpool_layer *)net.layers[i];
backward_maxpool_layer_gpu(layer, prev_delta);
@@ -147,6 +158,10 @@
deconvolutional_layer layer = *(deconvolutional_layer *)net.layers[i];
return layer.output_gpu;
}
+ else if(net.types[i] == DETECTION){
+ detection_layer layer = *(detection_layer *)net.layers[i];
+ return layer.output_gpu;
+ }
else if(net.types[i] == CONNECTED){
connected_layer layer = *(connected_layer *)net.layers[i];
return layer.output_gpu;
@@ -175,6 +190,10 @@
convolutional_layer layer = *(convolutional_layer *)net.layers[i];
return layer.delta_gpu;
}
+ else if(net.types[i] == DETECTION){
+ detection_layer layer = *(detection_layer *)net.layers[i];
+ return layer.delta_gpu;
+ }
else if(net.types[i] == DECONVOLUTIONAL){
deconvolutional_layer layer = *(deconvolutional_layer *)net.layers[i];
return layer.delta_gpu;
@@ -214,11 +233,15 @@
forward_network_gpu(net, *net.input_gpu, *net.truth_gpu, 1);
//printf("forw %f\n", sec(clock() - time));
//time = clock();
- backward_network_gpu(net, *net.input_gpu);
+ backward_network_gpu(net, *net.input_gpu, *net.truth_gpu);
//printf("back %f\n", sec(clock() - time));
//time = clock();
update_network_gpu(net);
float error = get_network_cost(net);
+
+ //print_letters(y, 50);
+ //float *out = get_network_output_gpu(net);
+ //print_letters(out, 50);
//printf("updt %f\n", sec(clock() - time));
//time = clock();
return error;
@@ -239,6 +262,12 @@
cuda_pull_array(layer.output_gpu, layer.output, layer.outputs*layer.batch);
return layer.output;
}
+ else if(net.types[i] == DETECTION){
+ detection_layer layer = *(detection_layer *)net.layers[i];
+ int outputs = get_detection_layer_output_size(layer);
+ cuda_pull_array(layer.output_gpu, layer.output, outputs*layer.batch);
+ return layer.output;
+ }
else if(net.types[i] == MAXPOOL){
maxpool_layer layer = *(maxpool_layer *)net.layers[i];
return layer.output;
--
Gitblit v1.10.0