From a6c51e3b758aee7fd3a6f1d37daa8dcad4891e52 Mon Sep 17 00:00:00 2001
From: AlexeyAB <alexeyab84@gmail.com>
Date: Thu, 08 Mar 2018 22:42:58 +0000
Subject: [PATCH] Fixes for small objects, and fixes for densenet201_yolo.cfg, resnet50_yolo.cfg, resnet152_yolo.cfg
---
src/convolutional_layer.h | 56 ++++++++++++++++++++++++++++++++++++++++++++------------
1 file changed, 44 insertions(+), 12 deletions(-)
diff --git a/src/convolutional_layer.h b/src/convolutional_layer.h
index b42f5e9..6d1e517 100644
--- a/src/convolutional_layer.h
+++ b/src/convolutional_layer.h
@@ -1,21 +1,53 @@
#ifndef CONVOLUTIONAL_LAYER_H
#define CONVOLUTIONAL_LAYER_H
+#include "cuda.h"
#include "image.h"
+#include "activations.h"
+#include "layer.h"
+#include "network.h"
-typedef struct {
- int n;
- int stride;
- image *kernels;
- image *kernel_updates;
- image upsampled;
- image output;
-} convolutional_layer;
+typedef layer convolutional_layer;
-convolutional_layer make_convolutional_layer(int w, int h, int c, int n, int size, int stride);
-void run_convolutional_layer(const image input, const convolutional_layer layer);
-void backpropagate_layer(image input, convolutional_layer layer);
-void backpropagate_layer_convolve(image input, convolutional_layer layer);
+#ifdef GPU
+void forward_convolutional_layer_gpu(convolutional_layer layer, network_state state);
+void backward_convolutional_layer_gpu(convolutional_layer layer, network_state state);
+void update_convolutional_layer_gpu(convolutional_layer layer, int batch, float learning_rate, float momentum, float decay);
+
+void push_convolutional_layer(convolutional_layer layer);
+void pull_convolutional_layer(convolutional_layer layer);
+
+void add_bias_gpu(float *output, float *biases, int batch, int n, int size);
+void backward_bias_gpu(float *bias_updates, float *delta, int batch, int n, int size);
+#ifdef CUDNN
+void cudnn_convolutional_setup(layer *l, int cudnn_preference);
+void cuda_convert_f32_to_f16(float* input_f32, size_t size, float *output_f16);
+#endif
+#endif
+
+convolutional_layer make_convolutional_layer(int batch, int h, int w, int c, int n, int size, int stride, int padding, ACTIVATION activation, int batch_normalize, int binary, int xnor, int adam);
+void denormalize_convolutional_layer(convolutional_layer l);
+void resize_convolutional_layer(convolutional_layer *layer, int w, int h);
+void forward_convolutional_layer(const convolutional_layer layer, network_state state);
+void update_convolutional_layer(convolutional_layer layer, int batch, float learning_rate, float momentum, float decay);
+image *visualize_convolutional_layer(convolutional_layer layer, char *window, image *prev_weights);
+void binarize_weights(float *weights, int n, int size, float *binary);
+void swap_binary(convolutional_layer *l);
+void binarize_weights2(float *weights, int n, int size, char *binary, float *scales);
+
+void backward_convolutional_layer(convolutional_layer layer, network_state state);
+
+void add_bias(float *output, float *biases, int batch, int n, int size);
+void backward_bias(float *bias_updates, float *delta, int batch, int n, int size);
+
+image get_convolutional_image(convolutional_layer layer);
+image get_convolutional_delta(convolutional_layer layer);
+image get_convolutional_weight(convolutional_layer layer, int i);
+
+int convolutional_out_height(convolutional_layer layer);
+int convolutional_out_width(convolutional_layer layer);
+void rescale_weights(convolutional_layer l, float scale, float trans);
+void rgbgr_weights(convolutional_layer l);
#endif
--
Gitblit v1.10.0
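
Note: below is a minimal sketch of how the reworked `make_convolutional_layer` signature declared in this patch might be called. The wrapper name `make_example_conv` and the concrete values (416x416x3 input, 32 filters, 3x3 kernel, LEAKY activation) are illustrative assumptions, not taken from the patch itself.

```c
#include "convolutional_layer.h"   /* pulls in activations.h, layer.h, network.h */

/* Illustrative only: build a 3x3, stride-1, padding-1 convolution with 32
   filters over a 416x416x3 input, batch-normalized, with binary/XNOR weights
   and Adam disabled. Argument order follows the new declaration:
   batch, h, w, c, n, size, stride, padding, activation,
   batch_normalize, binary, xnor, adam. */
static convolutional_layer make_example_conv(void)
{
    return make_convolutional_layer(
        1,          /* batch            */
        416, 416,   /* h, w             */
        3,          /* c: input channels */
        32,         /* n: number of filters */
        3,          /* size: kernel size */
        1,          /* stride           */
        1,          /* padding          */
        LEAKY,      /* activation (enum from activations.h) */
        1,          /* batch_normalize  */
        0, 0, 0);   /* binary, xnor, adam */
}
```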