From afb8b4f98bab53e85f2e5a3e7b3d11f0c9207ec7 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Thu, 23 Jun 2016 04:46:32 +0000
Subject: [PATCH] CVPR prep

---
 src/convolutional_layer.h | 9 +++++++--
 1 files changed, 7 insertions(+), 2 deletions(-)

diff --git a/src/convolutional_layer.h b/src/convolutional_layer.h
index e22a396..972b765 100644
--- a/src/convolutional_layer.h
+++ b/src/convolutional_layer.h
@@ -2,7 +2,6 @@
 #define CONVOLUTIONAL_LAYER_H

 #include "cuda.h"
-#include "params.h"
 #include "image.h"
 #include "activations.h"
 #include "layer.h"
@@ -20,14 +19,20 @@
 void add_bias_gpu(float *output, float *biases, int batch, int n, int size);
 void backward_bias_gpu(float *bias_updates, float *delta, int batch, int n, int size);
+#ifdef CUDNN
+void cudnn_convolutional_setup(layer *l);
+#endif
 #endif

-convolutional_layer make_convolutional_layer(int batch, int h, int w, int c, int n, int size, int stride, int pad, ACTIVATION activation, int batch_normalization);
+convolutional_layer make_convolutional_layer(int batch, int h, int w, int c, int n, int size, int stride, int pad, ACTIVATION activation, int batch_normalization, int binary, int xnor);
 void denormalize_convolutional_layer(convolutional_layer l);
 void resize_convolutional_layer(convolutional_layer *layer, int w, int h);
 void forward_convolutional_layer(const convolutional_layer layer, network_state state);
 void update_convolutional_layer(convolutional_layer layer, int batch, float learning_rate, float momentum, float decay);
 image *visualize_convolutional_layer(convolutional_layer layer, char *window, image *prev_filters);
+void binarize_filters(float *filters, int n, int size, float *binary);
+void swap_binary(convolutional_layer *l);
+void binarize_filters2(float *filters, int n, int size, char *binary, float *scales);
 void backward_convolutional_layer(convolutional_layer layer, network_state state);
--
Gitblit v1.10.0
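
For context, the new declarations (binarize_filters, swap_binary, binarize_filters2, and the binary/xnor flags on make_convolutional_layer) are the header-side plumbing for binary-weight convolutions. This patch only touches the header, so as a minimal sketch of what binarize_filters() plausibly computes, assuming XNOR-Net-style binarization with a per-filter scale taken from the mean absolute weight (the committed body lives in convolutional_layer.c and may differ):

#include <math.h>

/* Illustrative sketch: replace each of the n filters (size weights each)
 * with sign(w) scaled by that filter's mean absolute value, writing the
 * result into the caller-provided binary buffer. */
void binarize_filters(float *filters, int n, int size, float *binary)
{
    int f, i;
    for (f = 0; f < n; ++f) {
        /* Per-filter scale: mean of |w| over this filter's weights. */
        float mean = 0;
        for (i = 0; i < size; ++i) {
            mean += fabsf(filters[f*size + i]);
        }
        mean /= size;
        /* Keep only the sign of each weight, scaled by the mean. */
        for (i = 0; i < size; ++i) {
            binary[f*size + i] = (filters[f*size + i] > 0) ? mean : -mean;
        }
    }
}

Under the same assumption, swap_binary() would exchange the layer's real-valued filter pointer with the binarized copy so the forward pass convolves with binary weights while updates still apply to the full-precision filters.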