From a723e1c62a27aeb39aaf7fcdeb3beb4e89fba32d Mon Sep 17 00:00:00 2001 From: Alexey <AlexeyAB@users.noreply.github.com> Date: Wed, 15 Aug 2018 20:52:09 +0000 Subject: [PATCH] Merge pull request #766 from HotChick91/AlexeyAB-mask --- src/convolutional_layer.h | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/convolutional_layer.h b/src/convolutional_layer.h index b7953ee..8869a3c 100644 --- a/src/convolutional_layer.h +++ b/src/convolutional_layer.h @@ -20,11 +20,12 @@ void add_bias_gpu(float *output, float *biases, int batch, int n, int size); void backward_bias_gpu(float *bias_updates, float *delta, int batch, int n, int size); #ifdef CUDNN -void cudnn_convolutional_setup(layer *l); +void cudnn_convolutional_setup(layer *l, int cudnn_preference); +void cuda_convert_f32_to_f16(float* input_f32, size_t size, float *output_f16); #endif #endif -convolutional_layer make_convolutional_layer(int batch, int h, int w, int c, int n, int size, int stride, int pad, ACTIVATION activation, int batch_normalization, int binary, int xnor); +convolutional_layer make_convolutional_layer(int batch, int h, int w, int c, int n, int size, int stride, int padding, ACTIVATION activation, int batch_normalize, int binary, int xnor, int adam); void denormalize_convolutional_layer(convolutional_layer l); void resize_convolutional_layer(convolutional_layer *layer, int w, int h); void forward_convolutional_layer(const convolutional_layer layer, network_state state); @@ -34,6 +35,8 @@ void swap_binary(convolutional_layer *l); void binarize_weights2(float *weights, int n, int size, char *binary, float *scales); +void binary_align_weights(convolutional_layer *l); + void backward_convolutional_layer(convolutional_layer layer, network_state state); void add_bias(float *output, float *biases, int batch, int n, int size); -- Gitblit v1.10.0