From 0f7f2899b65343e56b0a1188f703d459d824d398 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Mon, 16 Nov 2015 03:51:26 +0000
Subject: [PATCH] Fix for CUDA 7.5

---
 src/convolutional_layer.h |    7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/src/convolutional_layer.h b/src/convolutional_layer.h
index 3954f8a..70a3d05 100644
--- a/src/convolutional_layer.h
+++ b/src/convolutional_layer.h
@@ -17,11 +17,12 @@
 void push_convolutional_layer(convolutional_layer layer);
 void pull_convolutional_layer(convolutional_layer layer);
 
-void bias_output_gpu(float *output, float *biases, int batch, int n, int size);
+void add_bias_gpu(float *output, float *biases, int batch, int n, int size);
 void backward_bias_gpu(float *bias_updates, float *delta, int batch, int n, int size);
 #endif
 
-convolutional_layer make_convolutional_layer(int batch, int h, int w, int c, int n, int size, int stride, int pad, ACTIVATION activation);
+convolutional_layer make_convolutional_layer(int batch, int h, int w, int c, int n, int size, int stride, int pad, ACTIVATION activation, int batch_normalization);
+void denormalize_convolutional_layer(convolutional_layer l);
 void resize_convolutional_layer(convolutional_layer *layer, int w, int h);
 void forward_convolutional_layer(const convolutional_layer layer, network_state state);
 void update_convolutional_layer(convolutional_layer layer, int batch, float learning_rate, float momentum, float decay);
@@ -38,6 +39,8 @@
 
 int convolutional_out_height(convolutional_layer layer);
 int convolutional_out_width(convolutional_layer layer);
+void rescale_filters(convolutional_layer l, float scale, float trans);
+void rgbgr_filters(convolutional_layer l);
 
 #endif
 

--
Gitblit v1.10.0