From d7286c273211ffeb1f56594f863d1ee9922be6d4 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Thu, 07 Nov 2013 00:09:41 +0000
Subject: [PATCH] Loading may or may not work. But probably.

---
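Notes: the constructor now heap-allocates the layer with calloc and returns a
connected_layer* instead of a struct by value, which presumably gives the
loading code a stable pointer whose weights it can fill in place. The struct
and activation enum are declared in src/connected_layer.h, which is not part
of this patch; a minimal sketch of the declarations this constructor assumes
(field names are taken from the code below, the function-pointer signatures
are a guess) might look like:

    /* sketch only; the real definitions live in src/connected_layer.h */
    typedef enum {SIGMOID, RELU, IDENTITY} ACTIVATION;

    typedef struct {
        int inputs;
        int outputs;
        double *weights;          /* inputs*outputs entries */
        double *weight_updates;
        double *biases;           /* outputs entries */
        double *bias_updates;
        double *output;           /* outputs entries */
        double (*activation)(double);
        double (*gradient)(double);
    } connected_layer;

    connected_layer *make_connected_layer(int inputs, int outputs, ACTIVATION activator);
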
 src/connected_layer.c |   34 +++++++++++++++++-----------------
 1 file changed, 17 insertions(+), 17 deletions(-)

diff --git a/src/connected_layer.c b/src/connected_layer.c
index 11143b9..9fafc38 100644
--- a/src/connected_layer.c
+++ b/src/connected_layer.c
@@ -4,34 +4,34 @@
 #include <stdlib.h>
 #include <string.h>
 
-connected_layer make_connected_layer(int inputs, int outputs, ACTIVATOR_TYPE activator)
+connected_layer *make_connected_layer(int inputs, int outputs, ACTIVATION activator)
 {
     int i;
-    connected_layer layer;
-    layer.inputs = inputs;
-    layer.outputs = outputs;
+    connected_layer *layer = calloc(1, sizeof(connected_layer));
+    layer->inputs = inputs;
+    layer->outputs = outputs;
 
-    layer.output = calloc(outputs, sizeof(double*));
+    layer->output = calloc(outputs, sizeof(double));
 
-    layer.weight_updates = calloc(inputs*outputs, sizeof(double));
-    layer.weights = calloc(inputs*outputs, sizeof(double));
+    layer->weight_updates = calloc(inputs*outputs, sizeof(double));
+    layer->weights = calloc(inputs*outputs, sizeof(double));
     for(i = 0; i < inputs*outputs; ++i)
-        layer.weights[i] = .5 - (double)rand()/RAND_MAX;
+        layer->weights[i] = .5 - (double)rand()/RAND_MAX;
 
-    layer.bias_updates = calloc(outputs, sizeof(double));
-    layer.biases = calloc(outputs, sizeof(double));
+    layer->bias_updates = calloc(outputs, sizeof(double));
+    layer->biases = calloc(outputs, sizeof(double));
     for(i = 0; i < outputs; ++i)
-        layer.biases[i] = (double)rand()/RAND_MAX;
+        layer->biases[i] = (double)rand()/RAND_MAX;
 
     if(activator == SIGMOID){
-        layer.activation = sigmoid_activation;
-        layer.gradient = sigmoid_gradient;
+        layer->activation = sigmoid_activation;
+        layer->gradient = sigmoid_gradient;
     }else if(activator == RELU){
-        layer.activation = relu_activation;
-        layer.gradient = relu_gradient;
+        layer->activation = relu_activation;
+        layer->gradient = relu_gradient;
     }else if(activator == IDENTITY){
-        layer.activation = identity_activation;
-        layer.gradient = identity_gradient;
+        layer->activation = identity_activation;
+        layer->gradient = identity_gradient;
     }
 
     return layer;

--
Gitblit v1.10.0
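
Call sites that previously took the layer by value need the matching pointer
form. A hypothetical caller is sketched below; free_connected_layer is not
part of this patch and is shown only to pair each calloc above with a free:

    #include <stdlib.h>
    #include "connected_layer.h"

    /* hypothetical helper, not introduced by this patch */
    void free_connected_layer(connected_layer *layer)
    {
        free(layer->output);
        free(layer->weights);
        free(layer->weight_updates);
        free(layer->biases);
        free(layer->bias_updates);
        free(layer);
    }

    int main(void)
    {
        connected_layer *layer = make_connected_layer(4, 2, RELU);
        /* forward/backward passes and weight loading would go here */
        free_connected_layer(layer);
        return 0;
    }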