From 08b757a0bf76efe8c76b453063a1bb19315bcaa6 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Wed, 14 Jan 2015 20:18:57 +0000
Subject: [PATCH] Stable, needs to be way faster
---
src/network.c | 141 +++++++++++++++++++++++++++++++++-------------
1 file changed, 101 insertions(+), 40 deletions(-)
diff --git a/src/network.c b/src/network.c
index 64a6032..641d782 100644
--- a/src/network.c
+++ b/src/network.c
@@ -15,6 +15,35 @@
#include "softmax_layer.h"
#include "dropout_layer.h"
+char *get_layer_string(LAYER_TYPE a) /* map a LAYER_TYPE enum value to a human-readable name; always returns a string literal, never NULL */
+{
+ switch(a){
+ case CONVOLUTIONAL:
+ return "convolutional";
+ case CONNECTED:
+ return "connected";
+ case MAXPOOL:
+ return "maxpool";
+ case SOFTMAX:
+ return "softmax";
+ case NORMALIZATION:
+ return "normalization";
+ case DROPOUT:
+ return "dropout";
+ case FREEWEIGHT:
+ return "freeweight";
+ case CROP:
+ return "crop";
+ case COST:
+ return "cost";
+ default:
+ break;
+ }
+ return "none"; /* any unhandled layer type falls through to "none" */
+}
+
+
+
network make_network(int n, int batch)
{
network net;
@@ -74,6 +103,7 @@
if(!train) continue;
dropout_layer layer = *(dropout_layer *)net.layers[i];
forward_dropout_layer(layer, input);
+ input = layer.output;
}
else if(net.types[i] == FREEWEIGHT){
if(!train) continue;
@@ -102,6 +132,7 @@
}
else if(net.types[i] == CONNECTED){
connected_layer layer = *(connected_layer *)net.layers[i];
+ //secret_update_connected_layer((connected_layer *)net.layers[i]);
update_connected_layer(layer);
}
}
@@ -119,12 +150,16 @@
softmax_layer layer = *(softmax_layer *)net.layers[i];
return layer.output;
} else if(net.types[i] == DROPOUT){
- return get_network_output_layer(net, i-1);
+ dropout_layer layer = *(dropout_layer *)net.layers[i];
+ return layer.output;
} else if(net.types[i] == FREEWEIGHT){
return get_network_output_layer(net, i-1);
} else if(net.types[i] == CONNECTED){
connected_layer layer = *(connected_layer *)net.layers[i];
return layer.output;
+ } else if(net.types[i] == CROP){
+ crop_layer layer = *(crop_layer *)net.layers[i];
+ return layer.output;
} else if(net.types[i] == NORMALIZATION){
normalization_layer layer = *(normalization_layer *)net.layers[i];
return layer.output;
@@ -150,6 +185,7 @@
softmax_layer layer = *(softmax_layer *)net.layers[i];
return layer.delta;
} else if(net.types[i] == DROPOUT){
+ if(i == 0) return 0;
return get_network_delta_layer(net, i-1);
} else if(net.types[i] == FREEWEIGHT){
return get_network_delta_layer(net, i-1);
@@ -242,17 +278,15 @@
}
}
-
-
-
float train_network_datum(network net, float *x, float *y)
{
+ #ifdef GPU
+ if(gpu_index >= 0) return train_network_datum_gpu(net, x, y);
+ #endif
forward_network(net, x, y, 1);
- //int class = get_predicted_class_network(net);
backward_network(net, x);
float error = get_network_cost(net);
update_network(net);
- //return (y[class]?1:0);
return error;
}
@@ -274,6 +308,25 @@
return (float)sum/(n*batch);
}
+float train_network(network net, data d) /* one pass over d in mini-batches; returns average cost per example */
+{
+ int batch = net.batch;
+ int n = d.X.rows / batch; /* number of full batches; remainder rows (< batch) are skipped */
+ float *X = calloc(batch*d.X.cols, sizeof(float)); /* NOTE(review): calloc results unchecked — confirm OOM policy */
+ float *y = calloc(batch*d.y.cols, sizeof(float));
+
+ int i;
+ float sum = 0;
+ for(i = 0; i < n; ++i){
+ get_next_batch(d, batch, i*batch, X, y);
+ float err = train_network_datum(net, X, y);
+ sum += err;
+ }
+ free(X);
+ free(y);
+ return n ? sum/(n*batch) : 0; /* guard: d.X.rows < batch gives n == 0; avoid 0/0 (NaN) */
+}
+
float train_network_batch(network net, data d, int n)
{
int i,j;
@@ -293,40 +346,6 @@
return (float)sum/(n*batch);
}
-float train_network_data_cpu(network net, data d, int n)
-{
- int batch = net.batch;
- float *X = calloc(batch*d.X.cols, sizeof(float));
- float *y = calloc(batch*d.y.cols, sizeof(float));
-
- int i;
- float sum = 0;
- for(i = 0; i < n; ++i){
- get_next_batch(d, batch, i*batch, X, y);
- float err = train_network_datum(net, X, y);
- sum += err;
- }
- free(X);
- free(y);
- return (float)sum/(n*batch);
-}
-
-void train_network(network net, data d)
-{
- int i;
- int correct = 0;
- for(i = 0; i < d.X.rows; ++i){
- correct += train_network_datum(net, d.X.vals[i], d.y.vals[i]);
- if(i%100 == 0){
- visualize_network(net);
- cvWaitKey(10);
- }
- }
- visualize_network(net);
- cvWaitKey(100);
- fprintf(stderr, "Accuracy: %f\n", (float)correct/d.X.rows);
-}
-
void set_learning_network(network *net, float rate, float momentum, float decay)
{
int i;
@@ -382,6 +401,10 @@
cost_layer *layer = (cost_layer *)net->layers[i];
layer->batch = b;
}
+ else if(net->types[i] == CROP){
+ crop_layer *layer = (crop_layer *)net->layers[i];
+ layer->batch = b;
+ }
}
}
@@ -402,6 +425,9 @@
} else if(net.types[i] == DROPOUT){
dropout_layer layer = *(dropout_layer *) net.layers[i];
return layer.inputs;
+ } else if(net.types[i] == CROP){
+ crop_layer layer = *(crop_layer *) net.layers[i];
+ return layer.c*layer.h*layer.w;
}
else if(net.types[i] == FREEWEIGHT){
freeweight_layer layer = *(freeweight_layer *) net.layers[i];
@@ -411,6 +437,7 @@
softmax_layer layer = *(softmax_layer *)net.layers[i];
return layer.inputs;
}
+ printf("Can't find input size\n");
return 0;
}
@@ -426,6 +453,10 @@
image output = get_maxpool_image(layer);
return output.h*output.w*output.c;
}
+ else if(net.types[i] == CROP){
+ crop_layer layer = *(crop_layer *) net.layers[i];
+ return layer.c*layer.crop_height*layer.crop_width;
+ }
else if(net.types[i] == CONNECTED){
connected_layer layer = *(connected_layer *)net.layers[i];
return layer.outputs;
@@ -442,6 +473,7 @@
softmax_layer layer = *(softmax_layer *)net.layers[i];
return layer.inputs;
}
+ printf("Can't find output size\n");
return 0;
}
@@ -549,6 +581,10 @@
float *network_predict(network net, float *input)
{
+ #ifdef GPU
+ if(gpu_index >= 0) return network_predict_gpu(net, input);
+ #endif
+
forward_network(net, input, 0, 0);
float *out = get_network_output(net);
return out;
@@ -646,6 +682,31 @@
}
}
+void compare_networks(network n1, network n2, data test) /* print 2x2 agreement table and McNemar's chi-squared statistic for two nets */
+{
+ matrix g1 = network_predict_data(n1, test);
+ matrix g2 = network_predict_data(n2, test); /* NOTE(review): g1/g2 appear to be leaked — free when a free_matrix is available; confirm */
+ int i;
+ int a,b,c,d; /* contingency table: a = both wrong, b = only n2 right, c = only n1 right, d = both right */
+ a = b = c = d = 0;
+ for(i = 0; i < g1.rows; ++i){
+ int truth = max_index(test.y.vals[i], test.y.cols);
+ int p1 = max_index(g1.vals[i], g1.cols);
+ int p2 = max_index(g2.vals[i], g2.cols);
+ if(p1 == truth){
+ if(p2 == truth) ++d;
+ else ++c;
+ }else{
+ if(p2 == truth) ++b;
+ else ++a;
+ }
+ }
+ printf("%5d %5d\n%5d %5d\n", a, b, c, d);
+ float num = pow((abs(b - c) - 1.), 2.); /* McNemar's test with continuity correction: (|b-c|-1)^2 / (b+c) */
+ float den = b + c;
+ printf("%f\n", den ? num/den : 0.f); /* guard: nets that never disagree give b + c == 0; avoid NaN/inf */
+}
+
float network_accuracy(network net, data d)
{
matrix guess = network_predict_data(net, d);
--
Gitblit v1.10.0