From 08b757a0bf76efe8c76b453063a1bb19315bcaa6 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Wed, 14 Jan 2015 20:18:57 +0000
Subject: [PATCH] Stable, needs to be way faster
---
 src/cnn.c         |  6 +++---
 src/network.c     | 29 +++++++++++++++++++++++++++++
 src/network.h     |  1 +
 src/network_gpu.c |  8 ++++----
 4 files changed, 37 insertions(+), 7 deletions(-)
diff --git a/src/cnn.c b/src/cnn.c
index e587a1b..10705fd 100644
--- a/src/cnn.c
+++ b/src/cnn.c
@@ -105,7 +105,7 @@
time=clock();
float loss = train_network(net, train);
avg_loss = avg_loss*.9 + loss*.1;
- printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), i*imgs*net.batch);
+ printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), i*imgs);
if(i%100==0){
char buff[256];
sprintf(buff, "/home/pjreddie/imagenet_backup/detnet_%d.cfg", i);
@@ -213,7 +213,7 @@
set_learning_network(&net, net.learning_rate, 0, net.decay);
printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);
int imgs = 1024;
- int i = 77700;
+ int i = 0;
char **labels = get_labels("/home/pjreddie/data/imagenet/cls.labels.list");
list *plist = get_paths("/data/imagenet/cls.train.list");
char **paths = (char **)list_to_array(plist);
@@ -240,7 +240,7 @@
free_data(train);
if(i%100==0){
char buff[256];
- sprintf(buff, "/home/pjreddie/imagenet_backup/net_%d.cfg", i);
+ sprintf(buff, "/home/pjreddie/imagenet_backup/alexnet_%d.cfg", i);
save_network(net, buff);
}
}
diff --git a/src/network.c b/src/network.c
index 5c5ce9d..641d782 100644
--- a/src/network.c
+++ b/src/network.c
@@ -15,6 +15,35 @@
#include "softmax_layer.h"
#include "dropout_layer.h"
+char *get_layer_string(LAYER_TYPE a)
+{
+ switch(a){
+ case CONVOLUTIONAL:
+ return "convolutional";
+ case CONNECTED:
+ return "connected";
+ case MAXPOOL:
+ return "maxpool";
+ case SOFTMAX:
+ return "softmax";
+ case NORMALIZATION:
+ return "normalization";
+ case DROPOUT:
+ return "dropout";
+ case FREEWEIGHT:
+ return "freeweight";
+ case CROP:
+ return "crop";
+ case COST:
+ return "cost";
+ default:
+ break;
+ }
+ return "none";
+}
+
+
+
network make_network(int n, int batch)
{
network net;
diff --git a/src/network.h b/src/network.h
index 7a401bd..c6c7790 100644
--- a/src/network.h
+++ b/src/network.h
@@ -41,6 +41,7 @@
#endif
void compare_networks(network n1, network n2, data d);
+char *get_layer_string(LAYER_TYPE a);
network make_network(int n, int batch);
void forward_network(network net, float *input, float *truth, int train);
diff --git a/src/network_gpu.c b/src/network_gpu.c
index b53d534..c958056 100644
--- a/src/network_gpu.c
+++ b/src/network_gpu.c
@@ -24,7 +24,7 @@
{
int i;
for(i = 0; i < net.n; ++i){
- clock_t time = clock();
+ //clock_t time = clock();
if(net.types[i] == CONVOLUTIONAL){
convolutional_layer layer = *(convolutional_layer *)net.layers[i];
forward_convolutional_layer_gpu(layer, input);
@@ -61,7 +61,7 @@
input = layer.output_cl;
}
check_error(cl);
- //printf("Forw %d %f\n", i, sec(clock() - time));
+ //printf("Forward %d %s %f\n", i, get_layer_string(net.types[i]), sec(clock() - time));
}
}
@@ -71,7 +71,7 @@
cl_mem prev_input;
cl_mem prev_delta;
for(i = net.n-1; i >= 0; --i){
- clock_t time = clock();
+ //clock_t time = clock();
if(i == 0){
prev_input = input;
prev_delta = 0;
@@ -104,7 +104,7 @@
backward_softmax_layer_gpu(layer, prev_delta);
}
check_error(cl);
- //printf("Back %d %f\n", i, sec(clock() - time));
+ //printf("Backward %d %s %f\n", i, get_layer_string(net.types[i]), sec(clock() - time));
}
}
--
Gitblit v1.10.0