From 809f924db2823b9e1eaf3efb9370380edc1f76ed Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Fri, 23 Jan 2015 00:38:24 +0000
Subject: [PATCH] CUDA so fast
---
src/cnn.c | 153 ++++++++++++++++++++++++++++++++++++++++----------
1 file changed, 122 insertions(+), 31 deletions(-)
diff --git a/src/cnn.c b/src/cnn.c
index 59948aa..c3b7b2c 100644
--- a/src/cnn.c
+++ b/src/cnn.c
@@ -7,7 +7,7 @@
#include "data.h"
#include "matrix.h"
#include "utils.h"
-#include "mini_blas.h"
+#include "blas.h"
#include "matrix.h"
#include "server.h"
@@ -71,11 +71,11 @@
}
-void train_detection_net()
+void train_detection_net(char *cfgfile)
{
float avg_loss = 1;
//network net = parse_network_cfg("/home/pjreddie/imagenet_backup/alexnet_1270.cfg");
- network net = parse_network_cfg("cfg/detnet.cfg");
+ network net = parse_network_cfg(cfgfile);
printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);
int imgs = 1024;
srand(time(0));
@@ -84,11 +84,15 @@
list *plist = get_paths("/home/pjreddie/data/imagenet/horse.txt");
char **paths = (char **)list_to_array(plist);
printf("%d\n", plist->size);
+ data train, buffer;
+ pthread_t load_thread = load_data_detection_thread(imgs, paths, plist->size, 256, 256, 7, 7, 256, &buffer);
clock_t time;
while(1){
i += 1;
time=clock();
- data train = load_data_detection_jitter_random(imgs, paths, plist->size, 256, 256, 7, 7, 256);
+ pthread_join(load_thread, 0);
+ train = buffer;
+ load_thread = load_data_detection_thread(imgs, paths, plist->size, 256, 256, 7, 7, 256, &buffer);
//data train = load_data_detection_random(imgs, paths, plist->size, 224, 224, 7, 7, 256);
/*
@@ -101,8 +105,8 @@
time=clock();
float loss = train_network(net, train);
avg_loss = avg_loss*.9 + loss*.1;
- printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), i*imgs*net.batch);
- if(i%10==0){
+ printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), i*imgs);
+ if(i%100==0){
char buff[256];
sprintf(buff, "/home/pjreddie/imagenet_backup/detnet_%d.cfg", i);
save_network(net, buff);
@@ -111,6 +115,58 @@
}
}
+void validate_detection_net(char *cfgfile)
+{
+ network net = parse_network_cfg(cfgfile);
+ fprintf(stderr, "Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);
+ srand(time(0));
+
+ list *plist = get_paths("/home/pjreddie/data/imagenet/detection.val");
+ char **paths = (char **)list_to_array(plist);
+
+ int m = plist->size;
+ int i = 0;
+ int splits = 50;
+ int num = (i+1)*m/splits - i*m/splits;
+
+ fprintf(stderr, "%d\n", m);
+ data val, buffer;
+ pthread_t load_thread = load_data_thread(paths, num, 0, 0, 245, 224, 224, &buffer);
+ clock_t time;
+ for(i = 1; i <= splits; ++i){
+ time=clock();
+ pthread_join(load_thread, 0);
+ val = buffer;
+ normalize_data_rows(val);
+
+ num = (i+1)*m/splits - i*m/splits;
+ char **part = paths+(i*m/splits);
+ if(i != splits) load_thread = load_data_thread(part, num, 0, 0, 245, 224, 224, &buffer);
+
+ fprintf(stderr, "Loaded: %lf seconds\n", sec(clock()-time));
+ matrix pred = network_predict_data(net, val);
+ int j, k;
+ for(j = 0; j < pred.rows; ++j){
+ for(k = 0; k < pred.cols; k += 5){
+ if (pred.vals[j][k] > .005){
+ int index = k/5;
+ int r = index/7;
+ int c = index%7;
+ float y = (32.*(r + pred.vals[j][k+1]))/224.;
+ float x = (32.*(c + pred.vals[j][k+2]))/224.;
+ float h = (256.*(pred.vals[j][k+3]))/224.;
+ float w = (256.*(pred.vals[j][k+4]))/224.;
+ printf("%d %f %f %f %f %f\n", (i-1)*m/splits + j + 1, pred.vals[j][k], y, x, h, w);
+ }
+ }
+ }
+
+ time=clock();
+ free_data(val);
+ }
+}
+/*
+
void train_imagenet_distributed(char *address)
{
float avg_loss = 1;
@@ -148,6 +204,7 @@
free_data(train);
}
}
+*/
void train_imagenet(char *cfgfile)
{
@@ -155,10 +212,10 @@
//network net = parse_network_cfg("/home/pjreddie/imagenet_backup/alexnet_1270.cfg");
srand(time(0));
network net = parse_network_cfg(cfgfile);
- set_learning_network(&net, net.learning_rate/10., .5, .0005);
+ set_learning_network(&net, net.learning_rate, net.momentum, net.decay);
printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);
- int imgs = 1024;
- int i = 44700;
+ int imgs = 3072;
+ int i = net.seen/imgs;
char **labels = get_labels("/home/pjreddie/data/imagenet/cls.labels.list");
list *plist = get_paths("/data/imagenet/cls.train.list");
char **paths = (char **)list_to_array(plist);
@@ -169,21 +226,24 @@
data buffer;
load_thread = load_data_thread(paths, imgs, plist->size, labels, 1000, 256, 256, &buffer);
while(1){
- i += 1;
+ ++i;
time=clock();
pthread_join(load_thread, 0);
train = buffer;
- normalize_data_rows(train);
+ //normalize_data_rows(train);
+ //translate_data_rows(train, -128);
+ //scale_data_rows(train, 1./128);
load_thread = load_data_thread(paths, imgs, plist->size, labels, 1000, 256, 256, &buffer);
printf("Loaded: %lf seconds\n", sec(clock()-time));
time=clock();
float loss = train_network(net, train);
+ net.seen += imgs;
avg_loss = avg_loss*.9 + loss*.1;
- printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), i*imgs);
+ printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), net.seen);
free_data(train);
if(i%100==0){
char buff[256];
- sprintf(buff, "/home/pjreddie/imagenet_backup/net_%d.cfg", i);
+ sprintf(buff, "/home/pjreddie/imagenet_backup/alexnet_%d.cfg", i);
save_network(net, buff);
}
}
@@ -215,7 +275,7 @@
pthread_join(load_thread, 0);
val = buffer;
- normalize_data_rows(val);
+ //normalize_data_rows(val);
num = (i+1)*m/splits - i*m/splits;
char **part = paths+(i*m/splits);
@@ -261,8 +321,10 @@
int i = 0;
char *filename = "data/test.jpg";
- image im = load_image_color(filename, 224, 224);
- z_normalize_image(im);
+ image im = load_image_color(filename, 256, 256);
+ //z_normalize_image(im);
+ translate_image(im, -128);
+ scale_image(im, 1/128.);
float *X = im.data;
forward_network(net, X, 0, 1);
for(i = 0; i < net.n; ++i){
@@ -348,9 +410,9 @@
if(count%10 == 0){
float test_acc = network_accuracy(net, test);
printf("%d: Loss: %f, Test Acc: %f, Time: %lf seconds\n", count, loss, test_acc,sec(clock()-time));
- char buff[256];
- sprintf(buff, "unikitty/cifar10_%d.cfg", count);
- save_network(net, buff);
+ //char buff[256];
+ //sprintf(buff, "unikitty/cifar10_%d.cfg", count);
+ //save_network(net, buff);
}else{
printf("%d: Loss: %f, Time: %lf seconds\n", count, loss, sec(clock()-time));
}
@@ -407,6 +469,7 @@
save_network(net, buff);
}
+/*
void train_nist_distributed(char *address)
{
srand(time(0));
@@ -428,6 +491,7 @@
printf("%d: Loss: %f, Time: %lf seconds\n", count, loss, (float)(end-start)/CLOCKS_PER_SEC);
}
}
+*/
void test_ensemble()
{
@@ -478,14 +542,33 @@
cvWaitKey(0);
}
-void test_gpu_net()
+void test_convolutional_layer()
{
+ network net = parse_network_cfg("cfg/nist_conv.cfg");
+ int size = get_network_input_size(net);
+ float *in = calloc(size, sizeof(float));
+ int i;
+ for(i = 0; i < size; ++i) in[i] = rand_normal();
+ float *in_gpu = cuda_make_array(in, size);
+ convolutional_layer layer = *(convolutional_layer *)net.layers[0];
+ int out_size = convolutional_out_height(layer)*convolutional_out_width(layer)*layer.batch;
+ cuda_compare(layer.output_gpu, layer.output, out_size, "nothing");
+ cuda_compare(layer.biases_gpu, layer.biases, layer.n, "biases");
+ cuda_compare(layer.filters_gpu, layer.filters, layer.n*layer.size*layer.size*layer.c, "filters");
+ bias_output(layer);
+ bias_output_gpu(layer);
+ cuda_compare(layer.output_gpu, layer.output, out_size, "biased output");
+}
+
+void test_correct_nist()
+{
+ network net = parse_network_cfg("cfg/nist_conv.cfg");
srand(222222);
- network net = parse_network_cfg("cfg/nist.cfg");
+ net = parse_network_cfg("cfg/nist_conv.cfg");
data train = load_categorical_data_csv("data/mnist/mnist_train.csv", 0, 10);
data test = load_categorical_data_csv("data/mnist/mnist_test.csv",0,10);
- translate_data_rows(train, -144);
- translate_data_rows(test, -144);
+ normalize_data_rows(train);
+ normalize_data_rows(test);
int count = 0;
int iters = 1000/net.batch;
@@ -496,11 +579,12 @@
float test_acc = network_accuracy(net, test);
printf("%d: Loss: %f, Test Acc: %f, Time: %lf seconds, LR: %f, Momentum: %f, Decay: %f\n", count, loss, test_acc,(float)(end-start)/CLOCKS_PER_SEC, net.learning_rate, net.momentum, net.decay);
}
+ save_network(net, "cfg/nist_gpu.cfg");
gpu_index = -1;
count = 0;
srand(222222);
- net = parse_network_cfg("cfg/nist.cfg");
+ net = parse_network_cfg("cfg/nist_conv.cfg");
while(++count <= 5){
clock_t start = clock(), end;
float loss = train_network_sgd(net, train, iters);
@@ -508,6 +592,7 @@
float test_acc = network_accuracy(net, test);
printf("%d: Loss: %f, Test Acc: %f, Time: %lf seconds, LR: %f, Momentum: %f, Decay: %f\n", count, loss, test_acc,(float)(end-start)/CLOCKS_PER_SEC, net.learning_rate, net.momentum, net.decay);
}
+ save_network(net, "cfg/nist_cpu.cfg");
}
void test_correct_alexnet()
@@ -519,11 +604,12 @@
clock_t time;
int count = 0;
network net;
+
+ srand(222222);
+ net = parse_network_cfg("cfg/net.cfg");
int imgs = net.batch;
count = 0;
- srand(222222);
- net = parse_network_cfg("cfg/net.cfg");
while(++count <= 5){
time=clock();
data train = load_data(paths, imgs, plist->size, labels, 1000, 256, 256);
@@ -552,6 +638,7 @@
}
}
+/*
void run_server()
{
srand(time(0));
@@ -572,6 +659,7 @@
printf("3\n");
printf("Transfered: %lf seconds\n", sec(clock()-time));
}
+*/
void del_arg(int argc, char **argv, int index)
{
@@ -605,6 +693,7 @@
int main(int argc, char **argv)
{
+ //test_convolutional_layer();
if(argc < 2){
fprintf(stderr, "usage: %s <function>\n", argv[0]);
return 0;
@@ -616,15 +705,15 @@
gpu_index = -1;
#else
if(gpu_index >= 0){
- cl_setup();
+ cudaSetDevice(gpu_index);
}
#endif
- if(0==strcmp(argv[1], "detection")) train_detection_net();
- else if(0==strcmp(argv[1], "cifar")) train_cifar10();
+ if(0==strcmp(argv[1], "cifar")) train_cifar10();
else if(0==strcmp(argv[1], "test_correct")) test_correct_alexnet();
+ else if(0==strcmp(argv[1], "test_correct_nist")) test_correct_nist();
else if(0==strcmp(argv[1], "test")) test_imagenet();
- else if(0==strcmp(argv[1], "server")) run_server();
+ //else if(0==strcmp(argv[1], "server")) run_server();
#ifdef GPU
else if(0==strcmp(argv[1], "test_gpu")) test_gpu_blas();
@@ -634,14 +723,16 @@
fprintf(stderr, "usage: %s <function> <filename>\n", argv[0]);
return 0;
}
+ else if(0==strcmp(argv[1], "detection")) train_detection_net(argv[2]);
else if(0==strcmp(argv[1], "nist")) train_nist(argv[2]);
else if(0==strcmp(argv[1], "train")) train_imagenet(argv[2]);
- else if(0==strcmp(argv[1], "client")) train_imagenet_distributed(argv[2]);
+ //else if(0==strcmp(argv[1], "client")) train_imagenet_distributed(argv[2]);
else if(0==strcmp(argv[1], "detect")) test_detection(argv[2]);
else if(0==strcmp(argv[1], "init")) test_init(argv[2]);
else if(0==strcmp(argv[1], "visualize")) test_visualize(argv[2]);
else if(0==strcmp(argv[1], "valid")) validate_imagenet(argv[2]);
else if(0==strcmp(argv[1], "testnist")) test_nist(argv[2]);
+ else if(0==strcmp(argv[1], "validetect")) validate_detection_net(argv[2]);
else if(argc < 4){
fprintf(stderr, "usage: %s <function> <filename> <filename>\n", argv[0]);
return 0;
--
Gitblit v1.10.0