From 158bb1bee9951875dbe3474d84c6663431e18301 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Tue, 21 Oct 2014 21:49:18 +0000
Subject: [PATCH] softmax on gpu
---
src/cnn.c | 34 ++++++++++++++--------------------
1 file changed, 14 insertions(+), 20 deletions(-)
diff --git a/src/cnn.c b/src/cnn.c
index 472aa03..7e90a80 100644
--- a/src/cnn.c
+++ b/src/cnn.c
@@ -278,29 +278,22 @@
free_data(train);
}
-void train_full()
+void train_assira()
{
- network net = parse_network_cfg("cfg/imagenet.cfg");
+ network net = parse_network_cfg("cfg/assira.cfg");
+ int imgs = 1000/net.batch+1;
+ //imgs = 1;
srand(2222222);
int i = 0;
char *labels[] = {"cat","dog"};
- float lr = .00001;
- float momentum = .9;
- float decay = 0.01;
while(1){
i += 1000;
- data train = load_data_image_pathfile_random("images/assira/train.list", 1000, labels, 2, 256, 256);
- //image im = float_to_image(256, 256, 3,train.X.vals[0]);
- //visualize_network(net);
- //cvWaitKey(100);
- //show_image(im, "input");
- //cvWaitKey(100);
- //scale_data_rows(train, 1./255.);
+ data train = load_data_image_pathfile_random("data/assira/train.list", imgs*net.batch, labels, 2, 256, 256);
normalize_data_rows(train);
clock_t start = clock(), end;
- float loss = train_network_sgd(net, train, 1000);
+ float loss = train_network_sgd_gpu(net, train, imgs);
end = clock();
- printf("%d: %f, Time: %lf seconds, LR: %f, Momentum: %f, Decay: %f\n", i, loss, (float)(end-start)/CLOCKS_PER_SEC, lr, momentum, decay);
+ printf("%d: %f, Time: %lf seconds\n", i, loss, (float)(end-start)/CLOCKS_PER_SEC );
free_data(train);
if(i%10000==0){
char buff[256];
@@ -369,8 +362,8 @@
clock_t start = clock(), end;
float loss = train_network_sgd(net, train, iters);
end = clock();
- visualize_network(net);
- cvWaitKey(5000);
+ //visualize_network(net);
+ //cvWaitKey(5000);
//float test_acc = network_accuracy(net, test);
//printf("%d: Loss: %f, Test Acc: %f, Time: %lf seconds, LR: %f, Momentum: %f, Decay: %f\n", count, loss, test_acc,(float)(end-start)/CLOCKS_PER_SEC, net.learning_rate, net.momentum, net.decay);
@@ -378,7 +371,7 @@
float test_acc = network_accuracy(net, test);
printf("%d: Loss: %f, Test Acc: %f, Time: %lf seconds, LR: %f, Momentum: %f, Decay: %f\n", count, loss, test_acc,(float)(end-start)/CLOCKS_PER_SEC, net.learning_rate, net.momentum, net.decay);
char buff[256];
- sprintf(buff, "/home/pjreddie/cifar/cifar2_%d.cfg", count);
+ sprintf(buff, "/home/pjreddie/cifar/cifar10_2_%d.cfg", count);
save_network(net, buff);
}else{
printf("%d: Loss: %f, Time: %lf seconds, LR: %f, Momentum: %f, Decay: %f\n", count, loss, (float)(end-start)/CLOCKS_PER_SEC, net.learning_rate, net.momentum, net.decay);
@@ -444,7 +437,7 @@
int iters = 10000/net.batch;
while(++count <= 2000){
clock_t start = clock(), end;
- float loss = train_network_sgd(net, train, iters);
+ float loss = train_network_sgd_gpu(net, train, iters);
end = clock();
float test_acc = network_accuracy(net, test);
//float test_acc = 0;
@@ -902,7 +895,8 @@
int main(int argc, char *argv[])
{
- //train_full();
+ //test_blas();
+ train_assira();
//test_distribution();
//feenableexcept(FE_DIVBYZERO | FE_INVALID | FE_OVERFLOW);
@@ -917,7 +911,7 @@
//test_nist_single();
//test_nist();
//train_nist();
- test_convolutional_layer();
+ //test_convolutional_layer();
//test_col2im();
//test_cifar10();
//train_cifar10();
--
Gitblit v1.10.0
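
Note (not part of the patch): the GPU softmax named in the subject line is not visible in this hunk; the diff only switches the training entry points over to train_assira() and train_network_sgd_gpu(). As a rough sketch only, and not code from this commit, the computation such a kernel has to reproduce per output row is a numerically stable softmax, which in plain C looks like the following (the function and argument names here are hypothetical, not taken from cnn.c):

#include <math.h>

/* Illustrative only: numerically stable softmax over n activations.
 * Subtracting the row maximum before exponentiating keeps expf()
 * from overflowing for large logits; the result is unchanged because
 * the constant factor cancels in the normalization. */
void softmax_row(const float *input, int n, float *output)
{
    float largest = input[0];
    for (int i = 1; i < n; ++i) {
        if (input[i] > largest) largest = input[i];
    }
    float sum = 0;
    for (int i = 0; i < n; ++i) {
        output[i] = expf(input[i] - largest);
        sum += output[i];
    }
    for (int i = 0; i < n; ++i) {
        output[i] /= sum;
    }
}

A GPU version would typically launch one such reduction per row of the batch; the host-side loop above is only meant to pin down the arithmetic being ported.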