From 845ab7579685b6702c92c1088ec11e71bde51f3c Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Fri, 05 Aug 2016 22:27:07 +0000
Subject: [PATCH] classifier: threaded data loading, -clear flag, new label/valid commands
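
Summary of the changes in this patch (all visible in the diff below):

- train_classifier: load training data with nthreads loader threads, each
  filling its own buffer, then concat_datas the buffers into one batch per
  step; assert that batch*subdivisions divides evenly across threads. New
  clear flag (set by -clear on the command line) zeroes *net.seen so
  training restarts from batch 0.
- Save the .backup weights every 100 batches (keyed on get_current_batch)
  instead of keying off *net.seen.
- predict_classifier: load the image at its native size, resize_min to
  net.w, and resize_network to the resized dimensions before predicting.
- New label_classifier command: print the top-1 label for every image in
  the "test" list.
- Rename validate_classifier to validate_classifier_crop; the valid
  subcommand now runs validate_classifier_single, validcrop runs the old
  cropped path, and the separate validsingle alias is dropped.
- Call set_batch_network(&net, 1) after load_weights in the validation
  path, and only free resized/flipped images when they differ from the
  source image, since resize_min can return its input unchanged.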
---
src/classifier.c | 136 +++++++++++++++++++++++++++++++++++----------
 1 file changed, 105 insertions(+), 31 deletions(-)
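
Notes (ignored by git am): example invocations for the new and remapped
subcommands, assuming the usual "./darknet classifier <cmd> <data> <cfg>
[weights]" entry point; the .data/.cfg/.weights paths below are
placeholders, not files shipped with this patch.

    ./darknet classifier train cfg/imagenet1k.data cfg/darknet.cfg darknet.weights -clear
    ./darknet classifier label cfg/imagenet1k.data cfg/darknet.cfg darknet.weights
    ./darknet classifier validcrop cfg/imagenet1k.data cfg/darknet.cfg darknet.weights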
diff --git a/src/classifier.c b/src/classifier.c
index 7060c5e..608e3ab 100644
--- a/src/classifier.c
+++ b/src/classifier.c
@@ -3,6 +3,7 @@
#include "parser.h"
#include "option_list.h"
#include "blas.h"
+#include "assert.h"
#include "classifier.h"
#include <sys/time.h>
@@ -38,8 +39,11 @@
return options;
}
-void train_classifier(char *datacfg, char *cfgfile, char *weightfile)
+void train_classifier(char *datacfg, char *cfgfile, char *weightfile, int clear)
{
+ int nthreads = 2;
+ int i;
+
data_seed = time(0);
srand(time(0));
float avg_loss = -1;
@@ -49,8 +53,10 @@
if(weightfile){
load_weights(&net, weightfile);
}
+ if(clear) *net.seen = 0;
printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);
- int imgs = net.batch;
+ int imgs = net.batch*net.subdivisions/nthreads;
+ assert(net.batch*net.subdivisions % nthreads == 0);
list *options = read_data_cfg(datacfg);
@@ -65,9 +71,10 @@
printf("%d\n", plist->size);
int N = plist->size;
clock_t time;
- pthread_t load_thread;
- data train;
- data buffer;
+
+ pthread_t *load_threads = calloc(nthreads, sizeof(pthread_t));
+ data *trains = calloc(nthreads, sizeof(data));
+ data *buffers = calloc(nthreads, sizeof(data));
load_args args = {0};
args.w = net.w;
@@ -82,41 +89,54 @@
args.n = imgs;
args.m = N;
args.labels = labels;
- args.d = &buffer;
args.type = CLASSIFICATION_DATA;
- load_thread = load_data_in_thread(args);
+ for(i = 0; i < nthreads; ++i){
+ args.d = buffers + i;
+ load_threads[i] = load_data_in_thread(args);
+ }
+
int epoch = (*net.seen)/N;
while(get_current_batch(net) < net.max_batches || net.max_batches == 0){
time=clock();
- pthread_join(load_thread, 0);
- train = buffer;
+ for(i = 0; i < nthreads; ++i){
+ pthread_join(load_threads[i], 0);
+ trains[i] = buffers[i];
+ }
+ data train = concat_datas(trains, nthreads);
- load_thread = load_data_in_thread(args);
+ for(i = 0; i < nthreads; ++i){
+ args.d = buffers + i;
+ load_threads[i] = load_data_in_thread(args);
+ }
+
printf("Loaded: %lf seconds\n", sec(clock()-time));
time=clock();
-/*
- int u;
- for(u = 0; u < net.batch; ++u){
- image im = float_to_image(net.w, net.h, 3, train.X.vals[u]);
- show_image(im, "loaded");
- cvWaitKey(0);
- }
- */
+ /*
+ int u;
+ for(u = 0; u < net.batch; ++u){
+ image im = float_to_image(net.w, net.h, 3, train.X.vals[u]);
+ show_image(im, "loaded");
+ cvWaitKey(0);
+ }
+ */
float loss = train_network(net, train);
if(avg_loss == -1) avg_loss = loss;
avg_loss = avg_loss*.9 + loss*.1;
printf("%d, %.3f: %f, %f avg, %f rate, %lf seconds, %d images\n", get_current_batch(net), (float)(*net.seen)/N, loss, avg_loss, get_current_rate(net), sec(clock()-time), *net.seen);
free_data(train);
+ for(i = 0; i < nthreads; ++i){
+ free_data(trains[i]);
+ }
if(*net.seen/N > epoch){
epoch = *net.seen/N;
char buff[256];
sprintf(buff, "%s/%s_%d.weights",backup_directory,base, epoch);
save_weights(net, buff);
}
- if(*net.seen%100 == 0){
+ if(get_current_batch(net)%100 == 0){
char buff[256];
sprintf(buff, "%s/%s.backup",backup_directory,base);
save_weights(net, buff);
@@ -126,8 +146,14 @@
sprintf(buff, "%s/%s.weights", backup_directory, base);
save_weights(net, buff);
- pthread_join(load_thread, 0);
- free_data(buffer);
+ for(i = 0; i < nthreads; ++i){
+ pthread_join(load_threads[i], 0);
+ free_data(buffers[i]);
+ }
+ free(buffers);
+ free(trains);
+ free(load_threads);
+
free_network(net);
free_ptrs((void**)labels, classes);
free_ptrs((void**)paths, plist->size);
@@ -135,7 +161,7 @@
free(base);
}
-void validate_classifier(char *datacfg, char *filename, char *weightfile)
+void validate_classifier_crop(char *datacfg, char *filename, char *weightfile)
{
int i = 0;
network net = parse_network_cfg(filename);
@@ -337,10 +363,10 @@
{
int i, j;
network net = parse_network_cfg(filename);
- set_batch_network(&net, 1);
if(weightfile){
load_weights(&net, weightfile);
}
+ set_batch_network(&net, 1);
srand(time(0));
list *options = read_data_cfg(datacfg);
@@ -378,8 +404,8 @@
//cvWaitKey(0);
float *pred = network_predict(net, crop.data);
+ if(resized.data != im.data) free_image(resized);
free_image(im);
- free_image(resized);
free_image(crop);
top_k(pred, classes, topk, indexes);
@@ -441,7 +467,7 @@
flip_image(r);
p = network_predict(net, r.data);
axpy_cpu(classes, 1, p, 1, pred, 1);
- free_image(r);
+ if(r.data != im.data) free_image(r);
}
free_image(im);
top_k(pred, classes, topk, indexes);
@@ -476,6 +502,7 @@
int *indexes = calloc(top, sizeof(int));
char buff[256];
char *input = buff;
+ int size = net.w;
while(1){
if(filename){
strncpy(input, filename, 256);
@@ -486,8 +513,12 @@
if(!input) return;
strtok(input, "\n");
}
- image im = load_image_color(input, net.w, net.h);
- float *X = im.data;
+ image im = load_image_color(input, 0, 0);
+ image r = resize_min(im, size);
+ resize_network(&net, r.w, r.h);
+ printf("%d %d\n", r.w, r.h);
+
+ float *X = r.data;
time=clock();
float *predictions = network_predict(net, X);
top_predictions(net, top, indexes);
@@ -496,11 +527,52 @@
int index = indexes[i];
printf("%s: %f\n", names[index], predictions[index]);
}
+ if(r.data != im.data) free_image(r);
free_image(im);
if (filename) break;
}
}
+
+void label_classifier(char *datacfg, char *filename, char *weightfile)
+{
+ int i;
+ network net = parse_network_cfg(filename);
+ set_batch_network(&net, 1);
+ if(weightfile){
+ load_weights(&net, weightfile);
+ }
+ srand(time(0));
+
+ list *options = read_data_cfg(datacfg);
+
+ char *label_list = option_find_str(options, "names", "data/labels.list");
+ char *test_list = option_find_str(options, "test", "data/train.list");
+ int classes = option_find_int(options, "classes", 2);
+
+ char **labels = get_labels(label_list);
+ list *plist = get_paths(test_list);
+
+ char **paths = (char **)list_to_array(plist);
+ int m = plist->size;
+ free_list(plist);
+
+ for(i = 0; i < m; ++i){
+ image im = load_image_color(paths[i], 0, 0);
+ image resized = resize_min(im, net.w);
+ image crop = crop_image(resized, (resized.w - net.w)/2, (resized.h - net.h)/2, net.w, net.h);
+ float *pred = network_predict(net, crop.data);
+
+ if(resized.data != im.data) free_image(resized);
+ free_image(im);
+ free_image(crop);
+ int ind = max_index(pred, classes);
+
+ printf("%s\n", labels[ind]);
+ }
+}
+
+
void test_classifier(char *datacfg, char *cfgfile, char *weightfile, int target_layer)
{
int curr = 0;
@@ -649,6 +721,7 @@
}
int cam_index = find_int_arg(argc, argv, "-c", 0);
+ int clear = find_arg(argc, argv, "-clear");
char *data = argv[3];
char *cfg = argv[4];
char *weights = (argc > 5) ? argv[5] : 0;
@@ -656,13 +729,14 @@
char *layer_s = (argc > 7) ? argv[7]: 0;
int layer = layer_s ? atoi(layer_s) : -1;
if(0==strcmp(argv[2], "predict")) predict_classifier(data, cfg, weights, filename);
- else if(0==strcmp(argv[2], "train")) train_classifier(data, cfg, weights);
+ else if(0==strcmp(argv[2], "train")) train_classifier(data, cfg, weights, clear);
else if(0==strcmp(argv[2], "demo")) demo_classifier(data, cfg, weights, cam_index, filename);
else if(0==strcmp(argv[2], "test")) test_classifier(data, cfg, weights, layer);
- else if(0==strcmp(argv[2], "valid")) validate_classifier(data, cfg, weights);
- else if(0==strcmp(argv[2], "valid10")) validate_classifier_10(data, cfg, weights);
+ else if(0==strcmp(argv[2], "label")) label_classifier(data, cfg, weights);
+ else if(0==strcmp(argv[2], "valid")) validate_classifier_single(data, cfg, weights);
else if(0==strcmp(argv[2], "validmulti")) validate_classifier_multi(data, cfg, weights);
- else if(0==strcmp(argv[2], "validsingle")) validate_classifier_single(data, cfg, weights);
+ else if(0==strcmp(argv[2], "valid10")) validate_classifier_10(data, cfg, weights);
+ else if(0==strcmp(argv[2], "validcrop")) validate_classifier_crop(data, cfg, weights);
else if(0==strcmp(argv[2], "validfull")) validate_classifier_full(data, cfg, weights);
}
--
Gitblit v1.10.0