Joseph Redmon
2015-01-14 08b757a0bf76efe8c76b453063a1bb19315bcaa6
Stable, needs to be way faster
4 files modified
44 lines changed
src/cnn.c 6 ●●●● patch | view | raw | blame | history
src/network.c 29 ●●●●● patch | view | raw | blame | history
src/network.h 1 ●●●● patch | view | raw | blame | history
src/network_gpu.c 8 ●●●● patch | view | raw | blame | history
src/cnn.c
@@ -105,7 +105,7 @@
        time=clock();
        float loss = train_network(net, train);
        avg_loss = avg_loss*.9 + loss*.1;
        printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), i*imgs*net.batch);
        printf("%d: %f, %f avg, %lf seconds, %d images\n", i, loss, avg_loss, sec(clock()-time), i*imgs);
        if(i%100==0){
            char buff[256];
            sprintf(buff, "/home/pjreddie/imagenet_backup/detnet_%d.cfg", i);
@@ -213,7 +213,7 @@
    set_learning_network(&net, net.learning_rate, 0, net.decay);
    printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);
    int imgs = 1024;
    int i = 77700;
    int i = 0;
    char **labels = get_labels("/home/pjreddie/data/imagenet/cls.labels.list");
    list *plist = get_paths("/data/imagenet/cls.train.list");
    char **paths = (char **)list_to_array(plist);
@@ -240,7 +240,7 @@
        free_data(train);
        if(i%100==0){
            char buff[256];
            sprintf(buff, "/home/pjreddie/imagenet_backup/net_%d.cfg", i);
            sprintf(buff, "/home/pjreddie/imagenet_backup/alexnet_%d.cfg", i);
            save_network(net, buff);
        }
    }
src/network.c
@@ -15,6 +15,35 @@
#include "softmax_layer.h"
#include "dropout_layer.h"
/* Return the human-readable name for a layer type.
 * The returned pointer is a string literal (static storage); callers
 * must not modify or free it. Unrecognized types map to "none". */
char *get_layer_string(LAYER_TYPE a)
{
    char *name = "none";
    switch(a){
        case CONVOLUTIONAL: name = "convolutional"; break;
        case CONNECTED:     name = "connected";     break;
        case MAXPOOL:       name = "maxpool";       break;
        case SOFTMAX:       name = "softmax";       break;
        case NORMALIZATION: name = "normalization"; break;
        case DROPOUT:       name = "dropout";       break;
        case FREEWEIGHT:    name = "freeweight";    break;
        case CROP:          name = "crop";          break;
        case COST:          name = "cost";          break;
        default:            break;
    }
    return name;
}
network make_network(int n, int batch)
{
    network net;
src/network.h
@@ -41,6 +41,7 @@
#endif
void compare_networks(network n1, network n2, data d);
char *get_layer_string(LAYER_TYPE a);
network make_network(int n, int batch);
void forward_network(network net, float *input, float *truth, int train);
src/network_gpu.c
@@ -24,7 +24,7 @@
{
    int i;
    for(i = 0; i < net.n; ++i){
        clock_t time = clock();
        //clock_t time = clock();
        if(net.types[i] == CONVOLUTIONAL){
            convolutional_layer layer = *(convolutional_layer *)net.layers[i];
            forward_convolutional_layer_gpu(layer, input);
@@ -61,7 +61,7 @@
            input = layer.output_cl;
        }
        check_error(cl);
        //printf("Forw %d %f\n", i, sec(clock() - time));
        //printf("Forward %d %s %f\n", i, get_layer_string(net.types[i]), sec(clock() - time));
    }
}
@@ -71,7 +71,7 @@
    cl_mem prev_input;
    cl_mem prev_delta;
    for(i = net.n-1; i >= 0; --i){
        clock_t time = clock();
        //clock_t time = clock();
        if(i == 0){
            prev_input = input;
            prev_delta = 0;
@@ -104,7 +104,7 @@
            backward_softmax_layer_gpu(layer, prev_delta);
        }
        check_error(cl);
        //printf("Back %d %f\n", i, sec(clock() - time));
        //printf("Backward %d %s %f\n", i, get_layer_string(net.types[i]), sec(clock() - time));
    }
}