From b4b729a15e577c68f64e0ac69fb299de6f5f706c Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Thu, 17 Apr 2014 16:58:24 +0000
Subject: [PATCH] Merge branch 'master' of pjreddie.com:jnet

---
 src/network.c |  153 +++++++++++++++++++++++++++++++++++++++++---------
 1 file changed, 125 insertions(+), 28 deletions(-)

diff --git a/src/network.c b/src/network.c
index b2fc922..7d4b1fa 100644
--- a/src/network.c
+++ b/src/network.c
@@ -8,12 +8,14 @@
 #include "convolutional_layer.h"
 //#include "old_conv.h"
 #include "maxpool_layer.h"
+#include "normalization_layer.h"
 #include "softmax_layer.h"
 
-network make_network(int n)
+network make_network(int n, int batch)
 {
     network net;
     net.n = n;
+    net.batch = batch;
     net.layers = calloc(net.n, sizeof(void *));
     net.types = calloc(net.n, sizeof(LAYER_TYPE));
     net.outputs = 0;
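
make_network() now takes the batch size up front, so every layer can size its buffers for a whole minibatch at construction time. A minimal call-site sketch (the layer count and batch value are illustrative, not part of this patch):

    /* Sketch: batch is fixed when the network is created. */
    network net = make_network(4, 128);   /* 4 layers, minibatch of 128 */
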
@@ -25,10 +27,11 @@
 {
     int i;
     fprintf(fp, "[convolutional]\n");
-    if(first) fprintf(fp,   "height=%d\n"
+    if(first) fprintf(fp,   "batch=%d\n"
+                            "height=%d\n"
                             "width=%d\n"
                             "channels=%d\n",
-                            l->h, l->w, l->c);
+                            l->batch, l->h, l->w, l->c);
     fprintf(fp, "filters=%d\n"
                 "size=%d\n"
                 "stride=%d\n"
@@ -38,17 +41,28 @@
     fprintf(fp, "data=");
     for(i = 0; i < l->n; ++i) fprintf(fp, "%g,", l->biases[i]);
     for(i = 0; i < l->n*l->c*l->size*l->size; ++i) fprintf(fp, "%g,", l->filters[i]);
+    /*
+    int j,k;
+    for(i = 0; i < l->n; ++i) fprintf(fp, "%g,", l->biases[i]);
+    for(i = 0; i < l->n; ++i){
+        for(j = l->c-1; j >= 0; --j){
+            for(k = 0; k < l->size*l->size; ++k){
+                fprintf(fp, "%g,", l->filters[i*(l->c*l->size*l->size)+j*l->size*l->size+k]);
+            }
+        }
+    }
+    */
     fprintf(fp, "\n\n");
 }
 void print_connected_cfg(FILE *fp, connected_layer *l, int first)
 {
     int i;
     fprintf(fp, "[connected]\n");
-    if(first) fprintf(fp, "input=%d\n", l->inputs);
+    if(first) fprintf(fp, "batch=%d\ninput=%d\n", l->batch, l->inputs);
     fprintf(fp, "output=%d\n"
-                "activation=%s\n",
-                l->outputs,
-                get_activation_string(l->activation));
+            "activation=%s\n",
+            l->outputs,
+            get_activation_string(l->activation));
     fprintf(fp, "data=");
     for(i = 0; i < l->outputs; ++i) fprintf(fp, "%g,", l->biases[i]);
     for(i = 0; i < l->inputs*l->outputs; ++i) fprintf(fp, "%g,", l->weights[i]);
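
For orientation, these printers serialize the network to a plain-text cfg stream; a first convolutional section would come out roughly as below (values illustrative; fields the hunks elide are omitted here):

    [convolutional]
    batch=128
    height=256
    width=256
    channels=3
    filters=16
    size=5
    stride=2
    data=0.01,-0.02,...
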
@@ -58,17 +72,32 @@
 void print_maxpool_cfg(FILE *fp, maxpool_layer *l, int first)
 {
     fprintf(fp, "[maxpool]\n");
-    if(first) fprintf(fp,   "height=%d\n"
-                            "width=%d\n"
-                            "channels=%d\n",
-                            l->h, l->w, l->c);
+    if(first) fprintf(fp,   "batch=%d\n"
+            "height=%d\n"
+            "width=%d\n"
+            "channels=%d\n",
+            l->batch, l->h, l->w, l->c);
     fprintf(fp, "stride=%d\n\n", l->stride);
 }
 
+void print_normalization_cfg(FILE *fp, normalization_layer *l, int first)
+{
+    fprintf(fp, "[localresponsenormalization]\n");
+    if(first) fprintf(fp,   "batch=%d\n"
+            "height=%d\n"
+            "width=%d\n"
+            "channels=%d\n",
+            l->batch, l->h, l->w, l->c);
+    fprintf(fp, "size=%d\n"
+                "alpha=%g\n"
+                "beta=%g\n"
+                "kappa=%g\n\n", l->size, l->alpha, l->beta, l->kappa);
+}
+
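
The four parameters serialized above are the usual local-response-normalization constants; as a reference sketch (buffer layout and windowing are assumed, since the layer's definition is not in this patch), each output divides an activation by a power of a windowed sum of squares across channels:

    #include <math.h>
    /* b_i = a_i / (kappa + alpha * sum of a_j^2 over the size-wide
       channel window around i) ^ beta, one unit at a time. */
    float lrn_one(float a_i, float window_sum_sq,
                  float alpha, float beta, float kappa)
    {
        return a_i / powf(kappa + alpha * window_sum_sq, beta);
    }
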
 void print_softmax_cfg(FILE *fp, softmax_layer *l, int first)
 {
     fprintf(fp, "[softmax]\n");
-    if(first) fprintf(fp, "input=%d\n", l->inputs);
+    if(first) fprintf(fp, "batch=%d\ninput=%d\n", l->batch, l->inputs);
     fprintf(fp, "\n");
 }
 
@@ -85,6 +114,8 @@
             print_connected_cfg(fp, (connected_layer *)net.layers[i], i==0);
         else if(net.types[i] == MAXPOOL)
             print_maxpool_cfg(fp, (maxpool_layer *)net.layers[i], i==0);
+        else if(net.types[i] == NORMALIZATION)
+            print_normalization_cfg(fp, (normalization_layer *)net.layers[i], i==0);
         else if(net.types[i] == SOFTMAX)
             print_softmax_cfg(fp, (softmax_layer *)net.layers[i], i==0);
     }
@@ -115,6 +146,11 @@
             forward_maxpool_layer(layer, input);
             input = layer.output;
         }
+        else if(net.types[i] == NORMALIZATION){
+            normalization_layer layer = *(normalization_layer *)net.layers[i];
+            forward_normalization_layer(layer, input);
+            input = layer.output;
+        }
     }
 }
 
@@ -132,6 +168,9 @@
         else if(net.types[i] == SOFTMAX){
             //maxpool_layer layer = *(maxpool_layer *)net.layers[i];
         }
+        else if(net.types[i] == NORMALIZATION){
+            //normalization_layer layer = *(normalization_layer *)net.layers[i];
+        }
         else if(net.types[i] == CONNECTED){
             connected_layer layer = *(connected_layer *)net.layers[i];
             update_connected_layer(layer, step, momentum, decay);
@@ -153,6 +192,9 @@
     } else if(net.types[i] == CONNECTED){
         connected_layer layer = *(connected_layer *)net.layers[i];
         return layer.output;
+    } else if(net.types[i] == NORMALIZATION){
+        normalization_layer layer = *(normalization_layer *)net.layers[i];
+        return layer.output;
     }
     return 0;
 }
@@ -191,11 +233,11 @@
     float *out = get_network_output(net);
     int i, k = get_network_output_size(net);
     for(i = 0; i < k; ++i){
-        printf("%f, ", out[i]);
+        //printf("%f, ", out[i]);
         delta[i] = truth[i] - out[i];
         sum += delta[i]*delta[i];
     }
-    printf("\n");
+    //printf("\n");
     return sum;
 }
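
The loop above accumulates a sum-of-squared-errors cost while writing the output deltas; equivalently, as a standalone sketch:

    /* delta[i] = truth[i] - out[i] doubles as the error signal the
       backward pass consumes; the returned sum is the squared error. */
    float network_sse(const float *truth, const float *out, float *delta, int k)
    {
        float sum = 0;
        int i;
        for(i = 0; i < k; ++i){
            delta[i] = truth[i] - out[i];
            sum += delta[i]*delta[i];
        }
        return sum;
    }
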
 
@@ -230,6 +272,10 @@
             maxpool_layer layer = *(maxpool_layer *)net.layers[i];
             if(i != 0) backward_maxpool_layer(layer, prev_input, prev_delta);
         }
+        else if(net.types[i] == NORMALIZATION){
+            normalization_layer layer = *(normalization_layer *)net.layers[i];
+            if(i != 0) backward_normalization_layer(layer, prev_input, prev_delta);
+        }
         else if(net.types[i] == SOFTMAX){
             softmax_layer layer = *(softmax_layer *)net.layers[i];
             if(i != 0) backward_softmax_layer(layer, prev_input, prev_delta);
@@ -258,19 +304,26 @@
     int i;
     float error = 0;
     int correct = 0;
+    int pos = 0;
     for(i = 0; i < n; ++i){
         int index = rand()%d.X.rows;
-        error += train_network_datum(net, d.X.vals[index], d.y.vals[index], step, momentum, decay);
+        float err = train_network_datum(net, d.X.vals[index], d.y.vals[index], step, momentum, decay);
         float *y = d.y.vals[index];
         int class = get_predicted_class_network(net);
         correct += (y[class]?1:0);
+        if(y[1]){
+            error += err;
+            ++pos;
+        }
+
+
         //printf("%d %f %f\n", i,net.output[0], d.y.vals[index][0]);
         //if((i+1)%10 == 0){
         //    printf("%d: %f\n", (i+1), (float)correct/(i+1));
         //}
     }
-    printf("Accuracy: %f\n",(float) correct/n);
-    return error/n;
+    //printf("Accuracy: %f\n",(float) correct/n);
+    return pos ? error/pos : 0;
 }
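
The returned cost is now averaged over positive examples only, i.e. rows where y[1] is nonzero; reading index 1 as the positive class of a two-class one-hot encoding is an inference from this hunk. Since a small random sample can contain no positives, the division is guarded above; as a sketch:

    /* pos counts examples with y[1] set; avoid 0/0 when none were drawn. */
    return pos ? error/pos : 0;
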
 float train_network_batch(network net, data d, int n, float step, float momentum,float decay)
 {
@@ -304,7 +357,7 @@
     }
     visualize_network(net);
     cvWaitKey(100);
-    printf("Accuracy: %f\n", (float)correct/d.X.rows);
+    fprintf(stderr, "Accuracy: %f\n", (float)correct/d.X.rows);
 }
 
 int get_network_output_size_layer(network net, int i)
@@ -330,29 +383,63 @@
     return 0;
 }
 
-int reset_network_size(network net, int h, int w, int c)
+/*
+int resize_network(network net, int h, int w, int c)
+{
+    int i;
+    for (i = 0; i < net.n; ++i){
+        if(net.types[i] == CONVOLUTIONAL){
+            convolutional_layer *layer = (convolutional_layer *)net.layers[i];
+            layer->h = h;
+            layer->w = w;
+            layer->c = c;
+            image output = get_convolutional_image(*layer);
+            h = output.h;
+            w = output.w;
+            c = output.c;
+        }
+        else if(net.types[i] == MAXPOOL){
+            maxpool_layer *layer = (maxpool_layer *)net.layers[i];
+            layer->h = h;
+            layer->w = w;
+            layer->c = c;
+            image output = get_maxpool_image(*layer);
+            h = output.h;
+            w = output.w;
+            c = output.c;
+        }
+    }
+    return 0;
+}
+*/
+
+int resize_network(network net, int h, int w, int c)
 {
     int i;
     for (i = 0; i < net.n; ++i){
         if(net.types[i] == CONVOLUTIONAL){
             convolutional_layer *layer = (convolutional_layer *)net.layers[i];
-            layer->h = h;
-            layer->w = w;
-            layer->c = c;
+            resize_convolutional_layer(layer, h, w, c);
             image output = get_convolutional_image(*layer);
             h = output.h;
             w = output.w;
             c = output.c;
-        }
-        else if(net.types[i] == MAXPOOL){
+        }else if(net.types[i] == MAXPOOL){
             maxpool_layer *layer = (maxpool_layer *)net.layers[i];
-            layer->h = h;
-            layer->w = w;
-            layer->c = c;
+            resize_maxpool_layer(layer, h, w, c);
             image output = get_maxpool_image(*layer);
             h = output.h;
             w = output.w;
             c = output.c;
+        }else if(net.types[i] == NORMALIZATION){
+            normalization_layer *layer = (normalization_layer *)net.layers[i];
+            resize_normalization_layer(layer, h, w, c);
+            image output = get_normalization_image(*layer);
+            h = output.h;
+            w = output.w;
+            c = output.c;
+        }else{
+            error("Cannot resize this type of layer");
         }
     }
     return 0;
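
resize_network() now delegates to per-layer resize helpers whose definitions sit outside this patch; a plausible contract for one of them, strictly as an assumption, is:

    /* Assumed shape of resize_maxpool_layer (not defined in this patch):
       record the new input dims and regrow the output buffer to match.
       Output extent is written as h/stride for brevity; the layer's own
       rounding rule applies. */
    void resize_maxpool_layer(maxpool_layer *layer, int h, int w, int c)
    {
        layer->h = h;
        layer->w = w;
        layer->c = c;
        layer->output = realloc(layer->output, layer->batch
                * (h/layer->stride) * (w/layer->stride) * c * sizeof(float));
    }
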
@@ -374,6 +461,10 @@
         maxpool_layer layer = *(maxpool_layer *)net.layers[i];
         return get_maxpool_image(layer);
     }
+    else if(net.types[i] == NORMALIZATION){
+        normalization_layer layer = *(normalization_layer *)net.layers[i];
+        return get_normalization_image(layer);
+    }
     return make_empty_image(0,0,0);
 }
 
@@ -389,13 +480,18 @@
 
 void visualize_network(network net)
 {
+    image *prev = 0;
     int i;
     char buff[256];
     for(i = 0; i < net.n; ++i){
         sprintf(buff, "Layer %d", i);
         if(net.types[i] == CONVOLUTIONAL){
             convolutional_layer layer = *(convolutional_layer *)net.layers[i];
-            visualize_convolutional_layer(layer, buff);
+            prev = visualize_convolutional_layer(layer, buff, prev);
+        }
+        if(net.types[i] == NORMALIZATION){
+            normalization_layer layer = *(normalization_layer *)net.layers[i];
+            visualize_normalization_layer(layer, buff);
         }
     } 
 }
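
visualize_convolutional_layer() now takes the previous layer's images and returns its own, so successive layers' visualizations can be composed through the prev pointer. The assumed new prototype (its definition is in another file):

    /* Assumed prototype change: returns this layer's filter images so the
       next layer's call can reuse them via the prev argument. */
    image *visualize_convolutional_layer(convolutional_layer layer,
            char *window, image *prev_filters);
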
@@ -467,3 +563,4 @@
     return acc;
 }
 
+
