From 729ce43e6ec45cfdb58e06e227428a0f81c5de0f Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Fri, 10 Jun 2016 00:20:31 +0000
Subject: [PATCH] Add HARDTAN activation, propagate gradient through binarized XNOR input, and use min-resize for classifier prediction

---
 src/network.c                |    2 +-
 src/tag.c                    |    2 +-
 src/activations.h            |   13 ++++++++++++-
 src/activation_kernels.cu    |   17 +++++++++++++++++
 src/classifier.c             |   10 ++++++++--
 src/activations.c            |    7 +++++++
 src/convolutional_kernels.cu |    7 ++++++-
 7 files changed, 52 insertions(+), 6 deletions(-)

diff --git a/src/activation_kernels.cu b/src/activation_kernels.cu
index 3dc3af0..362d5d7 100644
--- a/src/activation_kernels.cu
+++ b/src/activation_kernels.cu
@@ -7,6 +7,13 @@
 #include "cuda.h"
 }
 
+
+__device__ float hardtan_activate_kernel(float x)
+{
+    if (x < -1) return -1;
+    if (x > 1) return 1;
+    return x;
+}
 __device__ float linear_activate_kernel(float x){return x;}
 __device__ float logistic_activate_kernel(float x){return 1./(1. + exp(-x));}
 __device__ float loggy_activate_kernel(float x){return 2./(1. + exp(-x)) - 1;}
@@ -29,6 +36,12 @@
     else return (x - n) + floor(x/2.);
 }
  
+
+__device__ float hardtan_gradient_kernel(float x)
+{
+    if (x > -1 && x < 1) return 1;
+    return 0;
+}
 __device__ float linear_gradient_kernel(float x){return 1;}
 __device__ float logistic_gradient_kernel(float x){return (1-x)*x;}
 __device__ float loggy_gradient_kernel(float x)
@@ -74,6 +87,8 @@
             return plse_activate_kernel(x);
         case STAIR:
             return stair_activate_kernel(x);
+        case HARDTAN:
+            return hardtan_activate_kernel(x);
     }
     return 0;
 }
@@ -103,6 +118,8 @@
             return plse_gradient_kernel(x);
         case STAIR:
             return stair_gradient_kernel(x);
+        case HARDTAN:
+            return hardtan_gradient_kernel(x);
     }
     return 0;
 }
diff --git a/src/activations.c b/src/activations.c
index 6b98e1c..6ab4963 100644
--- a/src/activations.c
+++ b/src/activations.c
@@ -30,6 +30,8 @@
             return "leaky";
         case STAIR:
             return "stair";
+        case HARDTAN:
+            return "hardtan";
         default:
             break;
     }
@@ -44,6 +46,7 @@
     if (strcmp(s, "elu")==0) return ELU;
     if (strcmp(s, "relie")==0) return RELIE;
     if (strcmp(s, "plse")==0) return PLSE;
+    if (strcmp(s, "hardtan")==0) return HARDTAN;
     if (strcmp(s, "linear")==0) return LINEAR;
     if (strcmp(s, "ramp")==0) return RAMP;
     if (strcmp(s, "leaky")==0) return LEAKY;
@@ -78,6 +81,8 @@
             return plse_activate(x);
         case STAIR:
             return stair_activate(x);
+        case HARDTAN:
+            return hardtan_activate(x);
     }
     return 0;
 }
@@ -115,6 +120,8 @@
             return plse_gradient(x);
         case STAIR:
             return stair_gradient(x);
+        case HARDTAN:
+            return hardtan_gradient(x);
     }
     return 0;
 }
diff --git a/src/activations.h b/src/activations.h
index 05f7bca..fed2908 100644
--- a/src/activations.h
+++ b/src/activations.h
@@ -4,7 +4,7 @@
 #include "math.h"
 
 typedef enum{
-    LOGISTIC, RELU, RELIE, LINEAR, RAMP, TANH, PLSE, LEAKY, ELU, LOGGY, STAIR
+    LOGISTIC, RELU, RELIE, LINEAR, RAMP, TANH, PLSE, LEAKY, ELU, LOGGY, STAIR, HARDTAN
 }ACTIVATION;
 
 ACTIVATION get_activation(char *s);
@@ -25,6 +25,12 @@
     if (n%2 == 0) return floor(x/2.);
     else return (x - n) + floor(x/2.);
 }
+static inline float hardtan_activate(float x)
+{
+    if (x < -1) return -1;
+    if (x > 1) return 1;
+    return x;
+}
 static inline float linear_activate(float x){return x;}
 static inline float logistic_activate(float x){return 1./(1. + exp(-x));}
 static inline float loggy_activate(float x){return 2./(1. + exp(-x)) - 1;}
@@ -41,6 +47,11 @@
     return .125*x + .5;
 }
 
+static inline float hardtan_gradient(float x)
+{
+    if (x > -1 && x < 1) return 1;
+    return 0;
+}
 static inline float linear_gradient(float x){return 1;}
 static inline float logistic_gradient(float x){return (1-x)*x;}
 static inline float loggy_gradient(float x)
diff --git a/src/classifier.c b/src/classifier.c
index 5104608..24b28b5 100644
--- a/src/classifier.c
+++ b/src/classifier.c
@@ -477,6 +477,7 @@
     int *indexes = calloc(top, sizeof(int));
     char buff[256];
     char *input = buff;
+    int size = net.w;
     while(1){
         if(filename){
             strncpy(input, filename, 256);
@@ -487,8 +488,12 @@
             if(!input) return;
             strtok(input, "\n");
         }
-        image im = load_image_color(input, net.w, net.h);
-        float *X = im.data;
+        image im = load_image_color(input, 0, 0);
+        image r = resize_min(im, size);
+        resize_network(&net, r.w, r.h);
+        printf("%d %d\n", r.w, r.h);
+
+        float *X = r.data;
         time=clock();
         float *predictions = network_predict(net, X);
         top_predictions(net, top, indexes);
@@ -497,6 +502,7 @@
             int index = indexes[i];
             printf("%s: %f\n", names[index], predictions[index]);
         }
+        if(r.data != im.data) free_image(r);
         free_image(im);
         if (filename) break;
     }
diff --git a/src/convolutional_kernels.cu b/src/convolutional_kernels.cu
index cb50561..1de9dc0 100644
--- a/src/convolutional_kernels.cu
+++ b/src/convolutional_kernels.cu
@@ -142,6 +142,7 @@
     if(l.batch_normalize){
         backward_batchnorm_layer_gpu(l, state);
     }
+    float *original_input = state.input;
 
     if(l.xnor) state.input = l.binary_input_gpu;
 #ifdef CUDNN
@@ -176,6 +177,7 @@
                 l.dsrcTensorDesc,
                 state.delta);
         if(l.binary || l.xnor) swap_binary(&l);
+        if(l.xnor) gradient_array_ongpu(original_input, l.batch*l.c*l.h*l.w, HARDTAN, state.delta);
     }
 
 #else
@@ -197,7 +199,10 @@
             gemm_ongpu(1,0,n,k,m,1,a,n,b + i*k*m,k,0,c,k);
 
             col2im_ongpu(state.workspace, l.c,  l.h,  l.w,  l.size,  l.stride, l.pad, state.delta + i*l.c*l.h*l.w);
-            if(l.binary || l.xnor) swap_binary(&l);
+            if(l.binary || l.xnor) {
+                swap_binary(&l);
+            }
+            if(l.xnor) gradient_array_ongpu(original_input + i*l.c*l.h*l.w, l.c*l.h*l.w, HARDTAN, state.delta + i*l.c*l.h*l.w);
         }
     }
 #endif
diff --git a/src/network.c b/src/network.c
index b617f7e..2960d67 100644
--- a/src/network.c
+++ b/src/network.c
@@ -434,7 +434,7 @@
         net->workspace = cuda_make_array(0, (workspace_size-1)/sizeof(float)+1);
 #else
         free(net->workspace);
-        net->workspace = calloc(1, (workspace_size-1)/sizeof(float)+1);
+        net->workspace = calloc(1, workspace_size);
 #endif
     //fprintf(stderr, " Done!\n");
     return 0;
diff --git a/src/tag.c b/src/tag.c
index f97621c..e3e1707 100644
--- a/src/tag.c
+++ b/src/tag.c
@@ -125,7 +125,7 @@
             int index = indexes[i];
             printf("%.1f%%: %s\n", predictions[index]*100, names[index]);
         }
-        free_image(r);
+        if(r.data != im.data) free_image(r);
         free_image(im);
         if (filename) break;
     }

--
Gitblit v1.10.0