From 028696bf15efeca3acb3db8c42a96f7b9e0f55ff Mon Sep 17 00:00:00 2001
From: iovodov <b@ovdv.ru>
Date: Thu, 03 May 2018 13:33:46 +0000
Subject: [PATCH] Output improvements for detector results: When printing detector results, output was done in random order, making the results hard to interpret. Now: 1. Text output includes coordinates of rects (left, right, top, bottom in pixels) along with label and score 2. Text output is sorted by rect lefts to simplify finding the corresponding rects on the image 3. If several class probs are > thresh for some detection, the most probable is written first and the coordinates for the others are not repeated 4. Rects are imprinted in the image in order of their best class prob, so the most probable rects are always on top and not overlaid by less probable ones 5. The most probable label for a rect is always written first Also: 6. The message about low GPU memory includes the required amount
---
src/activations.c | 145 +++++++++++++++++++++---------------------------
 1 file changed, 63 insertions(+), 82 deletions(-)
diff --git a/src/activations.c b/src/activations.c
index 4232efa..0cbb2f5 100644
--- a/src/activations.c
+++ b/src/activations.c
@@ -8,16 +8,32 @@
char *get_activation_string(ACTIVATION a)
{
switch(a){
- case SIGMOID:
- return "sigmoid";
+ case LOGISTIC:
+ return "logistic";
+ case LOGGY:
+ return "loggy";
case RELU:
return "relu";
+ case ELU:
+ return "elu";
+ case RELIE:
+ return "relie";
case RAMP:
return "ramp";
case LINEAR:
return "linear";
case TANH:
return "tanh";
+ case PLSE:
+ return "plse";
+ case LEAKY:
+ return "leaky";
+ case STAIR:
+ return "stair";
+ case HARDTAN:
+ return "hardtan";
+ case LHTAN:
+ return "lhtan";
default:
break;
}
@@ -26,41 +42,52 @@
ACTIVATION get_activation(char *s)
{
- if (strcmp(s, "sigmoid")==0) return SIGMOID;
+ if (strcmp(s, "logistic")==0) return LOGISTIC;
+ if (strcmp(s, "loggy")==0) return LOGGY;
if (strcmp(s, "relu")==0) return RELU;
+ if (strcmp(s, "elu")==0) return ELU;
+ if (strcmp(s, "relie")==0) return RELIE;
+ if (strcmp(s, "plse")==0) return PLSE;
+ if (strcmp(s, "hardtan")==0) return HARDTAN;
+ if (strcmp(s, "lhtan")==0) return LHTAN;
if (strcmp(s, "linear")==0) return LINEAR;
if (strcmp(s, "ramp")==0) return RAMP;
+ if (strcmp(s, "leaky")==0) return LEAKY;
if (strcmp(s, "tanh")==0) return TANH;
+ if (strcmp(s, "stair")==0) return STAIR;
fprintf(stderr, "Couldn't find activation function %s, going with ReLU\n", s);
return RELU;
}
-float linear_activate(float x){return x;}
-float sigmoid_activate(float x){return 1./(1. + exp(-x));}
-float relu_activate(float x){return x*(x>0);}
-float ramp_activate(float x){return x*(x>0)+.1*x;}
-float tanh_activate(float x){return (exp(2*x)-1)/(exp(2*x)+1);}
-//float tanh_activate(float x){return x - (x*x*x)/3;}
-
-float linear_gradient(float x){return 1;}
-float sigmoid_gradient(float x){return (1-x)*x;}
-float relu_gradient(float x){return (x>0);}
-float ramp_gradient(float x){return (x>0)+.1;}
-float tanh_gradient(float x){return 1-x*x;}
-
float activate(float x, ACTIVATION a)
{
switch(a){
case LINEAR:
return linear_activate(x);
- case SIGMOID:
- return sigmoid_activate(x);
+ case LOGISTIC:
+ return logistic_activate(x);
+ case LOGGY:
+ return loggy_activate(x);
case RELU:
return relu_activate(x);
+ case ELU:
+ return elu_activate(x);
+ case RELIE:
+ return relie_activate(x);
case RAMP:
return ramp_activate(x);
+ case LEAKY:
+ return leaky_activate(x);
case TANH:
return tanh_activate(x);
+ case PLSE:
+ return plse_activate(x);
+ case STAIR:
+ return stair_activate(x);
+ case HARDTAN:
+ return hardtan_activate(x);
+ case LHTAN:
+ return lhtan_activate(x);
}
return 0;
}
@@ -78,14 +105,30 @@
switch(a){
case LINEAR:
return linear_gradient(x);
- case SIGMOID:
- return sigmoid_gradient(x);
+ case LOGISTIC:
+ return logistic_gradient(x);
+ case LOGGY:
+ return loggy_gradient(x);
case RELU:
return relu_gradient(x);
+ case ELU:
+ return elu_gradient(x);
+ case RELIE:
+ return relie_gradient(x);
case RAMP:
return ramp_gradient(x);
+ case LEAKY:
+ return leaky_gradient(x);
case TANH:
return tanh_gradient(x);
+ case PLSE:
+ return plse_gradient(x);
+ case STAIR:
+ return stair_gradient(x);
+ case HARDTAN:
+ return hardtan_gradient(x);
+ case LHTAN:
+ return lhtan_gradient(x);
}
return 0;
}
@@ -98,65 +141,3 @@
}
}
-#ifdef GPU
-
-#include "opencl.h"
-#include <math.h>
-
-cl_kernel get_activation_kernel()
-{
- static int init = 0;
- static cl_kernel kernel;
- if(!init){
- kernel = get_kernel("src/activations.cl", "activate_array", 0);
- init = 1;
- }
- return kernel;
-}
-
-void activate_array_ongpu(cl_mem x, int n, ACTIVATION a)
-{
- cl_kernel kernel = get_activation_kernel();
- cl_command_queue queue = cl.queue;
-
- cl_uint i = 0;
- cl.error = clSetKernelArg(kernel, i++, sizeof(x), (void*) &x);
- cl.error = clSetKernelArg(kernel, i++, sizeof(n), (void*) &n);
- cl.error = clSetKernelArg(kernel, i++, sizeof(a), (void*) &a);
- check_error(cl);
-
- size_t gsize = n;
-
- cl.error = clEnqueueNDRangeKernel(queue, kernel, 1, 0, &gsize, 0, 0, 0, 0);
- check_error(cl);
-}
-
-cl_kernel get_gradient_kernel()
-{
- static int init = 0;
- static cl_kernel kernel;
- if(!init){
- kernel = get_kernel("src/activations.cl", "gradient_array", 0);
- init = 1;
- }
- return kernel;
-}
-
-void gradient_array_ongpu(cl_mem x, int n, ACTIVATION a, cl_mem delta)
-{
- cl_kernel kernel = get_gradient_kernel();
- cl_command_queue queue = cl.queue;
-
- cl_uint i = 0;
- cl.error = clSetKernelArg(kernel, i++, sizeof(x), (void*) &x);
- cl.error = clSetKernelArg(kernel, i++, sizeof(n), (void*) &n);
- cl.error = clSetKernelArg(kernel, i++, sizeof(a), (void*) &a);
- cl.error = clSetKernelArg(kernel, i++, sizeof(delta), (void*) &delta);
- check_error(cl);
-
- size_t gsize = n;
-
- cl.error = clEnqueueNDRangeKernel(queue, kernel, 1, 0, &gsize, 0, 0, 0, 0);
- check_error(cl);
-}
-#endif
--
Gitblit v1.10.0