From c604f2d9947413b75e5b35f6997458f1f8f89166 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Sun, 31 Jan 2016 23:52:03 +0000
Subject: [PATCH] Add log activation option to rnn_layer and clean up layer construction logging
---
src/rnn_layer.c | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/src/rnn_layer.c b/src/rnn_layer.c
index 6358285..e58e0a4 100644
--- a/src/rnn_layer.c
+++ b/src/rnn_layer.c
@@ -11,9 +11,9 @@
#include <string.h>
-layer make_rnn_layer(int batch, int inputs, int hidden, int outputs, int steps, ACTIVATION activation, int batch_normalize)
+layer make_rnn_layer(int batch, int inputs, int hidden, int outputs, int steps, ACTIVATION activation, int batch_normalize, int log)
{
- printf("%d %d\n", batch, steps);
+ fprintf(stderr, "RNN Layer: %d inputs, %d outputs\n", inputs, outputs);
batch = batch / steps;
layer l = {0};
l.batch = batch;
@@ -25,14 +25,17 @@
l.state = calloc(batch*hidden, sizeof(float));
l.input_layer = malloc(sizeof(layer));
+ fprintf(stderr, "\t\t");
*(l.input_layer) = make_connected_layer(batch*steps, inputs, hidden, activation, batch_normalize);
l.input_layer->batch = batch;
l.self_layer = malloc(sizeof(layer));
- *(l.self_layer) = make_connected_layer(batch*steps, hidden, hidden, activation, batch_normalize);
+ fprintf(stderr, "\t\t");
+ *(l.self_layer) = make_connected_layer(batch*steps, hidden, hidden, (log==2)?LOGGY:(log==1?LOGISTIC:activation), batch_normalize);
l.self_layer->batch = batch;
l.output_layer = malloc(sizeof(layer));
+ fprintf(stderr, "\t\t");
*(l.output_layer) = make_connected_layer(batch*steps, hidden, outputs, activation, batch_normalize);
l.output_layer->batch = batch;
@@ -46,7 +49,6 @@
l.delta_gpu = l.output_layer->delta_gpu;
#endif
- fprintf(stderr, "RNN Layer: %d inputs, %d outputs\n", inputs, outputs);
return l;
}
--
Gitblit v1.10.0