From a0a2e6b50096c92de8cea2eba32a71537bc5f2c8 Mon Sep 17 00:00:00 2001
From: Edmond Yoo <hj3yoo@uwaterloo.ca>
Date: Sun, 16 Sep 2018 03:15:34 +0000
Subject: [PATCH] Merge branch 'master' of https://github.com/hj3yoo/darknet

---
 src/rnn_layer.h |   23 ++++++++++++-----------
 1 file changed, 12 insertions(+), 11 deletions(-)

diff --git a/src/rnn_layer.h b/src/rnn_layer.h
index 9e19cee..bb9478b 100644
--- a/src/rnn_layer.h
+++ b/src/rnn_layer.h
@@ -1,23 +1,24 @@
 
-#ifndef GRU_LAYER_H
-#define GRU_LAYER_H
+#ifndef RNN_LAYER_H
+#define RNN_LAYER_H
 
 #include "activations.h"
 #include "layer.h"
 #include "network.h"
+#define USET
 
-layer make_gru_layer(int batch, int inputs, int outputs, int steps, int batch_normalize);
+layer make_rnn_layer(int batch, int inputs, int hidden, int outputs, int steps, ACTIVATION activation, int batch_normalize, int log);
 
-void forward_gru_layer(layer l, network_state state);
-void backward_gru_layer(layer l, network_state state);
-void update_gru_layer(layer l, int batch, float learning_rate, float momentum, float decay);
+void forward_rnn_layer(layer l, network_state state);
+void backward_rnn_layer(layer l, network_state state);
+void update_rnn_layer(layer l, int batch, float learning_rate, float momentum, float decay);
 
 #ifdef GPU
-void forward_gru_layer_gpu(layer l, network_state state);
-void backward_gru_layer_gpu(layer l, network_state state);
-void update_gru_layer_gpu(layer l, int batch, float learning_rate, float momentum, float decay);
-void push_gru_layer(layer l);
-void pull_gru_layer(layer l);
+void forward_rnn_layer_gpu(layer l, network_state state);
+void backward_rnn_layer_gpu(layer l, network_state state);
+void update_rnn_layer_gpu(layer l, int batch, float learning_rate, float momentum, float decay);
+void push_rnn_layer(layer l);
+void pull_rnn_layer(layer l);
 #endif
 
 #endif

--
Gitblit v1.10.0