From d8adaf8ea6a31a380f6bf1fe65e88b661d3bb51e Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Fri, 21 Oct 2016 20:16:43 +0000
Subject: [PATCH] tree stuff
---
src/layer.h | 102 ++++++++++++++++++++++++++++++++++++++++++++-------
1 file changed, 88 insertions(+), 14 deletions(-)
diff --git a/src/layer.h b/src/layer.h
index fc76234..341e58a 100644
--- a/src/layer.h
+++ b/src/layer.h
@@ -2,6 +2,10 @@
#define BASE_LAYER_H
#include "activations.h"
+#include "stddef.h"
+#include "tree.h"
+
+struct network_state;
struct layer;
typedef struct layer layer;
@@ -22,7 +26,15 @@
LOCAL,
SHORTCUT,
ACTIVE,
- RNN
+ RNN,
+ GRU,
+ CRNN,
+ BATCHNORM,
+ NETWORK,
+ XNOR,
+ REGION,
+ REORG,
+ BLANK
} LAYER_TYPE;
typedef enum{
@@ -33,7 +45,14 @@
LAYER_TYPE type;
ACTIVATION activation;
COST_TYPE cost_type;
+ void (*forward) (struct layer, struct network_state);
+ void (*backward) (struct layer, struct network_state);
+ void (*update) (struct layer, int, float, float, float);
+ void (*forward_gpu) (struct layer, struct network_state);
+ void (*backward_gpu) (struct layer, struct network_state);
+ void (*update_gpu) (struct layer, int, float, float, float);
int batch_normalize;
+ int shortcut;
int batch;
int forced;
int flipped;
@@ -43,6 +62,7 @@
int h,w,c;
int out_h, out_w, out_c;
int n;
+ int max_boxes;
int groups;
int size;
int side;
@@ -52,13 +72,16 @@
int flip;
int index;
int binary;
+ int xnor;
int steps;
int hidden;
+ float dot;
float angle;
float jitter;
float saturation;
float exposure;
float shift;
+ float ratio;
int softmax;
int classes;
int coords;
@@ -68,6 +91,10 @@
int does_cost;
int joint;
int noadjust;
+ int reorg;
+ int log;
+
+ tree *softmax_tree;
float alpha;
float beta;
@@ -77,6 +104,7 @@
float object_scale;
float noobject_scale;
float class_scale;
+ int random;
int dontload;
int dontloadscales;
@@ -88,11 +116,17 @@
int *indexes;
float *rand;
float *cost;
- float *filters;
- float *filter_updates;
+ char *cweights;
float *state;
+ float *prev_state;
+ float *forgot_state;
+ float *forgot_delta;
+ float *state_delta;
- float *binary_filters;
+ float *concat;
+ float *concat_delta;
+
+ float *binary_weights;
float *biases;
float *bias_updates;
@@ -128,17 +162,50 @@
struct layer *self_layer;
struct layer *output_layer;
+ struct layer *input_gate_layer;
+ struct layer *state_gate_layer;
+ struct layer *input_save_layer;
+ struct layer *state_save_layer;
+ struct layer *input_state_layer;
+ struct layer *state_state_layer;
+
+ struct layer *input_z_layer;
+ struct layer *state_z_layer;
+
+ struct layer *input_r_layer;
+ struct layer *state_r_layer;
+
+ struct layer *input_h_layer;
+ struct layer *state_h_layer;
+
+ float *z_cpu;
+ float *r_cpu;
+ float *h_cpu;
+
+ float *binary_input;
+
+ size_t workspace_size;
+
#ifdef GPU
+ float *z_gpu;
+ float *r_gpu;
+ float *h_gpu;
+
int *indexes_gpu;
+ float * prev_state_gpu;
+ float * forgot_state_gpu;
+ float * forgot_delta_gpu;
float * state_gpu;
- float * filters_gpu;
- float * filter_updates_gpu;
+ float * state_delta_gpu;
+ float * gate_gpu;
+ float * gate_delta_gpu;
+ float * save_gpu;
+ float * save_delta_gpu;
+ float * concat_gpu;
+ float * concat_delta_gpu;
- float *binary_filters_gpu;
- float *mean_filters_gpu;
-
- float * spatial_mean_gpu;
- float * spatial_variance_gpu;
+ float *binary_input_gpu;
+ float *binary_weights_gpu;
float * mean_gpu;
float * variance_gpu;
@@ -146,9 +213,6 @@
float * rolling_mean_gpu;
float * rolling_variance_gpu;
- float * spatial_mean_delta_gpu;
- float * spatial_variance_delta_gpu;
-
float * variance_delta_gpu;
float * mean_delta_gpu;
@@ -170,6 +234,16 @@
float * rand_gpu;
float * squared_gpu;
float * norms_gpu;
+ #ifdef CUDNN
+ cudnnTensorDescriptor_t srcTensorDesc, dstTensorDesc;
+ cudnnTensorDescriptor_t dsrcTensorDesc, ddstTensorDesc;
+ cudnnFilterDescriptor_t weightDesc;
+ cudnnFilterDescriptor_t dweightDesc;
+ cudnnConvolutionDescriptor_t convDesc;
+ cudnnConvolutionFwdAlgo_t fw_algo;
+ cudnnConvolutionBwdDataAlgo_t bd_algo;
+ cudnnConvolutionBwdFilterAlgo_t bf_algo;
+ #endif
#endif
};
--
Gitblit v1.10.0