From 2f62fe33c913cd9484fe7f2486889d12292c66e0 Mon Sep 17 00:00:00 2001
From: Joseph Redmon <pjreddie@gmail.com>
Date: Sat, 07 Feb 2015 02:53:53 +0000
Subject: [PATCH] saving weight files as binaries, hell yeah
---
src/parser.c | 80 +++++++++++++++++++++++++++++++++++++++-
 1 file changed, 78 insertions(+), 2 deletions(-)
diff --git a/src/parser.c b/src/parser.c
index a00feec..6a107cc 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -103,7 +103,7 @@
     parse_data(weights, layer->filters, c*n*size*size);
     parse_data(biases, layer->biases, n);
 #ifdef GPU
-    push_convolutional_layer(*layer);
+    if(weights || biases) push_convolutional_layer(*layer);
 #endif
     option_unused(options);
     return layer;
@@ -137,7 +137,7 @@
     parse_data(biases, layer->biases, output);
     parse_data(weights, layer->weights, input*output);
 #ifdef GPU
-    push_connected_layer(*layer);
+    if(weights || biases) push_connected_layer(*layer);
 #endif
     option_unused(options);
     return layer;
@@ -597,6 +597,82 @@
     fprintf(fp, "\n");
 }
+void save_weights(network net, char *filename)
+{
+    printf("Saving weights to %s\n", filename);
+ FILE *fp = fopen(filename, "w");
+    if(!fp) file_error(filename);
+
+    fwrite(&net.learning_rate, sizeof(float), 1, fp);
+    fwrite(&net.momentum, sizeof(float), 1, fp);
+    fwrite(&net.decay, sizeof(float), 1, fp);
+    fwrite(&net.seen, sizeof(int), 1, fp);
+
+    int i;
+    for(i = 0; i < net.n; ++i){
+        if(net.types[i] == CONVOLUTIONAL){
+            convolutional_layer layer = *(convolutional_layer *) net.layers[i];
+            #ifdef GPU
+            if(gpu_index >= 0){
+                pull_convolutional_layer(layer);
+            }
+            #endif
+            int num = layer.n*layer.c*layer.size*layer.size;
+            fwrite(layer.biases, sizeof(float), layer.n, fp);
+            fwrite(layer.filters, sizeof(float), num, fp);
+        }
+        if(net.types[i] == CONNECTED){
+            connected_layer layer = *(connected_layer *) net.layers[i];
+            #ifdef GPU
+            if(gpu_index >= 0){
+                pull_connected_layer(layer);
+            }
+            #endif
+            fwrite(layer.biases, sizeof(float), layer.outputs, fp);
+            fwrite(layer.weights, sizeof(float), layer.outputs*layer.inputs, fp);
+        }
+    }
+    fclose(fp);
+}
+
+void load_weights(network *net, char *filename)
+{
+    printf("Loading weights from %s\n", filename);
+ FILE *fp = fopen(filename, "r");
+    if(!fp) file_error(filename);
+
+    fread(&net->learning_rate, sizeof(float), 1, fp);
+    fread(&net->momentum, sizeof(float), 1, fp);
+    fread(&net->decay, sizeof(float), 1, fp);
+    fread(&net->seen, sizeof(int), 1, fp);
+    set_learning_network(net, net->learning_rate, net->momentum, net->decay);
+
+    int i;
+    for(i = 0; i < net->n; ++i){
+        if(net->types[i] == CONVOLUTIONAL){
+            convolutional_layer layer = *(convolutional_layer *) net->layers[i];
+            int num = layer.n*layer.c*layer.size*layer.size;
+            fread(layer.biases, sizeof(float), layer.n, fp);
+            fread(layer.filters, sizeof(float), num, fp);
+            #ifdef GPU
+            if(gpu_index >= 0){
+                push_convolutional_layer(layer);
+            }
+            #endif
+        }
+        if(net->types[i] == CONNECTED){
+            connected_layer layer = *(connected_layer *) net->layers[i];
+            fread(layer.biases, sizeof(float), layer.outputs, fp);
+            fread(layer.weights, sizeof(float), layer.outputs*layer.inputs, fp);
+            #ifdef GPU
+            if(gpu_index >= 0){
+                push_connected_layer(layer);
+            }
+            #endif
+        }
+    }
+    fclose(fp);
+}
 void save_network(network net, char *filename)
 {
--
Gitblit v1.10.0
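
Note on the file layout this patch writes: save_weights emits a small header (learning_rate, momentum and decay as floats, then seen as an int), followed by each layer's biases and then its filters/weights as raw floats, in layer order. Everything is written in host byte order, so the file is not portable across endianness. Below is a minimal reader sketch; it is not part of the patch, it only parses the header, and the per-layer element counts would have to come from the matching .cfg file.

#include <stdio.h>

int main(int argc, char **argv)
{
    if(argc != 2){
        fprintf(stderr, "usage: %s <weights file>\n", argv[0]);
        return 1;
    }
    FILE *fp = fopen(argv[1], "rb");
    if(!fp){
        perror(argv[1]);
        return 1;
    }

    /* Header written by save_weights: 3 floats + 1 int, host byte order. */
    float learning_rate, momentum, decay;
    int seen;
    if(fread(&learning_rate, sizeof(float), 1, fp) != 1 ||
       fread(&momentum, sizeof(float), 1, fp) != 1 ||
       fread(&decay, sizeof(float), 1, fp) != 1 ||
       fread(&seen, sizeof(int), 1, fp) != 1){
        fprintf(stderr, "short read on header\n");
        fclose(fp);
        return 1;
    }
    printf("learning_rate=%g momentum=%g decay=%g seen=%d\n",
            learning_rate, momentum, decay, seen);

    /* After the header each layer contributes its biases and then its
       filters/weights as raw floats; their counts come from the network
       .cfg, so this sketch stops here. */
    fclose(fp);
    return 0;
}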