            /* point state.delta at the previous layer's delta buffer so this
             * layer's backward() writes its input gradient there */
            state.delta = prev.delta;
        }
        layer l = net.layers[i];
        if (l.stopbackward) break;    /* stop propagating below this layer */
        l.backward(l, state);
    }
}
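
/* A minimal sketch (an assumption, not code from this file) of how the
 * backward pass above is usually driven: run the forward pass, then
 * backpropagate, then apply weight updates. The network_state fields mirror
 * the usage visible above; treat the exact setup as illustrative. */
static float train_step_sketch(network net, float *x, float *y)
{
    network_state state = {0};
    state.net = net;      /* assumed field; some forks omit it */
    state.index = 0;
    state.input = x;      /* batch input */
    state.truth = y;      /* ground truth consumed by the cost layers */
    state.delta = 0;      /* no gradient flows into the raw input buffer */
    state.train = 1;
    forward_network(net, state);
    backward_network(net, state);   /* the loop shown above */
    update_network(net);
    return get_network_cost(net);
}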

        }else if(l.type == COST){
            resize_cost_layer(&l, inputs);
        }else{
            fprintf(stderr, "Resizing type %d \n", (int)l.type);
            error("Cannot resize this type of layer");
        }
        /* the workspace is shared, so track the largest size any layer needs */
        if(l.workspace_size > workspace_size) workspace_size = l.workspace_size;

        free_layer(net.layers[i]);
    }
    free(net.layers);
#ifdef GPU
    /* the workspace lives on the device when a GPU is selected */
    if (gpu_index >= 0) cuda_free(net.workspace);
    else free(net.workspace);
    /* free the device buffers first, then the host-side pointer holders */
    if (*net.input_gpu) cuda_free(*net.input_gpu);
    if (*net.truth_gpu) cuda_free(*net.truth_gpu);
    if (net.input_gpu) free(net.input_gpu);
    if (net.truth_gpu) free(net.truth_gpu);
#else
    free(net.workspace);
#endif
}
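
/* A minimal lifecycle sketch (an assumption, not code from this file): build a
 * network from a cfg file, run one forward pass, then release everything with
 * free_network() above. parse_network_cfg, load_weights and network_predict
 * are the usual darknet entry points; exact signatures vary between forks. */
int predict_once_sketch(char *cfgfile, char *weightfile, float *input)
{
    network net = parse_network_cfg(cfgfile);   /* allocates layers and the shared workspace */
    load_weights(&net, weightfile);

    float *out = network_predict(net, input);   /* forward pass only */
    int ok = (out != 0);

    free_network(net);                          /* teardown shown above */
    return ok;
}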