Layers don't need to store the forward pass output

lluni 2023-02-04 17:56:30 +01:00
parent 16b6072d80
commit d130c7cce1
Signed by: lluni
GPG key ID: ACEEB468BC325D35
2 changed files with 12 additions and 8 deletions
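
For orientation, here is a minimal sketch of the Layer trait both changed files implement. The trait declaration itself is not part of this diff, so everything beyond the two method signatures visible in the hunks below is an assumption:

use ndarray::{Array1, ArrayView1};

// Assumed shape of the Layer trait (only the signatures are confirmed by the
// impl blocks in this commit): forward_pass computes and returns the output
// directly, and only the cached input is needed later by backward_pass.
pub trait Layer {
    fn forward_pass(&mut self, input: ArrayView1<f64>) -> Array1<f64>;
    fn backward_pass(&mut self, output_error: ArrayView1<f64>, learning_rate: f64) -> Array1<f64>;
}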


@@ -5,7 +5,7 @@ use crate::functions::activation_functions::*;
 pub struct ActivationLayer {
     input: Array1<f64>,
-    output: Array1<f64>,
+    // output: Array1<f64>,
     activation: fn(&Array1<f64>) -> Array1<f64>,
     activation_prime: fn(&Array1<f64>) -> Array1<f64>,
 }
@@ -15,7 +15,7 @@ impl ActivationLayer {
         let (activation, activation_prime) = parse_type(activation_fn);
         ActivationLayer {
             input: arr1(&[]),
-            output: arr1(&[]),
+            // output: arr1(&[]),
             activation,
             activation_prime,
         }
@@ -25,8 +25,10 @@ impl ActivationLayer {
 impl Layer for ActivationLayer {
     fn forward_pass(&mut self, input: ArrayView1<f64>) -> Array1<f64> {
         self.input = input.to_owned();
-        self.output = (self.activation)(&self.input);
-        self.output.clone()
+        // output isn't needed elsewhere
+        // self.output = (self.activation)(&self.input);
+        // self.output.clone()
+        (self.activation)(&self.input)
     }
     fn backward_pass(&mut self, output_error: ArrayView1<f64>, _learning_rate: f64) -> Array1<f64> {
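
Why it is safe to stop caching the output here: the activation layer's backward pass only needs the input stored during forward_pass. A minimal sketch of that computation, written as a hypothetical free function rather than the repository's actual backward_pass body:

use ndarray::{Array1, ArrayView1};

// Hypothetical stand-in for ActivationLayer::backward_pass:
// dE/dx = f'(x) ⊙ dE/dy, so only the cached input is required.
fn activation_backward(
    input: &Array1<f64>,
    activation_prime: fn(&Array1<f64>) -> Array1<f64>,
    output_error: ArrayView1<f64>,
) -> Array1<f64> {
    activation_prime(input) * &output_error
}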


@@ -40,7 +40,7 @@ pub struct FCLayer {
     weight_initializer: Initializer,
     bias_initializer: Initializer,
     input: Array1<f64>,
-    output: Array1<f64>,
+    // output: Array1<f64>,
     weights: Array2<f64>,
     biases: Array1<f64>,
 }
@@ -57,7 +57,7 @@ impl FCLayer {
             weight_initializer,
             bias_initializer,
             input: arr1(&[]),
-            output: arr1(&[]),
+            // output: arr1(&[]),
             weights: arr2(&[[]]),
             biases: arr1(&[]),
         }
@@ -77,8 +77,10 @@ impl Layer for FCLayer {
         }
         self.input = input.to_owned();
-        self.output = self.input.dot(&self.weights) + &self.biases;
-        self.output.clone()
+        // output isn't needed elsewhere
+        // self.output = self.input.dot(&self.weights) + &self.biases;
+        // self.output.clone()
+        self.input.dot(&self.weights) + &self.biases
     }
     fn backward_pass(&mut self, output_error: ArrayView1<f64>, learning_rate: f64) -> Array1<f64> {
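
The same reasoning applies to the fully connected layer: everything in the backward pass can be derived from the cached input, the current weights and biases, and the incoming error, never from the stored forward output. A hedged sketch under that assumption, again as a free-function stand-in rather than the repository's actual backward_pass:

use ndarray::{Array1, Array2, ArrayView1, Axis};

// Hypothetical stand-in for FCLayer::backward_pass: gradients are built from
// the cached input and the incoming error alone; the forward output is unused.
fn fc_backward(
    input: &Array1<f64>,
    weights: &mut Array2<f64>,
    biases: &mut Array1<f64>,
    output_error: ArrayView1<f64>,
    learning_rate: f64,
) -> Array1<f64> {
    // error propagated to the previous layer: dE/dx = dE/dy · Wᵀ
    // (computed before the weights are updated)
    let input_error = output_error.dot(&weights.t());
    // weight gradient: dE/dW = x ⊗ dE/dy, as an (n,1)·(1,m) product
    let weights_error = input
        .view()
        .insert_axis(Axis(1))
        .dot(&output_error.view().insert_axis(Axis(0)));
    // plain gradient-descent update of the parameters
    *weights -= &(&weights_error * learning_rate);
    *biases -= &(&output_error * learning_rate);
    input_error
}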