Removed some no longer used code

lluni 2023-01-15 23:24:19 +01:00
parent 961626616f
commit 18eb691588
Signed by: lluni
GPG key ID: ACEEB468BC325D35

@@ -74,25 +74,6 @@ impl Layer for FCLayer {
     }
     fn backward_pass(&mut self, output_error: ArrayView1<f64>, learning_rate: f64) -> Array1<f64> {
-        //let input_error = output_error.dot(&self.weights.clone().reversed_axes());
-        /* let input_error = stack(Axis(0), &vec![output_error; self.num_neurons]).unwrap().dot(&self.weights.clone().reversed_axes());
-        // let weights_error = self.input.clone().into_shape((1 as usize, self.num_neurons as usize)).unwrap().dot(&output_error);
-        // let weights_error = self.input.clone().reversed_axes().dot(&output_error);
-        // let mut weights_error = self.input.clone();
-        // weights_error.zip_mut_with(&output_error, |x, y| *x *= y);
-        let weights_error = self.input.clone().t().dot(&output_error.broadcast((self.input.len(),)).unwrap());
-        self.weights = &self.weights + learning_rate * weights_error;
-        self.biases = &self.biases + learning_rate * &output_error;
-        let len = input_error.len();
-        let a = input_error.into_shape((len, )).unwrap();
-        a */
-        /* let delta_weights = &self.output.t() * &output_error;
-        let delta_biases = output_error.sum_axis(Axis(0));
-        self.weights = &self.weights + learning_rate * delta_weights;
-        self.biases = &self.biases + learning_rate * delta_biases;
-        output_error.dot(&self.weights.t()) */
         let input_error = output_error.dot(&self.weights.t());
         let delta_weights =
             self.input.to_owned().into_shape((self.input.len(), 1usize)).unwrap()
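For context, the lines kept after this cleanup implement the usual fully-connected backward pass: the error passed to the previous layer is dE/dx = dE/dy · Wᵀ, and the weight gradient is the outer product of the cached forward-pass input with the output error. Below is a minimal, self-contained sketch of that update with ndarray; the free-function form, parameter layout, and the sign of the update step are assumptions for illustration, not the repository's exact code (the removed comments used a plus sign, so the sign depends on how output_error is defined upstream).

use ndarray::{Array1, Array2, ArrayView1};

// Sketch of a fully-connected layer's backward pass (assumed shapes:
// input (n_in,), weights (n_in, n_out), biases and output_error (n_out,)).
fn backward_pass_sketch(
    input: &Array1<f64>,           // x cached during the forward pass
    weights: &mut Array2<f64>,     // W
    biases: &mut Array1<f64>,      // b
    output_error: ArrayView1<f64>, // dE/dy
    learning_rate: f64,
) -> Array1<f64> {
    let (n_in, n_out) = (input.len(), output_error.len());

    // Error propagated to the previous layer: dE/dx = dE/dy · Wᵀ
    let input_error = output_error.dot(&weights.t());

    // Weight gradient as an outer product: dE/dW[i][j] = x[i] * dE/dy[j]
    let input_col = input.view().into_shape((n_in, 1)).unwrap();
    let error_row = output_error.to_owned().into_shape((1, n_out)).unwrap();
    let delta_weights = input_col.dot(&error_row);

    // Plain gradient step (sign convention assumed here)
    *weights = &*weights - &(delta_weights * learning_rate);
    *biases = &*biases - &(output_error.to_owned() * learning_rate);

    input_error
}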