From a8270914e057befc2733f7db70957fa6a9cd0e2e Mon Sep 17 00:00:00 2001
From: lluni
Date: Wed, 1 Feb 2023 16:10:56 +0100
Subject: [PATCH] cargo clippy

---
 examples/example_sine.rs              |  4 ++--
 examples/example_xor.rs               |  2 +-
 src/functions/activation_functions.rs | 10 +++++-----
 src/functions/loss_functions.rs       |  8 ++++----
 src/lib.rs                            |  6 +++---
 5 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/examples/example_sine.rs b/examples/example_sine.rs
index 206a0ff..18d6945 100644
--- a/examples/example_sine.rs
+++ b/examples/example_sine.rs
@@ -96,7 +96,7 @@ fn main() -> Result<(), Box<dyn Error>> {
         .draw_series(LineSeries::new(data1, &RED))
         .unwrap()
         .label("true values")
-        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 1, y)], &RED));
+        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 1, y)], RED));
 
     // add the second plot
     let data2: Vec<(f64, f64)> = x_test
@@ -108,7 +108,7 @@ fn main() -> Result<(), Box<dyn Error>> {
         .draw_series(LineSeries::new(data2, &BLUE))
         .unwrap()
         .label("predicted values")
-        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 1, y)], &BLUE));
+        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 1, y)], BLUE));
 
     Ok(())
 }
diff --git a/examples/example_xor.rs b/examples/example_xor.rs
index 9934aff..88863e4 100644
--- a/examples/example_xor.rs
+++ b/examples/example_xor.rs
@@ -55,6 +55,6 @@ fn main() {
         let mut prediction = y_test.get(i).unwrap().to_owned();
         // comment the following line to see the exact predictions
         prediction.map_mut(|x| *x = x.round());
-        print!("prediction: {}\n", prediction);
+        println!("prediction: {prediction}");
     }
 }
diff --git a/src/functions/activation_functions.rs b/src/functions/activation_functions.rs
index bafd96c..e133544 100644
--- a/src/functions/activation_functions.rs
+++ b/src/functions/activation_functions.rs
@@ -9,12 +9,12 @@ pub enum Type {
     LeakyRelu,
 }
 
-pub fn parse_type(
-    t: Type,
-) -> (
+type ActFuncTuple = (
     fn(&Array1<f64>) -> Array1<f64>,
     fn(&Array1<f64>) -> Array1<f64>,
-) {
+);
+
+pub fn parse_type(t: Type) -> ActFuncTuple {
     match t {
         Type::Identity => (identity, identity_prime),
         Type::Logistic => (logistic, logistic_prime),
@@ -67,7 +67,7 @@ pub fn tanh(matrix: &Array1<f64>) -> Array1<f64> {
 pub fn tanh_prime(matrix: &Array1<f64>) -> Array1<f64> {
     let mut result = matrix.clone();
     for x in result.iter_mut() {
-        *x = 1.0 as f64 - (*x).tanh().pow(2);
+        *x = 1.0 - (*x).tanh().pow(2);
     }
     result
 }
diff --git a/src/functions/loss_functions.rs b/src/functions/loss_functions.rs
index 4eda8fe..a5d8a94 100644
--- a/src/functions/loss_functions.rs
+++ b/src/functions/loss_functions.rs
@@ -5,12 +5,12 @@ pub enum Type {
     MAE,
 }
 
-pub fn parse_type(
-    t: Type,
-) -> (
+type LossFuncTuple = (
     fn(ArrayView1<f64>, ArrayView1<f64>) -> f64,
     fn(ArrayView1<f64>, ArrayView1<f64>) -> Array1<f64>,
-) {
+);
+
+pub fn parse_type(t: Type) -> LossFuncTuple {
     match t {
         Type::MSE => (mse, mse_prime),
         Type::MAE => (mae, mae_prime),
diff --git a/src/lib.rs b/src/lib.rs
index f757376..e9e06b4 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -26,12 +26,12 @@ impl Network {
     }
 
     pub fn predict(&mut self, inputs: Vec<Array1<f64>>) -> Vec<Array1<f64>> {
-        assert!(inputs.len() > 0);
+        assert!(!inputs.is_empty());
         let mut result = vec![];
        for input in inputs.iter() {
             let mut output = Array1::default(inputs[0].raw_dim());
-            output.assign(&input);
+            output.assign(input);
             for layer in &mut self.layers {
                 output = layer.forward_pass(output.view());
             }
             result.push(output);
@@ -49,7 +49,7 @@ impl Network {
         learning_rate: f64,
         trivial_optimize: bool,
     ) {
-        assert!(x_train.len() > 0);
+        assert!(!x_train.is_empty());
         assert!(x_train.len() == y_train.len());
 
         let num_samples = x_train.len();
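
Note on the main lint addressed above (clippy::type_complexity): the tuple-of-function-pointer
return types are factored into the ActFuncTuple and LossFuncTuple aliases so the parse_type
signatures stay readable while callers are unaffected. A minimal sketch of how a caller consumes
the aliased pair; this is illustrative only and not part of the patch, and the demo function plus
the crate::functions::activation_functions module path are assumptions based on the file layout:

    use ndarray::{array, Array1};
    use crate::functions::activation_functions::{parse_type, Type};

    fn demo() {
        // parse_type returns the aliased (activation, derivative) pair;
        // both entries are plain fn pointers: fn(&Array1<f64>) -> Array1<f64>.
        let (f, f_prime) = parse_type(Type::Identity);
        let x: Array1<f64> = array![0.0, 0.5, 1.0];
        let y = f(&x);        // forward activation
        let dy = f_prime(&x); // derivative used in the backward pass
        println!("{y} {dy}");
    }

Because the alias names a pair of fn pointers rather than closures, the tuple stays Copy and
needs no boxing; swapping activation functions at runtime remains a cheap table lookup.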