cargo clippy

commit a8270914e0
parent 2f3745a31c

5 changed files with 15 additions and 15 deletions

@@ -96,7 +96,7 @@ fn main() -> Result<(), Box<dyn Error>> {
         .draw_series(LineSeries::new(data1, &RED))
         .unwrap()
         .label("true values")
-        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 1, y)], &RED));
+        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 1, y)], RED));
 
     // add the second plot
     let data2: Vec<(f64, f64)> = x_test

@@ -108,7 +108,7 @@ fn main() -> Result<(), Box<dyn Error>> {
         .draw_series(LineSeries::new(data2, &BLUE))
         .unwrap()
         .label("predicted values")
-        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 1, y)], &BLUE));
+        .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 1, y)], BLUE));
 
     Ok(())
 }

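The only change in these two hunks drops the `&` on the color constants passed to the legend closures: the plotters color constants are `Copy` and the style parameter is taken through a conversion trait, so the borrow was an extra indirection that clippy objects to. A minimal sketch of the same pattern, with a made-up `Rgb` type and `paint` function standing in for the plotters API:

```rust
// Stand-in for an API that accepts its argument through a conversion trait,
// the way plotters' style parameters do.
#[derive(Clone, Copy, Debug)]
struct Rgb(u8, u8, u8);

const RED: Rgb = Rgb(255, 0, 0);

// From<&Rgb> lets callers pass a reference, but for a Copy type that only
// adds a borrow which is dereferenced again right away.
impl From<&Rgb> for Rgb {
    fn from(c: &Rgb) -> Self {
        *c
    }
}

fn paint(color: impl Into<Rgb>) -> Rgb {
    color.into()
}

fn main() {
    let borrowed = paint(&RED); // compiles, but the `&` buys nothing
    let by_value = paint(RED);  // what the commit switches to
    println!("{borrowed:?} {by_value:?}");
}
```
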
@@ -55,6 +55,6 @@ fn main() {
         let mut prediction = y_test.get(i).unwrap().to_owned();
         // comment the following line to see the exact predictions
         prediction.map_mut(|x| *x = x.round());
-        print!("prediction: {}\n", prediction);
+        println!("prediction: {prediction}");
     }
 }

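Here a `print!` call with a trailing `"\n"` becomes `println!`, and the argument is inlined into the format string (captured identifiers in format strings have been available since Rust 1.58). A standalone before/after, assuming a plain `f64` in place of the crate's prediction array:

```rust
fn main() {
    let prediction = 0.9837_f64;

    // Before: print! with a trailing "\n" re-implements println!.
    print!("prediction: {}\n", prediction);

    // After: println! supplies the newline and the variable is captured
    // directly in the format string.
    println!("prediction: {prediction}");
}
```
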
@@ -9,12 +9,12 @@ pub enum Type {
     LeakyRelu,
 }
 
-pub fn parse_type(
-    t: Type,
-) -> (
+type ActFuncTuple = (
     fn(&Array1<f64>) -> Array1<f64>,
     fn(&Array1<f64>) -> Array1<f64>,
-) {
+);
+
+pub fn parse_type(t: Type) -> ActFuncTuple {
     match t {
         Type::Identity => (identity, identity_prime),
         Type::Logistic => (logistic, logistic_prime),

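The refactor above is the usual way to satisfy clippy's complaint about a long tuple-of-function-pointers return type (its `type_complexity` lint): name the tuple, then return the alias. A reduced, self-contained version of the same shape, using scalar functions instead of `Array1<f64>` (the names below are illustrative, not the crate's):

```rust
// Before, the signature spelled the tuple out inline, e.g.
//   fn parse_type(t: Type) -> (fn(f64) -> f64, fn(f64) -> f64)
// Naming the pair keeps the signature short and reusable.
type ActPair = (fn(f64) -> f64, fn(f64) -> f64);

enum Type {
    Identity,
    Logistic,
}

fn identity(x: f64) -> f64 { x }
fn identity_prime(_x: f64) -> f64 { 1.0 }
fn logistic(x: f64) -> f64 { 1.0 / (1.0 + (-x).exp()) }
fn logistic_prime(x: f64) -> f64 { logistic(x) * (1.0 - logistic(x)) }

// Same shape as the commit's parse_type: (activation, derivative).
fn parse_type(t: Type) -> ActPair {
    match t {
        Type::Identity => (identity, identity_prime),
        Type::Logistic => (logistic, logistic_prime),
    }
}

fn main() {
    for t in [Type::Identity, Type::Logistic] {
        let (f, f_prime) = parse_type(t);
        println!("f(0.0) = {}, f'(0.0) = {}", f(0.0), f_prime(0.0));
    }
}
```
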
@@ -67,7 +67,7 @@ pub fn tanh(matrix: &Array1<f64>) -> Array1<f64> {
 pub fn tanh_prime(matrix: &Array1<f64>) -> Array1<f64> {
     let mut result = matrix.clone();
     for x in result.iter_mut() {
-        *x = 1.0 as f64 - (*x).tanh().pow(2);
+        *x = 1.0 - (*x).tanh().pow(2);
     }
     result
 }

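The second activation hunk only drops a redundant `1.0 as f64` cast; the literal is already inferred as `f64`. For context, the loop computes the derivative tanh'(x) = 1 - tanh(x)^2 element by element. A scalar sketch with plain `f64` and std's `powi` (the crate itself calls `pow` on array elements):

```rust
fn tanh_prime(x: f64) -> f64 {
    // d/dx tanh(x) = 1 - tanh(x)^2; no `as f64` cast is needed on the literal.
    1.0 - x.tanh().powi(2)
}

fn main() {
    println!("{}", tanh_prime(0.0)); // 1 (tanh is steepest at the origin)
    println!("{}", tanh_prime(2.0)); // ~0.07 (tanh is nearly flat by x = 2)
}
```
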
@@ -5,12 +5,12 @@ pub enum Type {
     MAE,
 }
 
-pub fn parse_type(
-    t: Type,
-) -> (
+type LossFuncTuple = (
     fn(ArrayView1<f64>, ArrayView1<f64>) -> f64,
     fn(ArrayView1<f64>, ArrayView1<f64>) -> Array1<f64>,
-) {
+);
+
+pub fn parse_type(t: Type) -> LossFuncTuple {
     match t {
         Type::MSE => (mse, mse_prime),
         Type::MAE => (mae, mae_prime),

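Same refactor as in the activation module: the pair of loss-function pointers gets a named alias, `LossFuncTuple`, and `parse_type` returns it. A hedged sketch of how such a pair is used, with plain slices in place of `ArrayView1`/`Array1` and an illustrative MSE (the real `mse`/`mae` bodies are not part of this diff):

```rust
// (loss, gradient-of-loss-w.r.t.-predictions), mirroring LossFuncTuple.
type ScalarLossPair = (
    fn(&[f64], &[f64]) -> f64,
    fn(&[f64], &[f64]) -> Vec<f64>,
);

fn mse(y_true: &[f64], y_pred: &[f64]) -> f64 {
    let n = y_true.len() as f64;
    y_true
        .iter()
        .zip(y_pred)
        .map(|(t, p)| (t - p).powi(2))
        .sum::<f64>()
        / n
}

fn mse_prime(y_true: &[f64], y_pred: &[f64]) -> Vec<f64> {
    let n = y_true.len() as f64;
    y_true
        .iter()
        .zip(y_pred)
        .map(|(t, p)| 2.0 * (p - t) / n)
        .collect()
}

fn parse_loss() -> ScalarLossPair {
    (mse, mse_prime)
}

fn main() {
    let (loss, grad) = parse_loss();
    let y_true = [1.0, 0.0, 1.0];
    let y_pred = [0.8, 0.2, 0.9];
    println!("mse  = {:.3}", loss(&y_true, &y_pred));
    println!("grad = {:?}", grad(&y_true, &y_pred));
}
```
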
@@ -26,12 +26,12 @@ impl Network {
     }
 
     pub fn predict(&mut self, inputs: Vec<Array1<f64>>) -> Vec<Array1<f64>> {
-        assert!(inputs.len() > 0);
+        assert!(!inputs.is_empty());
         let mut result = vec![];
 
         for input in inputs.iter() {
             let mut output = Array1::default(inputs[0].raw_dim());
-            output.assign(&input);
+            output.assign(input);
             for layer in &mut self.layers {
                 output = layer.forward_pass(output.view());
             }

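Two lints are fixed in the `predict` hunk: the length check becomes `!inputs.is_empty()` (clippy's `len_zero`), and `assign` now receives `input` directly, because `inputs.iter()` already yields references and the extra `&` only borrowed a borrow. The next hunk applies the same `is_empty` change to the training method's input check. A reduced sketch with `Vec<f64>` standing in for `Array1<f64>` and a hypothetical `assign` helper:

```rust
// Stand-in for Array1::assign: copy the source values into the output buffer.
fn assign(dst: &mut [f64], src: &[f64]) {
    dst.copy_from_slice(src);
}

fn predict(inputs: Vec<Vec<f64>>) -> Vec<Vec<f64>> {
    // Clippy's len_zero lint prefers is_empty() over comparing len() with 0.
    assert!(!inputs.is_empty());
    let mut result = vec![];

    for input in inputs.iter() {
        let mut output = vec![0.0; inputs[0].len()];
        // `input` is already a &Vec<f64>; writing `assign(&mut output, &input)`
        // would hand over a reference to a reference for no benefit.
        assign(&mut output, input);
        result.push(output);
    }
    result
}

fn main() {
    println!("{:?}", predict(vec![vec![1.0, 2.0], vec![3.0, 4.0]]));
}
```
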
@@ -49,7 +49,7 @@ impl Network {
         learning_rate: f64,
         trivial_optimize: bool,
     ) {
-        assert!(x_train.len() > 0);
+        assert!(!x_train.is_empty());
         assert!(x_train.len() == y_train.len());
         let num_samples = x_train.len();
 