Compare commits
No commits in common. "d130c7cce11b487b566028b70ec8c3a87a5041aa" and "a8270914e057befc2733f7db70957fa6a9cd0e2e" have entirely different histories.
d130c7cce1...a8270914e0
5 changed files with 22 additions and 36 deletions
@@ -12,7 +12,7 @@ use rust_nn::layers::activation_layer::ActivationLayer;
 use rust_nn::layers::fc_layer::{FCLayer, Initializer};
 use rust_nn::Network;
 
-fn main() {
+fn main() -> Result<(), Box<dyn Error>> {
     // training data
     let training_interval = (0.0f64, 2.0f64 * PI);
     let steps = 100000;
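The switch to a `Result`-returning `main` is what lets the chart-drawing calls further down use `?` directly, replacing the removed `if let Ok(...)` wrapper around `draw_results`. A minimal, self-contained sketch of that pattern (the `parse` call is only a stand-in for any fallible operation):

use std::error::Error;

fn main() -> Result<(), Box<dyn Error>> {
    // Any error type implementing std::error::Error is boxed and
    // propagated out of main by the ? operator.
    let n: i32 = "42".parse()?;
    println!("{}", n);
    Ok(())
}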
@@ -66,25 +66,11 @@
     )));
 
     // train network on training data
-    network.fit(&x_train, &y_train, 100, 0.05, true);
+    network.fit(x_train, y_train, 100, 0.05, true);
 
     // predict test dataset
-    let y_test_pred = network.predict(&x_test);
+    let y_test_pred = network.predict(x_test.clone());
 
-    // show results
-    if let Ok(()) = draw_results(&training_interval, &x_test, &y_test_true, &y_test_pred) {
-        println!("results can be seen in ./examples/sine.png");
-    } else {
-        println!("failed to draw results");
-    }
-}
-
-fn draw_results(
-    training_interval: &(f64, f64),
-    x_test: &[Array1<f64>],
-    y_test_true: &[Array1<f64>],
-    y_test_pred: &[Array1<f64>],
-) -> Result<(), Box<dyn Error>> {
     // create the chart
     let buf = BitMapBackend::new("./examples/sine.png", (800, 600)).into_drawing_area();
     buf.fill(&WHITE)?;
@@ -45,10 +45,10 @@ fn main() {
     )));
 
     // train network on training data
-    network.fit(&x_train, &y_train, 1000, 0.1, false);
+    network.fit(x_train, y_train, 1000, 0.1, false);
 
     // print predictions
-    let y_test = network.predict(&x_test);
+    let y_test = network.predict(x_test.clone());
     println!("{}", x_test.get(0).unwrap());
     for i in 0..y_test.len() {
         print!("input: {}\t\t", x_test.get(i).unwrap());
@@ -5,7 +5,7 @@ use crate::functions::activation_functions::*;
 
 pub struct ActivationLayer {
     input: Array1<f64>,
-    // output: Array1<f64>,
+    output: Array1<f64>,
     activation: fn(&Array1<f64>) -> Array1<f64>,
     activation_prime: fn(&Array1<f64>) -> Array1<f64>,
 }
@@ -15,7 +15,7 @@ impl ActivationLayer {
         let (activation, activation_prime) = parse_type(activation_fn);
         ActivationLayer {
             input: arr1(&[]),
-            // output: arr1(&[]),
+            output: arr1(&[]),
             activation,
             activation_prime,
         }
@@ -25,13 +25,15 @@ impl ActivationLayer {
 impl Layer for ActivationLayer {
     fn forward_pass(&mut self, input: ArrayView1<f64>) -> Array1<f64> {
         self.input = input.to_owned();
-        // output isn't needed elsewhere
-        // self.output = (self.activation)(&self.input);
-        // self.output.clone()
-        (self.activation)(&self.input)
+        self.output = (self.activation)(&self.input);
+        self.output.clone()
     }
 
     fn backward_pass(&mut self, output_error: ArrayView1<f64>, _learning_rate: f64) -> Array1<f64> {
-        (self.activation_prime)(&self.input) * output_error
+        // (self.activation_prime)(&self.input).into_shape((1 as usize, output_error.len() as usize)).unwrap().dot(&output_error)
+        // (self.activation_prime)(&self.input) * &output_error
+        let mut temp = (self.activation_prime)(&self.input);
+        temp.zip_mut_with(&output_error, |x, y| *x *= y);
+        temp
     }
 }
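The rewritten backward_pass computes the element-wise (Hadamard) product of the activation derivative and the incoming error with ndarray's zip_mut_with, multiplying in place rather than through the `*` operator on mismatched operand types. A small stand-alone sketch of that call, with arbitrary values:

use ndarray::arr1;

fn main() {
    // Mirrors temp.zip_mut_with(&output_error, |x, y| *x *= y) from the
    // diff: each element of a is multiplied in place by the matching
    // element of b.
    let mut a = arr1(&[1.0, 2.0, 3.0]);
    let b = arr1(&[10.0, 20.0, 30.0]);
    a.zip_mut_with(&b, |x, y| *x *= y);
    assert_eq!(a, arr1(&[10.0, 40.0, 90.0]));
}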
@@ -40,7 +40,7 @@ pub struct FCLayer {
     weight_initializer: Initializer,
     bias_initializer: Initializer,
     input: Array1<f64>,
-    // output: Array1<f64>,
+    output: Array1<f64>,
     weights: Array2<f64>,
     biases: Array1<f64>,
 }
@@ -57,7 +57,7 @@ impl FCLayer {
             weight_initializer,
             bias_initializer,
             input: arr1(&[]),
-            // output: arr1(&[]),
+            output: arr1(&[]),
             weights: arr2(&[[]]),
             biases: arr1(&[]),
         }
@@ -77,10 +77,8 @@ impl Layer for FCLayer {
         }
 
         self.input = input.to_owned();
-        // output isn't needed elsewhere
-        // self.output = self.input.dot(&self.weights) + &self.biases;
-        // self.output.clone()
-        self.input.dot(&self.weights) + &self.biases
+        self.output = self.input.dot(&self.weights) + &self.biases;
+        self.output.clone()
     }
 
     fn backward_pass(&mut self, output_error: ArrayView1<f64>, learning_rate: f64) -> Array1<f64> {
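FCLayer::forward_pass now caches the affine result in self.output before returning a clone, but the computation itself is unchanged: a vector-matrix product plus a bias vector. A minimal sketch of that arithmetic with ndarray (shapes and values are illustrative only):

use ndarray::{arr1, arr2};

fn main() {
    // input.dot(&weights) + &biases, as in FCLayer::forward_pass:
    // a (2,) vector times a (2, 3) matrix yields a (3,) vector.
    let input = arr1(&[1.0, 2.0]);
    let weights = arr2(&[[0.5, -1.0, 0.0], [0.25, 0.0, 1.0]]);
    let biases = arr1(&[0.5, 0.5, 0.5]);
    let output = input.dot(&weights) + &biases;
    assert_eq!(output, arr1(&[1.5, -0.5, 2.5]));
}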
@@ -25,7 +25,7 @@ impl Network {
         self.layers.push(layer);
     }
 
-    pub fn predict(&mut self, inputs: &[Array1<f64>]) -> Vec<Array1<f64>> {
+    pub fn predict(&mut self, inputs: Vec<Array1<f64>>) -> Vec<Array1<f64>> {
         assert!(!inputs.is_empty());
         let mut result = vec![];
 
@@ -35,7 +35,7 @@ impl Network {
             for layer in &mut self.layers {
                 output = layer.forward_pass(output.view());
             }
-            result.push(output);
+            result.push(output.to_owned());
         }
 
         result
@@ -43,8 +43,8 @@ impl Network {
 
     pub fn fit(
         &mut self,
-        x_train: &[Array1<f64>],
-        y_train: &[Array1<f64>],
+        x_train: Vec<Array1<f64>>,
+        y_train: Vec<Array1<f64>>,
         epochs: usize,
         learning_rate: f64,
         trivial_optimize: bool,
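Across these hunks, predict and fit change from borrowing slices (&[Array1<f64>]) to taking Vec<Array1<f64>> by value, which is why the example call sites above now pass x_train/y_train directly and clone x_test. A minimal stand-in showing what that means for callers (Net here is a dummy type, not the real Network):

use ndarray::{arr1, Array1};

struct Net;

impl Net {
    // Same parameter style as the new signatures: the Vecs are consumed.
    fn fit(&mut self, _x_train: Vec<Array1<f64>>, _y_train: Vec<Array1<f64>>) {}
    fn predict(&mut self, inputs: Vec<Array1<f64>>) -> Vec<Array1<f64>> {
        inputs
    }
}

fn main() {
    let mut net = Net;
    let x_train = vec![arr1(&[0.0]), arr1(&[1.0])];
    let y_train = x_train.clone();
    net.fit(x_train, y_train); // x_train and y_train are moved here

    let x_test = vec![arr1(&[0.5])];
    let _pred = net.predict(x_test.clone()); // clone keeps x_test usable
    println!("{}", x_test[0]); // still valid only because of the clone
}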