master

1
.gitignore vendored

@ -0,0 +1 @@
/target

@ -0,0 +1,5 @@
target/*
target/
output/*
output/
Cargo.lock

@ -0,0 +1,6 @@
{
"rust-analyzer.linkedProjects": [
".\\Cargo.toml"
],
"rust-analyzer.showUnlinkedFileNotification": false
}

7
Cargo.lock generated

@ -0,0 +1,7 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "Neur"
version = "0.1.0"

@ -0,0 +1,11 @@
[package]
name = "Neur"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
rand = "0.8.5"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"

@ -0,0 +1,27 @@
use std::f64::consts::E;
#[derive(Clone)]
pub struct Activation<'a> {
    /// Elementwise activation applied to a neuron's pre-activation value.
    pub function: &'a dyn Fn(f64) -> f64,
    /// Derivative expressed in terms of the *activated output* y = f(x),
    /// not the pre-activation x. The network applies this to cached layer
    /// outputs, so e.g. SIGMOID uses y*(1-y) rather than sigma'(x).
    pub derivative: &'a dyn Fn(f64) -> f64,
}

/// f(x) = x; derivative is 1 everywhere.
pub const IDENTITY: Activation = Activation {
    function: &|x| x,
    derivative: &|_| 1.0,
};

/// Logistic sigmoid. `(-x).exp()` is the idiomatic (and more accurate) form
/// of e^-x compared to `E.powf(-x)`.
pub const SIGMOID: Activation = Activation {
    function: &|x| 1.0 / (1.0 + (-x).exp()),
    // y * (1 - y) where y is the sigmoid output (output-form derivative).
    derivative: &|x| x * (1.0 - x),
};

/// Hyperbolic tangent; derivative 1 - y^2 in terms of the output y.
pub const TANH: Activation = Activation {
    function: &|x| x.tanh(),
    derivative: &|x| 1.0 - (x.powi(2)),
};

/// Rectified linear unit. For outputs y = max(x, 0), y > 0 iff x > 0, so the
/// output-form derivative below matches the usual ReLU gradient.
pub const RELU: Activation = Activation {
    function: &|x| x.max(0.0),
    derivative: &|x| if x > 0.0 { 1.0 } else { 0.0 },
};

@ -0,0 +1,11 @@
use activations::RELU;
use network::Network;
pub mod activations;
pub mod matrix;
pub mod network;
pub mod simula;
// Entry point — currently an empty stub; the functionality lives in the
// library modules (activations, matrix, network, simula).
// NOTE(review): the `use activations::RELU` / `use network::Network` imports
// above are unused here — presumably intended for a future demo; confirm.
fn main(){
}

@ -0,0 +1,150 @@
use rand::{thread_rng, Rng};
use std::fmt::{Debug, Formatter, Result};
#[derive(Clone)]
// Dense row-major matrix of f64 backed by a nested Vec.
pub struct Matrix {
    // Number of rows (outer Vec length).
    pub rows: usize,
    // Number of columns (length of each inner Vec).
    pub cols: usize,
    // data[r][c] is the element at row r, column c.
    pub data: Vec<Vec<f64>>,
}
impl Matrix {
    /// Creates a rows x cols matrix filled with zeros.
    pub fn zeros(rows: usize, cols: usize) -> Matrix {
        Matrix {
            rows,
            cols,
            data: vec![vec![0.0; cols]; rows],
        }
    }

    /// Creates a rows x cols matrix with entries drawn uniformly from [-1, 1).
    pub fn random(rows: usize, cols: usize) -> Matrix {
        let mut rng = thread_rng();
        let mut res = Matrix::zeros(rows, cols);
        for i in 0..rows {
            for j in 0..cols {
                // gen::<f64>() yields [0, 1); rescale to [-1, 1).
                res.data[i][j] = rng.gen::<f64>() * 2.0 - 1.0;
            }
        }
        res
    }

    /// Wraps a nested Vec as a Matrix. Column count comes from the first row;
    /// an empty outer Vec now yields a 0x0 matrix instead of panicking on
    /// `data[0]` as before. Rows are assumed to be of equal length.
    pub fn from(data: Vec<Vec<f64>>) -> Matrix {
        let cols = data.first().map_or(0, Vec::len);
        Matrix {
            rows: data.len(),
            cols,
            data,
        }
    }

    /// Standard matrix product self * other (self.rows x other.cols).
    ///
    /// # Panics
    /// If `self.cols != other.rows`.
    pub fn multiply(&self, other: &Matrix) -> Matrix {
        if self.cols != other.rows {
            panic!("Attempted to multiply by matrix of incorrect dimensions");
        }
        let mut res = Matrix::zeros(self.rows, other.cols);
        for i in 0..self.rows {
            for j in 0..other.cols {
                let mut sum = 0.0;
                for k in 0..self.cols {
                    sum += self.data[i][k] * other.data[k][j];
                }
                res.data[i][j] = sum;
            }
        }
        res
    }

    // Shared elementwise combinator for add/dot_multiply/subtract.
    // `context` fills in the operation name in the original panic messages.
    fn zip_with(&self, other: &Matrix, op: impl Fn(f64, f64) -> f64, context: &str) -> Matrix {
        if self.rows != other.rows || self.cols != other.cols {
            panic!("Attempted to {} matrix of incorrect dimensions", context);
        }
        let mut res = Matrix::zeros(self.rows, self.cols);
        for i in 0..self.rows {
            for j in 0..self.cols {
                res.data[i][j] = op(self.data[i][j], other.data[i][j]);
            }
        }
        res
    }

    /// Elementwise sum. Panics on dimension mismatch.
    pub fn add(&self, other: &Matrix) -> Matrix {
        self.zip_with(other, |a, b| a + b, "add")
    }

    /// Elementwise (Hadamard) product. Panics on dimension mismatch.
    pub fn dot_multiply(&self, other: &Matrix) -> Matrix {
        self.zip_with(other, |a, b| a * b, "dot multiply by")
    }

    /// Elementwise difference self - other. Panics on dimension mismatch.
    pub fn subtract(&self, other: &Matrix) -> Matrix {
        self.zip_with(other, |a, b| a - b, "subtract")
    }

    /// Returns a new matrix with `function` applied to every element.
    /// Iterates by reference — the original cloned the entire data grid first.
    pub fn map(&self, function: &dyn Fn(f64) -> f64) -> Matrix {
        Matrix {
            rows: self.rows,
            cols: self.cols,
            data: self
                .data
                .iter()
                .map(|row| row.iter().map(|&value| function(value)).collect())
                .collect(),
        }
    }

    /// Returns the transpose (cols x rows).
    pub fn transpose(&self) -> Matrix {
        let mut res = Matrix::zeros(self.cols, self.rows);
        for i in 0..self.rows {
            for j in 0..self.cols {
                res.data[j][i] = self.data[i][j];
            }
        }
        res
    }
}
impl Debug for Matrix {
    /// Renders the matrix as "Matrix {" / one space-indented line per row
    /// (values space-separated) / "}".
    fn fmt(&self, f: &mut Formatter) -> Result {
        let rows: Vec<String> = self
            .data
            .iter()
            .map(|row| {
                let cells: Vec<String> = row.iter().map(|value| value.to_string()).collect();
                format!(" {}", cells.join(" "))
            })
            .collect();
        write!(f, "Matrix {{\n{}\n}}", rows.join("\n"))
    }
}

@ -0,0 +1,134 @@
use std::{
fs::File,
io::{Read, Write},
};
use serde::{Deserialize, Serialize};
use serde_json::{from_str, json};
use super::{activations::Activation, matrix::Matrix};
// Fully-connected feed-forward neural network trained by gradient descent.
pub struct Network<'a> {
    // Node count per layer, input layer first.
    layers: Vec<usize>,
    // weights[i] is a layers[i+1] x layers[i] matrix connecting layer i to i+1.
    weights: Vec<Matrix>,
    // biases[i] is a layers[i+1] x 1 column vector for layer i+1.
    biases: Vec<Matrix>,
    // Activated output of every layer from the most recent feed_forward call;
    // consumed by back_propogate.
    data: Vec<Matrix>,
    // Step size applied to gradients during back-propagation.
    learning_rate: f64,
    // Activation function and its derivative, shared by all layers.
    activation: Activation<'a>,
}
#[derive(Serialize, Deserialize)]
// JSON-serializable snapshot of a network's parameters, stored as raw nested
// Vecs (Matrix itself has no serde derives). Used by Network::save / load.
struct SaveData {
    weights: Vec<Vec<Vec<f64>>>,
    biases: Vec<Vec<Vec<f64>>>,
}
impl Network<'_> {
    /// Builds a network with one weight matrix and bias vector per connection
    /// between consecutive layers, initialised uniformly in [-1, 1).
    ///
    /// # Panics
    /// If fewer than two layer sizes are given (previously `layers.len() - 1`
    /// underflowed on an empty Vec).
    pub fn new<'a>(
        layers: Vec<usize>,
        learning_rate: f64,
        activation: Activation<'a>,
    ) -> Network<'a> {
        assert!(
            layers.len() >= 2,
            "Network requires at least an input and an output layer"
        );
        let mut weights = vec![];
        let mut biases = vec![];
        for i in 0..layers.len() - 1 {
            weights.push(Matrix::random(layers[i + 1], layers[i]));
            biases.push(Matrix::random(layers[i + 1], 1));
        }
        Network {
            layers,
            weights,
            biases,
            data: vec![],
            learning_rate,
            activation,
        }
    }

    /// Runs `inputs` through the network and returns the output layer's
    /// activations. Caches every layer's activated output in `self.data`
    /// for the next back_propogate call.
    ///
    /// # Panics
    /// If `inputs.len()` does not match the input layer size.
    pub fn feed_forward(&mut self, inputs: Vec<f64>) -> Vec<f64> {
        if inputs.len() != self.layers[0] {
            panic!("Invalid inputs length");
        }
        // Column-vector form of the input.
        let mut current = Matrix::from(vec![inputs]).transpose();
        self.data = vec![current.clone()];
        for i in 0..self.layers.len() - 1 {
            current = self.weights[i]
                .multiply(&current)
                .add(&self.biases[i])
                .map(self.activation.function);
            self.data.push(current.clone());
        }
        current.transpose().data[0].to_owned()
    }

    /// One gradient-descent step toward `targets`. `outputs` must be the
    /// result of the immediately preceding feed_forward call, since the layer
    /// activations cached in `self.data` are reused here.
    ///
    /// Note: the derivative is applied to *activated* outputs, so
    /// `Activation::derivative` must be expressed in terms of the output
    /// (as this crate's SIGMOID/TANH are).
    ///
    /// # Panics
    /// If `targets.len()` does not match the output layer size.
    pub fn back_propogate(&mut self, outputs: Vec<f64>, targets: Vec<f64>) {
        if targets.len() != self.layers[self.layers.len() - 1] {
            panic!("Invalid targets length");
        }
        let parsed = Matrix::from(vec![outputs]).transpose();
        let mut errors = Matrix::from(vec![targets]).transpose().subtract(&parsed);
        let mut gradients = parsed.map(self.activation.derivative);
        // Walk backwards from the output layer, updating each connection.
        for i in (0..self.layers.len() - 1).rev() {
            gradients = gradients
                .dot_multiply(&errors)
                .map(&|x| x * self.learning_rate);
            self.weights[i] = self.weights[i].add(&gradients.multiply(&self.data[i].transpose()));
            self.biases[i] = self.biases[i].add(&gradients);
            // Propagate the error through the (already-updated) weights.
            errors = self.weights[i].transpose().multiply(&errors);
            gradients = self.data[i].map(self.activation.derivative);
        }
    }

    /// Trains on every (input, target) pair for `epochs` full passes,
    /// printing progress roughly every 1% of epochs.
    ///
    /// # Panics
    /// If `inputs` and `targets` differ in length (previously this surfaced
    /// as an index panic mid-training).
    pub fn train(&mut self, inputs: Vec<Vec<f64>>, targets: Vec<Vec<f64>>, epochs: u16) {
        assert_eq!(
            inputs.len(),
            targets.len(),
            "inputs and targets must have the same number of samples"
        );
        for i in 1..=epochs {
            // The short-circuit also guards the modulo when epochs / 100 == 0.
            if epochs < 100 || i % (epochs / 100) == 0 {
                println!("Epoch {} of {}", i, epochs);
            }
            for j in 0..inputs.len() {
                let outputs = self.feed_forward(inputs[j].clone());
                self.back_propogate(outputs, targets[j].clone());
            }
        }
    }

    /// Serializes weights and biases as JSON to `file`, creating/truncating it.
    pub fn save(&self, file: String) {
        let mut file = File::create(file).expect("Unable to touch save file");
        // Serialize by reference — the original cloned every matrix first.
        let weights: Vec<&Vec<Vec<f64>>> = self.weights.iter().map(|m| &m.data).collect();
        let biases: Vec<&Vec<Vec<f64>>> = self.biases.iter().map(|m| &m.data).collect();
        file.write_all(
            json!({ "weights": weights, "biases": biases })
                .to_string()
                .as_bytes(),
        )
        .expect("Unable to write to save file");
    }

    /// Restores weights and biases previously written by `save`.
    ///
    /// # Panics
    /// If the file cannot be read or parsed, or if the saved parameters do
    /// not match this network's layer topology (previously a mismatched file
    /// caused an out-of-bounds index panic).
    pub fn load(&mut self, file: String) {
        let mut file = File::open(file).expect("Unable to open save file");
        let mut buffer = String::new();
        file.read_to_string(&mut buffer)
            .expect("Unable to read save file");
        // Message fixed: this is deserialization (original said "serialize").
        let save_data: SaveData = from_str(&buffer).expect("Unable to deserialize save data");
        let expected = self.layers.len() - 1;
        if save_data.weights.len() != expected || save_data.biases.len() != expected {
            panic!("Save data does not match network topology");
        }
        let mut weights = vec![];
        let mut biases = vec![];
        for i in 0..expected {
            weights.push(Matrix::from(save_data.weights[i].clone()));
            biases.push(Matrix::from(save_data.biases[i].clone()));
        }
        self.weights = weights;
        self.biases = biases;
    }
}
Loading…
Cancel
Save