diff --git a/lab4/src/algo/layer.rs b/lab4/src/algo/layer.rs
new file mode 100644
index 0000000..e18b50c
--- /dev/null
+++ b/lab4/src/algo/layer.rs
@@ -0,0 +1,14 @@
+/// A single network layer mapping `PrevLayerSize` inputs to
+/// `CurrentLayerSize` outputs.
+pub trait Layer<const PrevLayerSize: usize, const CurrentLayerSize: usize> {
+    /// Element type of the incoming activation vector.
+    type InputType;
+    /// Element type of the produced activation vector.
+    type OutputType;
+
+    /// Feeds `input_data` forward and returns this layer's activations.
+    fn compute(
+        &self,
+        input_data: &[Self::InputType; PrevLayerSize],
+    ) -> [Self::OutputType; CurrentLayerSize];
+}
\ No newline at end of file
diff --git a/lab4/src/algo/layer_impl.rs b/lab4/src/algo/layer_impl.rs
new file mode 100644
index 0000000..9175092
--- /dev/null
+++ b/lab4/src/algo/layer_impl.rs
@@ -0,0 +1,95 @@
+use crate::algo::layer::Layer;
+use crate::algo::net::LayersConnect;
+use std::iter::Sum;
+use std::marker::PhantomData;
+use std::ops::{Add, Mul};
+
+/// One neuron: a connection weight per input of the previous layer.
+struct Neuron<const PrevLayerSize: usize> {
+    input_weights: [f64; PrevLayerSize],
+}
+
+/// Types that can be scaled by an `f64` connection weight.
+pub trait ApplyWeight {
+    type Output;
+    fn apply_weight(&self, w: f64) -> Self::Output;
+}
+
+/// Fully-connected layer: `CurrentLayerSize` neurons, each reading all
+/// `PrevLayerSize` inputs and passing the weighted sum through
+/// `activation_function`.
+pub struct LayerImpl<
+    const PrevLayerSize: usize,
+    const CurrentLayerSize: usize,
+    InputType: ApplyWeight,
+    ActivationType: Sum<<InputType as ApplyWeight>::Output>,
+    ActivationFunction: Fn(ActivationType) -> OutputType,
+    OutputType,
+> {
+    neurons: [Neuron<PrevLayerSize>; CurrentLayerSize],
+    activation_function: ActivationFunction,
+    // Marks the otherwise-unused type parameters as logically owned.
+    __phantom: PhantomData<(InputType, ActivationType, OutputType)>,
+}
+
+impl<
+    const PrevLayerSize: usize,
+    const CurrentLayerSize: usize,
+    InputType: ApplyWeight,
+    ActivationType: Sum<<InputType as ApplyWeight>::Output>,
+    ActivationFunction: Fn(ActivationType) -> OutputType,
+    OutputType,
+> Layer<PrevLayerSize, CurrentLayerSize>
+    for LayerImpl<
+        PrevLayerSize,
+        CurrentLayerSize,
+        InputType,
+        ActivationType,
+        ActivationFunction,
+        OutputType,
+    >
+{
+    type InputType = InputType;
+    type OutputType = OutputType;
+
+    fn compute(&self, input_data: &[InputType; PrevLayerSize]) -> [OutputType; CurrentLayerSize] {
+        // Build each output element in place; replaces the unsound
+        // `mem::uninitialized` + overwrite pattern, which is UB for
+        // any non-trivial OutputType.
+        std::array::from_fn(|i| {
+            let weighted_sum: ActivationType = input_data
+                .iter()
+                .zip(self.neurons[i].input_weights)
+                .map(|(x, w)| x.apply_weight(w))
+                .sum();
+            (self.activation_function)(weighted_sum)
+        })
+    }
+}
+
+/// `layer_a + layer_b` chains two layers into a `LayersConnect`.
+impl<
+    const PrevPrevLayerSize: usize,
+    const PrevLayerSize: usize,
+    const CurrentLayerSize: usize,
+    PrevLayerInputType: ApplyWeight,
+    PrevLayerActivationType: Sum<<PrevLayerInputType as ApplyWeight>::Output>,
+    PrevLayerActivationFunction: Fn(PrevLayerActivationType) -> PrevLayerOutputType,
+    PrevLayerOutputType,
+    CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize>,
+> Add<CurrentLayer>
+    for LayerImpl<
+        PrevPrevLayerSize,
+        PrevLayerSize,
+        PrevLayerInputType,
+        PrevLayerActivationType,
+        PrevLayerActivationFunction,
+        PrevLayerOutputType,
+    >
+{
+    type Output =
+        LayersConnect<PrevPrevLayerSize, PrevLayerSize, CurrentLayerSize, Self, CurrentLayer>;
+
+    fn add(self, rhs: CurrentLayer) -> Self::Output {
+        LayersConnect::join(self, rhs)
+    }
+}
diff --git a/lab4/src/algo/mod.rs b/lab4/src/algo/mod.rs
new file mode 100644
index 0000000..4013c1b
--- /dev/null
+++ b/lab4/src/algo/mod.rs
@@ -0,0 +1,3 @@
+mod layer;
+mod layer_impl;
+mod net;
diff --git a/lab4/src/algo/net.rs b/lab4/src/algo/net.rs
new file mode 100644
index 0000000..e08d745
--- /dev/null
+++ b/lab4/src/algo/net.rs
@@ -0,0 +1,73 @@
+use crate::algo::layer::Layer;
+use std::ops::Add;
+
+/// Two chained layers: `prev_layer`'s activations feed `current_layer`.
+pub(super) struct LayersConnect<
+    const PrevPrevLayerSize: usize,
+    const PrevLayerSize: usize,
+    const CurrentLayerSize: usize,
+    PrevLayer: Layer<PrevPrevLayerSize, PrevLayerSize>,
+    CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize>,
+> {
+    prev_layer: PrevLayer,
+    current_layer: CurrentLayer,
+}
+
+impl<
+    const PrevPrevLayerSize: usize,
+    const PrevLayerSize: usize,
+    const CurrentLayerSize: usize,
+    PrevLayer: Layer<PrevPrevLayerSize, PrevLayerSize>,
+    CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize>,
+> LayersConnect<PrevPrevLayerSize, PrevLayerSize, CurrentLayerSize, PrevLayer, CurrentLayer>
+{
+    /// Glues two already-built layers into one composite unit.
+    pub fn join(l1: PrevLayer, l2: CurrentLayer) -> Self {
+        Self {
+            prev_layer: l1,
+            current_layer: l2,
+        }
+    }
+}
+
+impl<
+    const PrevPrevLayerSize: usize,
+    const PrevLayerSize: usize,
+    const CurrentLayerSize: usize,
+    PrevLayer: Layer<PrevPrevLayerSize, PrevLayerSize>,
+    CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize, InputType = PrevLayer::OutputType>,
+> Layer<PrevPrevLayerSize, CurrentLayerSize>
+    for LayersConnect<PrevPrevLayerSize, PrevLayerSize, CurrentLayerSize, PrevLayer, CurrentLayer>
+{
+    type InputType = PrevLayer::InputType;
+    type OutputType = CurrentLayer::OutputType;
+
+    fn compute(
+        &self,
+        input_data: &[Self::InputType; PrevPrevLayerSize],
+    ) -> [Self::OutputType; CurrentLayerSize] {
+        // Forward through the first layer, then feed its activations
+        // into the second.
+        let intermediate_data = self.prev_layer.compute(input_data);
+        self.current_layer.compute(&intermediate_data)
+    }
+}
+
+/// `chain + next_layer` appends another layer to an existing chain.
+impl<
+    const PrevPrevLayerSize: usize,
+    const PrevLayerSize: usize,
+    const CurrentLayerSize: usize,
+    const NextLayerSize: usize,
+    PrevLayer: Layer<PrevPrevLayerSize, PrevLayerSize>,
+    CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize, InputType = PrevLayer::OutputType>,
+    NextLayer: Layer<CurrentLayerSize, NextLayerSize>,
+> Add<NextLayer>
+    for LayersConnect<PrevPrevLayerSize, PrevLayerSize, CurrentLayerSize, PrevLayer, CurrentLayer>
+{
+    type Output = LayersConnect<PrevPrevLayerSize, CurrentLayerSize, NextLayerSize, Self, NextLayer>;
+
+    fn add(self, rhs: NextLayer) -> Self::Output {
+        LayersConnect::join(self, rhs)
+    }
+}
diff --git a/lab4/src/main.rs b/lab4/src/main.rs
index e69de29..552b8c2 100644
--- a/lab4/src/main.rs
+++ b/lab4/src/main.rs
@@ -0,0 +1,3 @@
+mod algo;
+
+fn main() {}
\ No newline at end of file