[lab4] Layer struct

This commit is contained in:
Andrew Golovashevich 2026-02-08 17:16:29 +03:00
parent 9243aa4399
commit d4bcef5160
5 changed files with 179 additions and 0 deletions

9
lab4/src/algo/layer.rs Normal file
View File

@ -0,0 +1,9 @@
/// A neural-network layer that maps a fixed-size input array to a
/// fixed-size output array.
///
/// `PREV_LAYER_SIZE` is the width of the previous layer's output (this
/// layer's input width); `CURRENT_LAYER_SIZE` is the number of values this
/// layer produces. Const parameters are SCREAMING_SNAKE_CASE to satisfy
/// rustc's `non_upper_case_globals` lint (the original camel-case names
/// warned on every build); implementors bind them positionally, so this is
/// interface-compatible.
pub trait Layer<const PREV_LAYER_SIZE: usize, const CURRENT_LAYER_SIZE: usize> {
    /// Element type accepted from the previous layer.
    type InputType;
    /// Element type produced by this layer.
    type OutputType;

    /// Computes this layer's output values for one input vector.
    fn compute(
        &self,
        input_data: &[Self::InputType; PREV_LAYER_SIZE],
    ) -> [Self::OutputType; CURRENT_LAYER_SIZE];
}

View File

@ -0,0 +1,90 @@
use crate::algo::layer::Layer;
use crate::algo::net::LayersConnect;
use std::iter::Sum;
use std::marker::PhantomData;
use std::ops::{Add, Mul};
/// One neuron: a single `f64` weight per value coming out of the previous
/// layer. Only weights are stored — no bias term is modeled here; the
/// weighted sum and activation are computed by the owning layer.
struct Neuron<const PrevLayerSize: usize> {
    input_weights: [f64; PrevLayerSize],
}
/// Types that can be combined with an `f64` connection weight.
///
/// Implemented by a layer's input element type; `apply_weight` weights one
/// input value by `w` (presumably multiplication, but the trait leaves the
/// exact operation to the implementor).
pub trait ApplyWeight {
    /// Result of weighting one input value; summed into the activation.
    type Output;
    fn apply_weight(&self, w: f64) -> Self::Output;
}
pub struct LayerImpl<
const PrevLayerSize: usize,
const CurrentLayerSize: usize,
InputType: ApplyWeight,
ActivationType: Sum<<InputType as ApplyWeight>::Output>,
ActivationFunction: Fn(ActivationType) -> OutputType,
OutputType,
> {
neurons: [Neuron<PrevLayerSize>; CurrentLayerSize],
activation_function: ActivationFunction,
__phantom: PhantomData<(InputType, ActivationType, OutputType)>,
}
impl<
const PrevLayerSize: usize,
const CurrentLayerSize: usize,
InputType: ApplyWeight,
ActivationType: Sum<<InputType as ApplyWeight>::Output>,
ActivationFunction: Fn(ActivationType) -> OutputType,
OutputType,
> Layer<PrevLayerSize, CurrentLayerSize>
for LayerImpl<
PrevLayerSize,
CurrentLayerSize,
InputType,
ActivationType,
ActivationFunction,
OutputType,
>
{
type InputType = InputType;
type OutputType = OutputType;
fn compute(&self, input_data: &[InputType; PrevLayerSize]) -> [OutputType; CurrentLayerSize] {
let mut output: [OutputType; CurrentLayerSize] =
std::array::from_fn(|_| unsafe { std::mem::uninitialized::<OutputType>() });
for (i, n) in self.neurons.iter().enumerate() {
let P = input_data
.iter()
.zip(n.input_weights)
.map(|(x, w)| x.apply_weight(w))
.sum();
output[i] = (self.activation_function)(P);
}
return output;
}
}
/// `layer_a + layer_b` chains a standalone layer with a compatible follower,
/// producing a [`LayersConnect`] node. Compatibility is enforced at the type
/// level: `CurrentLayer`'s input type must equal this layer's output type.
impl<
        const PrevPrevLayerSize: usize,
        const PrevLayerSize: usize,
        const CurrentLayerSize: usize,
        PrevLayerInputType,
        PrevLayerActivationType,
        PrevLayerActivationFunction,
        PrevLayerOutputType,
        CurrentLayer,
    > Add<CurrentLayer>
    for LayerImpl<
        PrevPrevLayerSize,
        PrevLayerSize,
        PrevLayerInputType,
        PrevLayerActivationType,
        PrevLayerActivationFunction,
        PrevLayerOutputType,
    >
where
    PrevLayerInputType: ApplyWeight,
    PrevLayerActivationType: Sum<<PrevLayerInputType as ApplyWeight>::Output>,
    PrevLayerActivationFunction: Fn(PrevLayerActivationType) -> PrevLayerOutputType,
    CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize, InputType = PrevLayerOutputType>,
{
    type Output =
        LayersConnect<PrevPrevLayerSize, PrevLayerSize, CurrentLayerSize, Self, CurrentLayer>;

    /// Pure bookkeeping: both layers are moved into the new chain node.
    fn add(self, rhs: CurrentLayer) -> Self::Output {
        LayersConnect::join(self, rhs)
    }
}

3
lab4/src/algo/mod.rs Normal file
View File

@ -0,0 +1,3 @@
//! Neural-network building blocks: the `Layer` abstraction, a concrete
//! implementation, and the combinator that chains layers together.
mod layer;
mod layer_impl;
mod net;

74
lab4/src/algo/net.rs Normal file
View File

@ -0,0 +1,74 @@
use crate::algo::layer::Layer;
use std::ops::{Add};
/// Two layers chained in sequence: `prev_layer`'s output array feeds
/// `current_layer`. The bounds tie the chain together at the type level —
/// `CurrentLayer`'s `InputType` must equal `PrevLayer`'s `OutputType`, and
/// the shared `PrevLayerSize` makes the intermediate array widths agree.
pub(super) struct LayersConnect<
    const PrevPrevLayerSize: usize,
    const PrevLayerSize: usize,
    const CurrentLayerSize: usize,
    PrevLayer: Layer<PrevPrevLayerSize, PrevLayerSize>,
    CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize, InputType = PrevLayer::OutputType>,
> {
    // First stage: PrevPrevLayerSize inputs -> PrevLayerSize intermediates.
    prev_layer: PrevLayer,
    // Second stage: PrevLayerSize intermediates -> CurrentLayerSize outputs.
    current_layer: CurrentLayer,
}
impl<
        const PrevPrevLayerSize: usize,
        const PrevLayerSize: usize,
        const CurrentLayerSize: usize,
        PrevLayer: Layer<PrevPrevLayerSize, PrevLayerSize>,
        CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize, InputType = PrevLayer::OutputType>,
    > LayersConnect<PrevPrevLayerSize, PrevLayerSize, CurrentLayerSize, PrevLayer, CurrentLayer>
{
    /// Wraps an already type-compatible pair of layers into one chain node.
    pub fn join(prev_layer: PrevLayer, current_layer: CurrentLayer) -> Self {
        Self {
            prev_layer,
            current_layer,
        }
    }
}
impl<
        const PrevPrevLayerSize: usize,
        const PrevLayerSize: usize,
        const CurrentLayerSize: usize,
        PrevLayer: Layer<PrevPrevLayerSize, PrevLayerSize>,
        CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize, InputType = PrevLayer::OutputType>,
    > Layer<PrevPrevLayerSize, CurrentLayerSize>
    for LayersConnect<PrevPrevLayerSize, PrevLayerSize, CurrentLayerSize, PrevLayer, CurrentLayer>
{
    type InputType = PrevLayer::InputType;
    type OutputType = CurrentLayer::OutputType;

    /// A connected pair is itself a [`Layer`]: run the first stage and feed
    /// its intermediate array straight into the second.
    fn compute(
        &self,
        input_data: &[Self::InputType; PrevPrevLayerSize],
    ) -> [Self::OutputType; CurrentLayerSize] {
        self.current_layer
            .compute(&self.prev_layer.compute(input_data))
    }
}
/// `+` also works on an already-connected chain, appending one more layer:
/// the existing chain (itself a `Layer<PrevPrevLayerSize, CurrentLayerSize>`)
/// becomes the first stage of a new, longer [`LayersConnect`].
impl<
        const PrevPrevLayerSize: usize,
        const PrevLayerSize: usize,
        const CurrentLayerSize: usize,
        const NextLayerSize: usize,
        PrevLayer,
        CurrentLayer,
        NextLayer,
    > Add<NextLayer>
    for LayersConnect<PrevPrevLayerSize, PrevLayerSize, CurrentLayerSize, PrevLayer, CurrentLayer>
where
    PrevLayer: Layer<PrevPrevLayerSize, PrevLayerSize>,
    CurrentLayer: Layer<PrevLayerSize, CurrentLayerSize, InputType = PrevLayer::OutputType>,
    NextLayer: Layer<CurrentLayerSize, NextLayerSize, InputType = CurrentLayer::OutputType>,
{
    type Output =
        LayersConnect<PrevPrevLayerSize, CurrentLayerSize, NextLayerSize, Self, NextLayer>;

    fn add(self, rhs: NextLayer) -> Self::Output {
        LayersConnect::join(self, rhs)
    }
}

View File

@ -0,0 +1,3 @@
mod algo;
/// Entry point: the body is intentionally empty — nothing is executed yet;
/// the binary exists so the `algo` module gets compiled and type-checked.
fn main() {}