pub struct NabLayer {
    pub layer_type: String,
    pub name: String,
    pub input_shape: Vec<usize>,
    pub output_shape: Vec<usize>,
    pub weights: Option<NDArray>,
    pub biases: Option<NDArray>,
    pub input_cache: Option<NDArray>,
    pub output_cache: Option<NDArray>,
    pub trainable: bool,
    pub weight_gradients: Option<NDArray>,
    pub bias_gradients: Option<NDArray>,
    pub activation: Option<String>,
    pub dropout_rate: Option<f64>,
    pub dropout_mask: Option<NDArray>,
    pub epsilon: Option<f64>,
    pub momentum: Option<f64>,
    pub running_mean: Option<NDArray>,
    pub running_var: Option<NDArray>,
    pub batch_mean: Option<NDArray>,
    pub batch_var: Option<NDArray>,
    pub normalized: Option<NDArray>,
    pub node_index: Option<usize>,
    pub input_nodes: Option<Vec<usize>>,
}
Represents a layer’s configuration and state
Fields
layer_type: String
Layer type identifier
name: String
Layer name (unique identifier)
input_shape: Vec<usize>
Input shape of the layer
output_shape: Vec<usize>
Output shape of the layer
weights: Option<NDArray>
Layer weights (if any)
biases: Option<NDArray>
Layer biases (if any)
input_cache: Option<NDArray>
Stored input for backpropagation
output_cache: Option<NDArray>
Stored output for backpropagation
trainable: bool
Whether the layer's parameters are trainable
weight_gradients: Option<NDArray>
Weight gradients for optimization
bias_gradients: Option<NDArray>
Bias gradients for optimization
activation: Option<String>
Type of activation function
dropout_rate: Option<f64>
Dropout rate (if applicable)
dropout_mask: Option<NDArray>
Dropout mask for backpropagation
epsilon: Option<f64>
Epsilon for numerical stability in BatchNorm
momentum: Option<f64>
Momentum for running statistics in BatchNorm
running_mean: Option<NDArray>
Running mean for BatchNorm inference
running_var: Option<NDArray>
Running variance for BatchNorm inference
batch_mean: Option<NDArray>
Current batch mean (for backprop)
batch_var: Option<NDArray>
Current batch variance (for backprop)
normalized: Option<NDArray>
Normalized values before scaling (for backprop)
node_index: Option<usize>
Node index in the model graph
input_nodes: Option<Vec<usize>>
Input connections for the layer
Implementations
impl NabLayer
pub fn dense(
    input_dim: usize,
    units: usize,
    activation: Option<&str>,
    name: Option<&str>,
) -> Self
Creates a new Dense (fully connected) layer
Arguments
- input_dim - Number of input features
- units - Number of output units
- activation - Optional activation function ("relu", "sigmoid", "tanh", etc.)
- name - Optional name for the layer
Example
use nabla_ml::nab_layers::NabLayer;
// Dense layer with ReLU activation
let dense = NabLayer::dense(784, 128, Some("relu"), Some("hidden_1"));
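Mirroring the other constructors' examples, the reported output shape presumably reflects the unit count (an assumed assertion, not taken from this page):
// Assumption: output shape equals the number of units.
assert_eq!(dense.get_output_shape(), &[128]);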
pub fn activation(
    activation_type: &str,
    input_shape: Vec<usize>,
    name: Option<&str>,
) -> Self
Creates a new Activation layer
Arguments
- activation_type - Type of activation ("relu", "sigmoid", "tanh", etc.)
- input_shape - Shape of the input (excluding batch dimension)
- name - Optional name for the layer
Example
use nabla_ml::nab_layers::NabLayer;
let relu = NabLayer::activation("relu", vec![128], Some("relu_1"));
assert_eq!(relu.get_output_shape(), &[128]);
pub fn flatten(input_shape: Vec<usize>, name: Option<&str>) -> Self
Creates a new Flatten layer
Flattens the input while keeping the batch size. For example: (batch_size, height, width, channels) -> (batch_size, height * width * channels)
Arguments
- input_shape - Shape of the input (excluding batch dimension)
- name - Optional name for the layer
Example
use nabla_ml::nab_layers::NabLayer;
// Flatten a 28x28x1 image to 784 features
let flatten = NabLayer::flatten(vec![28, 28, 1], Some("flatten_1"));
assert_eq!(flatten.get_output_shape(), &[784]);
pub fn dropout(input_shape: Vec<usize>, rate: f64, name: Option<&str>) -> Self
Creates a new Dropout layer
Randomly sets input units to 0 with a probability of rate during training. During inference (training=false), the layer behaves like an identity function.
Arguments
- input_shape - Shape of the input (excluding batch dimension)
- rate - Dropout rate between 0 and 1 (e.g., 0.5 means 50% of units are dropped)
- name - Optional name for the layer
Example
use nabla_ml::nab_layers::NabLayer;
// Dropout with 50% rate
let dropout = NabLayer::dropout(vec![128], 0.5, Some("dropout_1"));
assert_eq!(dropout.get_output_shape(), &[128]);
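A sketch of the training/inference distinction described above; the `nab_array` module path and `from_matrix` constructor are assumptions about the array API, not confirmed by this page:
use nabla_ml::nab_layers::NabLayer;
use nabla_ml::nab_array::NDArray;

// Assumed constructor: a 4x128 batch of ones.
let input = NDArray::from_matrix(vec![vec![1.0; 128]; 4]);
let mut dropout = NabLayer::dropout(vec![128], 0.5, Some("dropout_1"));

// training = true: roughly 50% of units are zeroed.
let train_out = dropout.forward(&input, true);

// training = false: the layer acts as an identity function.
let eval_out = dropout.forward(&input, false);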
pub fn batch_norm(
    input_shape: Vec<usize>,
    epsilon: Option<f64>,
    momentum: Option<f64>,
    name: Option<&str>,
) -> Self
Creates a new BatchNormalization layer
Normalizes the activations of the previous layer for each batch. During training, uses batch statistics. During inference, uses running statistics.
Arguments
- input_shape - Shape of the input (excluding batch dimension)
- epsilon - Small constant for numerical stability (default: 1e-5)
- momentum - Momentum for running statistics (default: 0.99)
- name - Optional name for the layer
Example
use nabla_ml::nab_layers::NabLayer;
let bn = NabLayer::batch_norm(vec![128], None, None, Some("bn_1"));
assert_eq!(bn.get_output_shape(), &[128]);
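For reference, the conventional running-statistics update applied during training is an exponential moving average; this is a standalone sketch of the standard formula, not necessarily this crate's exact implementation:
// Standard EMA update for running statistics, on plain f64 for illustration.
fn update_running_stat(running: f64, batch: f64, momentum: f64) -> f64 {
    momentum * running + (1.0 - momentum) * batch
}

// With the default momentum of 0.99, running statistics move slowly
// toward each new batch statistic.
let updated = update_running_stat(0.0, 1.0, 0.99);
assert!((updated - 0.01).abs() < 1e-12);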
pub fn forward(&mut self, input: &NDArray, training: bool) -> NDArray
Runs a forward pass through the layer. When training is true, training-only behavior is enabled (e.g., Dropout masking and BatchNorm batch statistics) and inputs/outputs are cached for backpropagation; when false, the layer uses its inference behavior.
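A minimal usage sketch; as in the Dropout example above, the `nab_array` module path and `from_matrix` constructor are assumptions about the array API:
use nabla_ml::nab_layers::NabLayer;
use nabla_ml::nab_array::NDArray;

// Assumed constructor: a 2x784 input batch.
let input = NDArray::from_matrix(vec![vec![0.5; 784]; 2]);
let mut dense = NabLayer::dense(784, 128, Some("relu"), Some("hidden_1"));

// training = true caches inputs/outputs for backpropagation and enables
// training-only behavior in layers such as Dropout and BatchNorm.
let output = dense.forward(&input, true);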
pub fn get_output_shape(&self) -> &[usize]
Returns the output shape of the layer
pub fn is_trainable(&self) -> bool
Returns whether the layer is trainable
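A quick check, assuming parameterless layers such as Flatten report as non-trainable (a guess about this crate's convention, not stated on this page):
use nabla_ml::nab_layers::NabLayer;

let dense = NabLayer::dense(784, 128, None, None);
let flatten = NabLayer::flatten(vec![28, 28, 1], None);
assert!(dense.is_trainable());
// Assumption: layers without weights report false here.
assert!(!flatten.is_trainable());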
pub fn compute_output_shape(&self, input_shape: &[usize]) -> Vec<usize>
Computes output shape for a given input shape
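For example, following the shape convention from the flatten docs above (a sketch; the exact return value for this call is assumed):
use nabla_ml::nab_layers::NabLayer;

let flatten = NabLayer::flatten(vec![28, 28, 1], None);
// 28 * 28 * 1 = 784 features after flattening, per the flatten docs.
assert_eq!(flatten.compute_output_shape(&[28, 28, 1]), vec![784]);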