From 6a6aec46d7f1096c00d29b9e0375409350d124da Mon Sep 17 00:00:00 2001
From: prjanitor
Date: Thu, 2 Apr 2026 09:38:20 +0300
Subject: [PATCH] Fix missing null check for num_features in BatchNorm1D

The num_features parameter is required by PyTorch's BatchNorm1d but was
defaulting to None. Added a null check to ensure num_features is provided
before instantiation.
---
 neuralpy/layers/normalization/batchnorm1d.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/neuralpy/layers/normalization/batchnorm1d.py b/neuralpy/layers/normalization/batchnorm1d.py
index 266514e..069492a 100644
--- a/neuralpy/layers/normalization/batchnorm1d.py
+++ b/neuralpy/layers/normalization/batchnorm1d.py
@@ -58,7 +58,7 @@ def __init__(
             automatically calculates a unique name for the layer
         """
         # Checking num_features field
-        if num_features is not None and not isinstance(num_features, int):
+        if num_features is None or not isinstance(num_features, int):
             raise ValueError("Please provide a valid num_features")
         # Checking eps field
         if not isinstance(eps, float):