From c1dc509a2c28afefd6c1c743d24321c17dd271ba Mon Sep 17 00:00:00 2001 From: Matthieu HERNANDEZ Date: Sun, 3 Aug 2025 11:24:02 +0200 Subject: [PATCH 1/6] Add static_assert for Neurons --- include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp | 1 + include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp | 1 + include/snn/neural_network/layer/neuron/SimpleNeuron.hpp | 1 + 3 files changed, 3 insertions(+) diff --git a/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp b/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp index 62d965e4..5bb11767 100644 --- a/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp +++ b/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp @@ -54,6 +54,7 @@ class GatedRecurrentUnit final auto operator==(const GatedRecurrentUnit& neuron) const -> bool; auto operator!=(const GatedRecurrentUnit& neuron) const -> bool; }; +static_assert(BaseNeuron); template void GatedRecurrentUnit::serialize(Archive& archive, [[maybe_unused]] const uint32_t version) diff --git a/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp b/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp index 03d27f35..f1157904 100644 --- a/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp +++ b/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp @@ -38,6 +38,7 @@ class RecurrentNeuron final : public Neuron auto operator==(const RecurrentNeuron& neuron) const -> bool; auto operator!=(const RecurrentNeuron& neuron) const -> bool; }; +static_assert(BaseNeuron); template void RecurrentNeuron::serialize(Archive& archive, [[maybe_unused]] const uint32_t version) diff --git a/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp b/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp index 30310ebe..06459376 100644 --- a/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp +++ b/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp @@ -30,6 +30,7 @@ class SimpleNeuron final : public 
Neuron auto operator==(const SimpleNeuron& neuron) const -> bool; auto operator!=(const SimpleNeuron& neuron) const -> bool; }; +static_assert(BaseNeuron); template void SimpleNeuron::serialize(Archive& archive, [[maybe_unused]] const uint32_t version) From 9cd621fdbf18da52a13f25b7032b74f8dd2c4b5f Mon Sep 17 00:00:00 2001 From: Matthieu HERNANDEZ Date: Mon, 4 Aug 2025 01:03:07 +0200 Subject: [PATCH 2/6] Add resetLearningVariables method Also remove learning variables from serialization. Update SaveNeuralNetwork.save unit tests. --- .clang-tidy | 2 +- examples/.clang-tidy | 2 +- include/snn/neural_network/NeuralNetwork.hpp | 3 +++ .../snn/neural_network/layer/BaseLayer.hpp | 2 ++ include/snn/neural_network/layer/Layer.hpp | 2 ++ include/snn/neural_network/layer/Layer.tpp | 9 +++++++ .../layer/neuron/BaseNeuron.hpp | 5 ++-- .../neural_network/layer/neuron/Circular.hpp | 7 +++++ .../layer/neuron/GatedRecurrentUnit.hpp | 8 ++---- .../layer/neuron/LearningObject.hpp | 6 +++++ .../neural_network/layer/neuron/Neuron.hpp | 7 ++--- .../layer/neuron/RecurrentNeuron.hpp | 7 +++-- .../layer/neuron/SimpleNeuron.hpp | 1 + src/neural_network/NeuralNetwork.cpp | 8 ++++++ .../StraightforwardNeuralNetwork.cpp | 1 + src/neural_network/layer/Layer.cpp | 3 +++ src/neural_network/layer/neuron/Circular.cpp | 27 ++++++++++++++++--- .../layer/neuron/GatedRecurrentUnit.cpp | 16 ++++++++--- src/neural_network/layer/neuron/Neuron.cpp | 20 ++++++++------ .../layer/neuron/RecurrentNeuron.cpp | 9 +++++++ tests/.clang-tidy | 2 +- tests/unit_tests/SaveNeuralNetworkTests.cpp | 17 +++++++++--- 22 files changed, 125 insertions(+), 39 deletions(-) create mode 100644 include/snn/neural_network/layer/neuron/LearningObject.hpp diff --git a/.clang-tidy b/.clang-tidy index 707d2cb9..07da2077 100644 --- a/.clang-tidy +++ b/.clang-tidy @@ -1,5 +1,6 @@ Checks: > *, + -altera-unroll-loops, -boost-use-ranges, -cppcoreguidelines-non-private-member-variables-in-classes, -fuchsia-default-arguments-calls, @@ -19,7 
+20,6 @@ Checks: > -cert* WarningsAsErrors: > *, - -altera-unroll-loops, -altera-id-dependent-backward-branch, -bugprone-easily-swappable-parameters, -fuchsia-overloaded-operator, diff --git a/examples/.clang-tidy b/examples/.clang-tidy index 747eda25..897125e6 100644 --- a/examples/.clang-tidy +++ b/examples/.clang-tidy @@ -1,5 +1,6 @@ Checks: > *, + -altera-unroll-loops, -boost-use-ranges, -cppcoreguidelines-non-private-member-variables-in-classes, -fuchsia-default-arguments-calls, @@ -23,7 +24,6 @@ Checks: > -readability-magic-numbers WarningsAsErrors: > *, - -altera-unroll-loops, -altera-id-dependent-backward-branch, -bugprone-easily-swappable-parameters, -fuchsia-overloaded-operator, diff --git a/include/snn/neural_network/NeuralNetwork.hpp b/include/snn/neural_network/NeuralNetwork.hpp index 8b963a3c..5e77a0bf 100644 --- a/include/snn/neural_network/NeuralNetwork.hpp +++ b/include/snn/neural_network/NeuralNetwork.hpp @@ -74,12 +74,15 @@ class NeuralNetwork : public StatisticAnalysis [[nodiscard]] auto isValid() const -> errorType; + void resetLearningVariables(); + void trainOnce(const std::vector& inputs, const std::vector& desired, const std::vector& weighting = {}, bool temporalReset = true); auto operator==(const NeuralNetwork& neuralNetwork) const -> bool; auto operator!=(const NeuralNetwork& neuralNetwork) const -> bool; }; +static_assert(LearningObject); template void NeuralNetwork::serialize(Archive& archive, [[maybe_unused]] const uint32_t version) diff --git a/include/snn/neural_network/layer/BaseLayer.hpp b/include/snn/neural_network/layer/BaseLayer.hpp index 32a73622..fb36f52b 100644 --- a/include/snn/neural_network/layer/BaseLayer.hpp +++ b/include/snn/neural_network/layer/BaseLayer.hpp @@ -47,6 +47,8 @@ class BaseLayer [[nodiscard]] virtual auto isValid() const -> errorType = 0; + virtual void resetLearningVariables() = 0; + [[nodiscard]] virtual auto summary() const -> std::string = 0; virtual auto operator==(const BaseLayer& layer) const -> 
bool = 0; diff --git a/include/snn/neural_network/layer/Layer.hpp b/include/snn/neural_network/layer/Layer.hpp index 7596cb22..1f9a61bb 100644 --- a/include/snn/neural_network/layer/Layer.hpp +++ b/include/snn/neural_network/layer/Layer.hpp @@ -68,6 +68,8 @@ class Layer : public BaseLayer [[nodiscard]] auto isValid() const -> errorType override; + void resetLearningVariables() final; + auto operator==(const BaseLayer& layer) const -> bool override; auto operator!=(const BaseLayer& layer) const -> bool override; }; diff --git a/include/snn/neural_network/layer/Layer.tpp b/include/snn/neural_network/layer/Layer.tpp index f51ad5df..de353bff 100644 --- a/include/snn/neural_network/layer/Layer.tpp +++ b/include/snn/neural_network/layer/Layer.tpp @@ -150,6 +150,15 @@ auto Layer::getNumberOfParameters() const -> int return sum; } +template +void Layer::resetLearningVariables() +{ + for (auto& neuron : this->neurons) + { + neuron.resetLearningVariables(); + } +} + template auto Layer::operator==(const BaseLayer& layer) const -> bool { diff --git a/include/snn/neural_network/layer/neuron/BaseNeuron.hpp b/include/snn/neural_network/layer/neuron/BaseNeuron.hpp index 948d7957..26397c8b 100644 --- a/include/snn/neural_network/layer/neuron/BaseNeuron.hpp +++ b/include/snn/neural_network/layer/neuron/BaseNeuron.hpp @@ -2,6 +2,7 @@ #include #include "../../optimizer/NeuralNetworkOptimizer.hpp" +#include "LearningObject.hpp" namespace snn::internal { @@ -36,6 +37,6 @@ concept HasCommonConstMethods = requires(const N neuron) { }; template -concept BaseNeuron = - HasCommonMethods && HasCommonConstMethods && (HasNonTemporalOuputMethod || HasTemporalOuputMethod); +concept BaseNeuron = HasCommonMethods && HasCommonConstMethods && + (HasNonTemporalOuputMethod || HasTemporalOuputMethod) && LearningObject; } // namespace snn::internal diff --git a/include/snn/neural_network/layer/neuron/Circular.hpp b/include/snn/neural_network/layer/neuron/Circular.hpp index 14dc897e..b1f95d72 100644 --- 
a/include/snn/neural_network/layer/neuron/Circular.hpp +++ b/include/snn/neural_network/layer/neuron/Circular.hpp @@ -29,6 +29,7 @@ class Circular final ~Circular() = default; void initialize(size_t queueSize, size_t dataSize = 1); // Should be call after the ctor. + void reset(); // Do the same as initialize. [[nodiscard]] auto getBack() -> const T*; [[nodiscard]] auto getSum() const -> T; @@ -55,6 +56,12 @@ void Circular::initialize(size_t size, size_t dataSize); template <> void Circular>::initialize(size_t size, size_t dataSize); +template <> +void Circular::reset(); + +template <> +void Circular>::reset(); + template <> auto Circular::getSum() const -> float; diff --git a/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp b/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp index 5bb11767..bdcb3940 100644 --- a/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp +++ b/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp @@ -21,7 +21,6 @@ class GatedRecurrentUnit final int numberOfInputs{}; float previousOutput = 0; - float recurrentError = 0; float updateGateOutput = 0; float outputGateOutput = 0; @@ -51,6 +50,8 @@ class GatedRecurrentUnit final [[nodiscard]] auto getOptimizer() const -> NeuralNetworkOptimizer*; void setOptimizer(std::shared_ptr newOptimizer); + void resetLearningVariables(); + auto operator==(const GatedRecurrentUnit& neuron) const -> bool; auto operator!=(const GatedRecurrentUnit& neuron) const -> bool; }; @@ -59,12 +60,7 @@ static_assert(BaseNeuron); template void GatedRecurrentUnit::serialize(Archive& archive, [[maybe_unused]] const uint32_t version) { - archive& this->errors; archive& this->numberOfInputs; - archive& this->previousOutput; - archive& this->recurrentError; - archive& this->updateGateOutput; - archive& this->outputGateOutput; archive& this->resetGate; archive& this->updateGate; archive& this->outputGate; diff --git a/include/snn/neural_network/layer/neuron/LearningObject.hpp 
b/include/snn/neural_network/layer/neuron/LearningObject.hpp new file mode 100644 index 00000000..29754c6a --- /dev/null +++ b/include/snn/neural_network/layer/neuron/LearningObject.hpp @@ -0,0 +1,6 @@ +#include + +template +concept LearningObject = requires(T t) { + { t.resetLearningVariables() } -> std::same_as; +}; \ No newline at end of file diff --git a/include/snn/neural_network/layer/neuron/Neuron.hpp b/include/snn/neural_network/layer/neuron/Neuron.hpp index 1d054dea..45d24459 100644 --- a/include/snn/neural_network/layer/neuron/Neuron.hpp +++ b/include/snn/neural_network/layer/neuron/Neuron.hpp @@ -54,6 +54,8 @@ class Neuron [[nodiscard]] auto getOptimizer() const -> NeuralNetworkOptimizer*; void setOptimizer(std::shared_ptr newOptimizer); + void resetLearningVariables(); + auto operator==(const Neuron& neuron) const -> bool; auto operator!=(const Neuron& neuron) const -> bool; }; @@ -67,11 +69,6 @@ void Neuron::serialize(Archive& archive, [[maybe_unused]] const uint32_t version archive& this->batchSize; archive& this->weights; archive& this->bias; - archive& this->deltaWeights; - archive& this->lastInputs; - archive& this->lastError; - archive& this->lastSum; - archive& this->errors; archive& this->activationFunction; this->outputFunction = ActivationFunction::get(activationFunction); } diff --git a/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp b/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp index f1157904..00450a36 100644 --- a/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp +++ b/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp @@ -2,6 +2,7 @@ #include #include +#include "BaseNeuron.hpp" #include "Neuron.hpp" namespace snn::internal @@ -35,6 +36,8 @@ class RecurrentNeuron final : public Neuron [[nodiscard]] auto isValid() const -> errorType; + void resetLearningVariables(); + auto operator==(const RecurrentNeuron& neuron) const -> bool; auto operator!=(const RecurrentNeuron& neuron) const -> bool; }; @@ 
-45,9 +48,5 @@ void RecurrentNeuron::serialize(Archive& archive, [[maybe_unused]] const uint32_ { boost::serialization::void_cast_register(); archive& boost::serialization::base_object(*this); - archive& this->lastOutput; - archive& this->previousOutput; - archive& this->recurrentError; - archive& this->previousSum; } } // namespace snn::internal diff --git a/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp b/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp index 06459376..376b3b1b 100644 --- a/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp +++ b/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp @@ -2,6 +2,7 @@ #include #include +#include "BaseNeuron.hpp" #include "Neuron.hpp" namespace snn::internal diff --git a/src/neural_network/NeuralNetwork.cpp b/src/neural_network/NeuralNetwork.cpp index f1dfe247..2d261a82 100644 --- a/src/neural_network/NeuralNetwork.cpp +++ b/src/neural_network/NeuralNetwork.cpp @@ -210,6 +210,14 @@ auto NeuralNetwork::isValid() const -> errorType return errorType::noError; } +void NeuralNetwork::resetLearningVariables() +{ + for (const auto& layer : this->layers) + { + layer->resetLearningVariables(); + } +} + auto NeuralNetwork::operator==(const NeuralNetwork& neuralNetwork) const -> bool { return *this->optimizer == *neuralNetwork.optimizer && this->layers.size() == neuralNetwork.layers.size() && diff --git a/src/neural_network/StraightforwardNeuralNetwork.cpp b/src/neural_network/StraightforwardNeuralNetwork.cpp index 2a01d009..91c613c1 100644 --- a/src/neural_network/StraightforwardNeuralNetwork.cpp +++ b/src/neural_network/StraightforwardNeuralNetwork.cpp @@ -340,6 +340,7 @@ auto StraightforwardNeuralNetwork::loadFrom(const std::string& filePath) -> Stra std::ifstream ifs(filePath); boost::archive::text_iarchive archive(ifs); archive >> neuralNetwork; + neuralNetwork->resetLearningVariables(); return *neuralNetwork; } diff --git a/src/neural_network/layer/Layer.cpp b/src/neural_network/layer/Layer.cpp 
index cccbfcf3..d8029ae0 100644 --- a/src/neural_network/layer/Layer.cpp +++ b/src/neural_network/layer/Layer.cpp @@ -9,4 +9,7 @@ namespace snn::internal template class Layer; template class Layer; template class Layer; +static_assert(LearningObject>); +static_assert(LearningObject>); +static_assert(LearningObject>); } // namespace snn::internal diff --git a/src/neural_network/layer/neuron/Circular.cpp b/src/neural_network/layer/neuron/Circular.cpp index 4f60c485..bb578788 100644 --- a/src/neural_network/layer/neuron/Circular.cpp +++ b/src/neural_network/layer/neuron/Circular.cpp @@ -13,15 +13,17 @@ template <> void Circular::initialize(const size_t size, [[maybe_unused]] const size_t dataSize) { assert(dataSize == 1); + this->indexPush = 0; + this->indexGet = 0; this->divider = static_cast(size); - this->queue.clear(); - this->queue.resize(size); + this->queue.assign(size, 0.0F); } template <> void Circular>::initialize(const size_t size, const size_t dataSize) { + this->indexPush = 0; + this->indexGet = 0; this->divider = static_cast(size); - this->queue.clear(); this->queue.resize(size); for (auto& d : this->queue) { @@ -29,6 +31,25 @@ void Circular>::initialize(const size_t size, const size_t da } } +template <> +void Circular::reset() +{ + this->indexPush = 0; + this->indexGet = 0; + std::ranges::fill(this->queue, 0.0F); +} + +template <> +void Circular>::reset() +{ + this->indexPush = 0; + this->indexGet = 0; + for (auto& d : this->queue) + { + std::ranges::fill(d, 0.0F); + } +} + template <> auto Circular>::getSum() const -> std::vector { diff --git a/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp b/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp index 0fe18507..d5cbf162 100644 --- a/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp +++ b/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp @@ -96,8 +96,8 @@ auto GatedRecurrentUnit::getNumberOfInputs() const -> int { return this->numberO inline void GatedRecurrentUnit::reset() { 
this->previousOutput = 0; - this->recurrentError = 0; this->updateGateOutput = 0; + this->outputGateOutput = 0; } auto GatedRecurrentUnit::isValid() const -> errorType @@ -129,12 +129,20 @@ void GatedRecurrentUnit::setOptimizer(std::shared_ptr ne this->outputGate.setOptimizer(newOptimizer); } +void GatedRecurrentUnit::resetLearningVariables() +{ + this->reset(); + this->resetGate.resetLearningVariables(); + this->updateGate.resetLearningVariables(); + this->outputGate.resetLearningVariables(); +} + auto GatedRecurrentUnit::operator==(const GatedRecurrentUnit& neuron) const -> bool { return this->numberOfInputs == neuron.numberOfInputs && this->previousOutput == neuron.previousOutput && - this->recurrentError == neuron.recurrentError && this->updateGateOutput == neuron.updateGateOutput && - this->outputGateOutput == neuron.outputGateOutput && this->resetGate == neuron.resetGate && - this->updateGate == neuron.updateGate && this->outputGate == neuron.outputGate; + this->updateGateOutput == neuron.updateGateOutput && this->outputGateOutput == neuron.outputGateOutput && + this->resetGate == neuron.resetGate && this->updateGate == neuron.updateGate && + this->outputGate == neuron.outputGate; } auto GatedRecurrentUnit::operator!=(const GatedRecurrentUnit& neuron) const -> bool { return !(*this == neuron); } diff --git a/src/neural_network/layer/neuron/Neuron.cpp b/src/neural_network/layer/neuron/Neuron.cpp index 7250a9d4..c474f121 100644 --- a/src/neural_network/layer/neuron/Neuron.cpp +++ b/src/neural_network/layer/neuron/Neuron.cpp @@ -14,7 +14,6 @@ Neuron::Neuron(NeuronModel model, std::shared_ptr optimi optimizer(std::move(optimizer)) { - this->errors.resize(model.numberOfInputs, 0); this->outputFunction = ActivationFunction::get(this->activationFunction); this->weights.resize(model.numberOfWeights); for (auto& weight : this->weights) @@ -22,10 +21,7 @@ Neuron::Neuron(NeuronModel model, std::shared_ptr optimi weight = randomInitializeWeight(model.numberOfWeights); } 
this->weights.back() = std::abs(this->weights.back()); - this->lastInputs.initialize(this->batchSize, model.numberOfInputs); - this->lastError.initialize(this->batchSize); - this->lastSum.initialize(this->batchSize); - this->deltaWeights.resize(model.numberOfWeights, 0); + this->resetLearningVariables(); } auto Neuron::randomInitializeWeight(int numberOfWeights) -> float @@ -79,15 +75,23 @@ void Neuron::setOptimizer(std::shared_ptr newOptimizer) this->optimizer = std::move(newOptimizer); } +void Neuron::resetLearningVariables() +{ + this->deltaWeights.assign(this->weights.size(), 0.0F); + this->errors.assign(this->numberOfInputs, 0.0F); + this->lastInputs.initialize(this->batchSize, this->numberOfInputs); + this->lastError.initialize(this->batchSize); + this->lastSum.initialize(this->batchSize); +} + auto Neuron::operator==(const Neuron& neuron) const -> bool { return typeid(*this).hash_code() == typeid(neuron).hash_code() && this->numberOfInputs == neuron.numberOfInputs && this->weights == neuron.weights && this->bias == neuron.bias && this->deltaWeights == neuron.deltaWeights && this->lastInputs == neuron.lastInputs && this->lastError == neuron.lastError && this->lastSum == neuron.lastSum && this->errors == neuron.errors && - this->activationFunction == neuron.activationFunction && - this->outputFunction == neuron.outputFunction // not really good - && *this->optimizer == *neuron.optimizer; + this->activationFunction == neuron.activationFunction && this->outputFunction == neuron.outputFunction && + *this->optimizer == *neuron.optimizer; } auto Neuron::operator!=(const Neuron& Neuron) const -> bool { return !(*this == Neuron); } diff --git a/src/neural_network/layer/neuron/RecurrentNeuron.cpp b/src/neural_network/layer/neuron/RecurrentNeuron.cpp index 5c4c9496..6fa98514 100644 --- a/src/neural_network/layer/neuron/RecurrentNeuron.cpp +++ b/src/neural_network/layer/neuron/RecurrentNeuron.cpp @@ -78,6 +78,15 @@ auto RecurrentNeuron::isValid() const -> errorType 
return this->Neuron::isValid(); } +void RecurrentNeuron::resetLearningVariables() +{ + this->Neuron::resetLearningVariables(); + this->lastOutput = 0; + this->previousOutput = 0; + this->recurrentError = 0; + this->previousSum = 0; +} + auto RecurrentNeuron::operator==(const RecurrentNeuron& neuron) const -> bool { return this->Neuron::operator==(neuron) && this->lastOutput == neuron.lastOutput && diff --git a/tests/.clang-tidy b/tests/.clang-tidy index 747eda25..897125e6 100644 --- a/tests/.clang-tidy +++ b/tests/.clang-tidy @@ -1,5 +1,6 @@ Checks: > *, + -altera-unroll-loops, -boost-use-ranges, -cppcoreguidelines-non-private-member-variables-in-classes, -fuchsia-default-arguments-calls, @@ -23,7 +24,6 @@ Checks: > -readability-magic-numbers WarningsAsErrors: > *, - -altera-unroll-loops, -altera-id-dependent-backward-branch, -bugprone-easily-swappable-parameters, -fuchsia-overloaded-operator, diff --git a/tests/unit_tests/SaveNeuralNetworkTests.cpp b/tests/unit_tests/SaveNeuralNetworkTests.cpp index 00c44a98..5d0af199 100644 --- a/tests/unit_tests/SaveNeuralNetworkTests.cpp +++ b/tests/unit_tests/SaveNeuralNetworkTests.cpp @@ -99,7 +99,7 @@ TEST(SaveNeuralNetwork, EqualTestWithDropout) EXPECT_TRUE(A.getWeightedClusteringRate() == B.getWeightedClusteringRate()) << "A == B"; } -TEST(SaveNeuralNetwork, Save) // TODO(matth): do a forward to be sure that the network is the same. +TEST(SaveNeuralNetwork, Save) { StraightforwardNeuralNetwork A( {Input(45), MaxPooling(3), Convolution(2, 2, activation::ReLU), @@ -110,17 +110,26 @@ TEST(SaveNeuralNetwork, Save) // TODO(matth): do a forward to be sure that the auto randomInput = tools::randomVector(0, 2, 45); auto randomOutput = tools::randomVector(0, 1, 2); + A.trainOnce(randomInput, randomOutput); // To update learning variables. + A.trainOnce(randomInput, randomOutput); // To update learning variables of GRUs. 
+ A.saveAs("./testSave.tmp"); + auto B = StraightforwardNeuralNetwork::loadFrom("./testSave.tmp"); + + EXPECT_TRUE(A != B); // Learning variables are not saved. + + A.resetLearningVariables(); + + EXPECT_TRUE(A == B); // All learning variables should be 0. + A.trainOnce(randomInput, randomOutput); auto outputA = A.computeOutput(randomInput); - StraightforwardNeuralNetwork B = StraightforwardNeuralNetwork::loadFrom("./testSave.tmp"); - B.trainOnce(randomInput, randomOutput); auto outputB = B.computeOutput(randomInput); EXPECT_TRUE(A == B); - EXPECT_TRUE(sizeof(A) == sizeof(B)); // Don't count pointing objects by pointer + EXPECT_TRUE(sizeof(A) == sizeof(B)); // Don't count pointing objects by pointer. EXPECT_TRUE(outputA == outputB); ASSERT_EQ(B.isValid(), errorType::noError); } From d55f4687ffb7be0af5d003425e0498b985118881 Mon Sep 17 00:00:00 2001 From: Matthieu HERNANDEZ Date: Wed, 13 Aug 2025 00:31:28 +0200 Subject: [PATCH 3/6] Remove operator!= Use some default operator==. --- include/snn/neural_network/NeuralNetwork.hpp | 1 - .../snn/neural_network/StatisticAnalysis.hpp | 3 +-- .../StraightforwardNeuralNetwork.hpp | 1 - .../neural_network/binary_classification.hpp | 1 + .../snn/neural_network/layer/BaseLayer.hpp | 1 - .../neural_network/layer/Convolution1D.hpp | 1 - .../neural_network/layer/Convolution2D.hpp | 1 - .../snn/neural_network/layer/FilterLayer.hpp | 1 - include/snn/neural_network/layer/Layer.hpp | 1 - include/snn/neural_network/layer/Layer.tpp | 6 ------ .../layer/LocallyConnected1D.hpp | 1 - .../layer/LocallyConnected2D.hpp | 1 - .../snn/neural_network/layer/MaxPooling1D.hpp | 1 - .../snn/neural_network/layer/MaxPooling2D.hpp | 1 - .../snn/neural_network/layer/SimpleLayer.hpp | 1 - .../snn/neural_network/layer/SimpleLayer.tpp | 6 ------ .../layer/neuron/BaseNeuron.hpp | 1 - .../layer/neuron/GatedRecurrentUnit.hpp | 1 - .../neural_network/layer/neuron/Neuron.hpp | 1 - .../layer/neuron/RecurrentNeuron.hpp | 3 +-- .../layer/neuron/SimpleNeuron.hpp | 1 
- .../ActivationFunction.hpp | 1 - .../snn/neural_network/optimizer/Dropout.hpp | 1 - .../optimizer/ErrorMultiplier.hpp | 1 - .../optimizer/L1Regularization.hpp | 1 - .../optimizer/L2Regularization.hpp | 1 - .../optimizer/LayerOptimizer.hpp | 1 - .../optimizer/NeuralNetworkOptimizer.hpp | 1 - .../snn/neural_network/optimizer/Softmax.hpp | 1 - .../optimizer/StochasticGradientDescent.hpp | 1 - src/neural_network/NeuralNetwork.cpp | 2 -- src/neural_network/StatisticAnalysis.cpp | 21 ------------------- .../StraightforwardNeuralNetwork.cpp | 5 ----- src/neural_network/layer/Convolution1D.cpp | 2 -- src/neural_network/layer/Convolution2D.cpp | 2 -- src/neural_network/layer/FilterLayer.cpp | 2 -- .../layer/LocallyConnected1D.cpp | 2 -- .../layer/LocallyConnected2D.cpp | 2 -- src/neural_network/layer/MaxPooling1D.cpp | 2 -- src/neural_network/layer/MaxPooling2D.cpp | 2 -- .../layer/neuron/GatedRecurrentUnit.cpp | 4 +--- src/neural_network/layer/neuron/Neuron.cpp | 2 -- .../layer/neuron/RecurrentNeuron.cpp | 11 +--------- .../layer/neuron/SimpleNeuron.cpp | 2 -- .../ActivationFunction.cpp | 5 ----- src/neural_network/optimizer/Dropout.cpp | 2 -- .../optimizer/ErrorMultiplier.cpp | 2 -- .../optimizer/L1Regularization.cpp | 2 -- .../optimizer/L2Regularization.cpp | 4 +--- .../optimizer/LayerOptimizer.cpp | 2 -- .../optimizer/NeuralNetworkOptimizer.cpp | 5 ----- src/neural_network/optimizer/Softmax.cpp | 2 -- .../optimizer/StochasticGradientDescent.cpp | 5 ----- 53 files changed, 6 insertions(+), 128 deletions(-) diff --git a/include/snn/neural_network/NeuralNetwork.hpp b/include/snn/neural_network/NeuralNetwork.hpp index 5e77a0bf..c15cb8ee 100644 --- a/include/snn/neural_network/NeuralNetwork.hpp +++ b/include/snn/neural_network/NeuralNetwork.hpp @@ -80,7 +80,6 @@ class NeuralNetwork : public StatisticAnalysis const std::vector& weighting = {}, bool temporalReset = true); auto operator==(const NeuralNetwork& neuralNetwork) const -> bool; - auto operator!=(const 
NeuralNetwork& neuralNetwork) const -> bool; }; static_assert(LearningObject); diff --git a/include/snn/neural_network/StatisticAnalysis.hpp b/include/snn/neural_network/StatisticAnalysis.hpp index 6df082ca..6fa30a1d 100644 --- a/include/snn/neural_network/StatisticAnalysis.hpp +++ b/include/snn/neural_network/StatisticAnalysis.hpp @@ -76,8 +76,7 @@ class StatisticAnalysis [[nodiscard]] auto getMeanAbsoluteErrorMin() const -> float; [[nodiscard]] auto getRootMeanSquaredErrorMin() const -> float; - auto operator==(const StatisticAnalysis& other) const -> bool; - auto operator!=(const StatisticAnalysis& other) const -> bool; + auto operator==(const StatisticAnalysis& other) const -> bool = default; }; template diff --git a/include/snn/neural_network/StraightforwardNeuralNetwork.hpp b/include/snn/neural_network/StraightforwardNeuralNetwork.hpp index 4761c285..c8930758 100644 --- a/include/snn/neural_network/StraightforwardNeuralNetwork.hpp +++ b/include/snn/neural_network/StraightforwardNeuralNetwork.hpp @@ -93,7 +93,6 @@ class StraightforwardNeuralNetwork final : public internal::NeuralNetwork } auto operator==(const StraightforwardNeuralNetwork& neuralNetwork) const -> bool; - auto operator!=(const StraightforwardNeuralNetwork& neuralNetwork) const -> bool; }; template diff --git a/include/snn/neural_network/binary_classification.hpp b/include/snn/neural_network/binary_classification.hpp index 3b2b4d37..6dd90d49 100644 --- a/include/snn/neural_network/binary_classification.hpp +++ b/include/snn/neural_network/binary_classification.hpp @@ -21,6 +21,7 @@ struct binaryClassification archive& this->trueNegative; archive& this->falsePositive; archive& this->falseNegative; + archive& this->totalError; } }; } // namespace snn::internal \ No newline at end of file diff --git a/include/snn/neural_network/layer/BaseLayer.hpp b/include/snn/neural_network/layer/BaseLayer.hpp index fb36f52b..cbe66bf6 100644 --- a/include/snn/neural_network/layer/BaseLayer.hpp +++ 
b/include/snn/neural_network/layer/BaseLayer.hpp @@ -52,6 +52,5 @@ class BaseLayer [[nodiscard]] virtual auto summary() const -> std::string = 0; virtual auto operator==(const BaseLayer& layer) const -> bool = 0; - virtual auto operator!=(const BaseLayer& layer) const -> bool = 0; }; } // namespace snn::internal diff --git a/include/snn/neural_network/layer/Convolution1D.hpp b/include/snn/neural_network/layer/Convolution1D.hpp index 5bf40e36..52de02e7 100644 --- a/include/snn/neural_network/layer/Convolution1D.hpp +++ b/include/snn/neural_network/layer/Convolution1D.hpp @@ -29,7 +29,6 @@ class Convolution1D final : public Convolution [[nodiscard]] auto summary() const -> std::string final; auto operator==(const BaseLayer& layer) const -> bool final; - auto operator!=(const BaseLayer& layer) const -> bool final; }; template diff --git a/include/snn/neural_network/layer/Convolution2D.hpp b/include/snn/neural_network/layer/Convolution2D.hpp index 7beec111..b0e7b142 100644 --- a/include/snn/neural_network/layer/Convolution2D.hpp +++ b/include/snn/neural_network/layer/Convolution2D.hpp @@ -29,7 +29,6 @@ class Convolution2D final : public Convolution [[nodiscard]] auto summary() const -> std::string final; auto operator==(const BaseLayer& layer) const -> bool final; - auto operator!=(const BaseLayer& layer) const -> bool final; }; template diff --git a/include/snn/neural_network/layer/FilterLayer.hpp b/include/snn/neural_network/layer/FilterLayer.hpp index 288f08e7..2943b224 100644 --- a/include/snn/neural_network/layer/FilterLayer.hpp +++ b/include/snn/neural_network/layer/FilterLayer.hpp @@ -41,7 +41,6 @@ class FilterLayer : public Layer [[nodiscard]] auto isValid() const -> errorType override; auto operator==(const BaseLayer& layer) const -> bool override; - auto operator!=(const BaseLayer& layer) const -> bool override; }; template diff --git a/include/snn/neural_network/layer/Layer.hpp b/include/snn/neural_network/layer/Layer.hpp index 1f9a61bb..0dd01450 100644 --- 
a/include/snn/neural_network/layer/Layer.hpp +++ b/include/snn/neural_network/layer/Layer.hpp @@ -71,7 +71,6 @@ class Layer : public BaseLayer void resetLearningVariables() final; auto operator==(const BaseLayer& layer) const -> bool override; - auto operator!=(const BaseLayer& layer) const -> bool override; }; template diff --git a/include/snn/neural_network/layer/Layer.tpp b/include/snn/neural_network/layer/Layer.tpp index de353bff..61185cb2 100644 --- a/include/snn/neural_network/layer/Layer.tpp +++ b/include/snn/neural_network/layer/Layer.tpp @@ -184,10 +184,4 @@ auto Layer::operator==(const BaseLayer& layer) const -> bool return false; } } - -template -auto Layer::operator!=(const BaseLayer& layer) const -> bool -{ - return !(*this == layer); -} } // namespace snn::internal diff --git a/include/snn/neural_network/layer/LocallyConnected1D.hpp b/include/snn/neural_network/layer/LocallyConnected1D.hpp index 22ad106d..8e430a18 100644 --- a/include/snn/neural_network/layer/LocallyConnected1D.hpp +++ b/include/snn/neural_network/layer/LocallyConnected1D.hpp @@ -34,7 +34,6 @@ class LocallyConnected1D final : public FilterLayer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const BaseLayer& layer) const -> bool final; - auto operator!=(const BaseLayer& layer) const -> bool final; }; template diff --git a/include/snn/neural_network/layer/LocallyConnected2D.hpp b/include/snn/neural_network/layer/LocallyConnected2D.hpp index 86f66484..1cd05441 100644 --- a/include/snn/neural_network/layer/LocallyConnected2D.hpp +++ b/include/snn/neural_network/layer/LocallyConnected2D.hpp @@ -34,7 +34,6 @@ class LocallyConnected2D final : public FilterLayer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const BaseLayer& layer) const -> bool final; - auto operator!=(const BaseLayer& layer) const -> bool final; }; template diff --git a/include/snn/neural_network/layer/MaxPooling1D.hpp b/include/snn/neural_network/layer/MaxPooling1D.hpp 
index 95d028f0..ee3819d5 100644 --- a/include/snn/neural_network/layer/MaxPooling1D.hpp +++ b/include/snn/neural_network/layer/MaxPooling1D.hpp @@ -37,7 +37,6 @@ class MaxPooling1D final : public FilterLayer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const BaseLayer& layer) const -> bool final; - auto operator!=(const BaseLayer& layer) const -> bool final; }; template diff --git a/include/snn/neural_network/layer/MaxPooling2D.hpp b/include/snn/neural_network/layer/MaxPooling2D.hpp index 19292c73..ce5ad091 100644 --- a/include/snn/neural_network/layer/MaxPooling2D.hpp +++ b/include/snn/neural_network/layer/MaxPooling2D.hpp @@ -37,7 +37,6 @@ class MaxPooling2D final : public FilterLayer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const BaseLayer& layer) const -> bool final; - auto operator!=(const BaseLayer& layer) const -> bool final; }; template diff --git a/include/snn/neural_network/layer/SimpleLayer.hpp b/include/snn/neural_network/layer/SimpleLayer.hpp index 816d4884..f0812fff 100644 --- a/include/snn/neural_network/layer/SimpleLayer.hpp +++ b/include/snn/neural_network/layer/SimpleLayer.hpp @@ -35,7 +35,6 @@ class SimpleLayer : public Layer [[nodiscard]] auto isValid() const -> errorType final; auto operator==(const BaseLayer& layer) const -> bool override; - auto operator!=(const BaseLayer& layer) const -> bool override; }; template diff --git a/include/snn/neural_network/layer/SimpleLayer.tpp b/include/snn/neural_network/layer/SimpleLayer.tpp index e0ca26c6..78e1ee25 100644 --- a/include/snn/neural_network/layer/SimpleLayer.tpp +++ b/include/snn/neural_network/layer/SimpleLayer.tpp @@ -89,10 +89,4 @@ auto SimpleLayer::operator==(const BaseLayer& layer) const -> bool { return Layer::operator==(layer); } - -template -auto SimpleLayer::operator!=(const BaseLayer& layer) const -> bool -{ - return !(*this == layer); -} } // namespace snn::internal diff --git 
a/include/snn/neural_network/layer/neuron/BaseNeuron.hpp b/include/snn/neural_network/layer/neuron/BaseNeuron.hpp index 26397c8b..19f900db 100644 --- a/include/snn/neural_network/layer/neuron/BaseNeuron.hpp +++ b/include/snn/neural_network/layer/neuron/BaseNeuron.hpp @@ -33,7 +33,6 @@ concept HasCommonConstMethods = requires(const N neuron) { { neuron.getNumberOfInputs() } -> std::same_as; { neuron.operator==(neuron) } -> std::same_as; - { neuron.operator!=(neuron) } -> std::same_as; }; template diff --git a/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp b/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp index bdcb3940..364cc41f 100644 --- a/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp +++ b/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp @@ -53,7 +53,6 @@ class GatedRecurrentUnit final void resetLearningVariables(); auto operator==(const GatedRecurrentUnit& neuron) const -> bool; - auto operator!=(const GatedRecurrentUnit& neuron) const -> bool; }; static_assert(BaseNeuron); diff --git a/include/snn/neural_network/layer/neuron/Neuron.hpp b/include/snn/neural_network/layer/neuron/Neuron.hpp index 45d24459..2a7a105a 100644 --- a/include/snn/neural_network/layer/neuron/Neuron.hpp +++ b/include/snn/neural_network/layer/neuron/Neuron.hpp @@ -57,7 +57,6 @@ class Neuron void resetLearningVariables(); auto operator==(const Neuron& neuron) const -> bool; - auto operator!=(const Neuron& neuron) const -> bool; }; template diff --git a/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp b/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp index 00450a36..e1dc8a37 100644 --- a/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp +++ b/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp @@ -38,8 +38,7 @@ class RecurrentNeuron final : public Neuron void resetLearningVariables(); - auto operator==(const RecurrentNeuron& neuron) const -> bool; - auto operator!=(const RecurrentNeuron& 
neuron) const -> bool; + auto operator==(const RecurrentNeuron& neuron) const -> bool = default; }; static_assert(BaseNeuron); diff --git a/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp b/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp index 376b3b1b..a87835b8 100644 --- a/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp +++ b/include/snn/neural_network/layer/neuron/SimpleNeuron.hpp @@ -29,7 +29,6 @@ class SimpleNeuron final : public Neuron [[nodiscard]] auto isValid() const -> errorType; auto operator==(const SimpleNeuron& neuron) const -> bool; - auto operator!=(const SimpleNeuron& neuron) const -> bool; }; static_assert(BaseNeuron); diff --git a/include/snn/neural_network/layer/neuron/activation_function/ActivationFunction.hpp b/include/snn/neural_network/layer/neuron/activation_function/ActivationFunction.hpp index 3160fe02..d8b605db 100644 --- a/include/snn/neural_network/layer/neuron/activation_function/ActivationFunction.hpp +++ b/include/snn/neural_network/layer/neuron/activation_function/ActivationFunction.hpp @@ -48,6 +48,5 @@ class ActivationFunction [[nodiscard]] virtual auto getName() const -> std::string = 0; virtual auto operator==(const ActivationFunction& activationFunction) const -> bool; - virtual auto operator!=(const ActivationFunction& activationFunction) const -> bool; }; } // namespace snn::internal diff --git a/include/snn/neural_network/optimizer/Dropout.hpp b/include/snn/neural_network/optimizer/Dropout.hpp index b29af322..6ff57128 100644 --- a/include/snn/neural_network/optimizer/Dropout.hpp +++ b/include/snn/neural_network/optimizer/Dropout.hpp @@ -36,7 +36,6 @@ class Dropout final : public LayerOptimizer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const LayerOptimizer& optimizer) const -> bool final; - auto operator!=(const LayerOptimizer& optimizer) const -> bool final; }; template diff --git a/include/snn/neural_network/optimizer/ErrorMultiplier.hpp 
b/include/snn/neural_network/optimizer/ErrorMultiplier.hpp index 84905ee6..8cfc1318 100644 --- a/include/snn/neural_network/optimizer/ErrorMultiplier.hpp +++ b/include/snn/neural_network/optimizer/ErrorMultiplier.hpp @@ -35,7 +35,6 @@ class ErrorMultiplier final : public LayerOptimizer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const LayerOptimizer& optimizer) const -> bool final; - auto operator!=(const LayerOptimizer& optimizer) const -> bool final; }; template diff --git a/include/snn/neural_network/optimizer/L1Regularization.hpp b/include/snn/neural_network/optimizer/L1Regularization.hpp index 0b1b4d04..f2508e92 100644 --- a/include/snn/neural_network/optimizer/L1Regularization.hpp +++ b/include/snn/neural_network/optimizer/L1Regularization.hpp @@ -35,7 +35,6 @@ class L1Regularization final : public LayerOptimizer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const LayerOptimizer& optimizer) const -> bool final; - auto operator!=(const LayerOptimizer& optimizer) const -> bool final; }; template diff --git a/include/snn/neural_network/optimizer/L2Regularization.hpp b/include/snn/neural_network/optimizer/L2Regularization.hpp index dc0efecc..ac43a649 100644 --- a/include/snn/neural_network/optimizer/L2Regularization.hpp +++ b/include/snn/neural_network/optimizer/L2Regularization.hpp @@ -33,7 +33,6 @@ class L2Regularization final : public LayerOptimizer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const LayerOptimizer& optimizer) const -> bool final; - auto operator!=(const LayerOptimizer& optimizer) const -> bool final; }; template diff --git a/include/snn/neural_network/optimizer/LayerOptimizer.hpp b/include/snn/neural_network/optimizer/LayerOptimizer.hpp index aeb7f2da..86680ba0 100644 --- a/include/snn/neural_network/optimizer/LayerOptimizer.hpp +++ b/include/snn/neural_network/optimizer/LayerOptimizer.hpp @@ -32,7 +32,6 @@ class LayerOptimizer [[nodiscard]] virtual auto 
summary() const -> std::string = 0; virtual auto operator==(const LayerOptimizer& optimizer) const -> bool; - virtual auto operator!=(const LayerOptimizer& optimizer) const -> bool; }; template diff --git a/include/snn/neural_network/optimizer/NeuralNetworkOptimizer.hpp b/include/snn/neural_network/optimizer/NeuralNetworkOptimizer.hpp index 00089ece..b99b33c5 100644 --- a/include/snn/neural_network/optimizer/NeuralNetworkOptimizer.hpp +++ b/include/snn/neural_network/optimizer/NeuralNetworkOptimizer.hpp @@ -32,6 +32,5 @@ class NeuralNetworkOptimizer [[nodiscard]] virtual auto summary() const -> std::string = 0; virtual auto operator==(const NeuralNetworkOptimizer& optimizer) const -> bool; - virtual auto operator!=(const NeuralNetworkOptimizer& optimizer) const -> bool; }; } // namespace snn::internal diff --git a/include/snn/neural_network/optimizer/Softmax.hpp b/include/snn/neural_network/optimizer/Softmax.hpp index dcc14855..b8926a09 100644 --- a/include/snn/neural_network/optimizer/Softmax.hpp +++ b/include/snn/neural_network/optimizer/Softmax.hpp @@ -32,7 +32,6 @@ class Softmax final : public LayerOptimizer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const LayerOptimizer& optimizer) const -> bool final; - auto operator!=(const LayerOptimizer& optimizer) const -> bool final; }; template diff --git a/include/snn/neural_network/optimizer/StochasticGradientDescent.hpp b/include/snn/neural_network/optimizer/StochasticGradientDescent.hpp index 4aa6a231..a900be76 100644 --- a/include/snn/neural_network/optimizer/StochasticGradientDescent.hpp +++ b/include/snn/neural_network/optimizer/StochasticGradientDescent.hpp @@ -32,7 +32,6 @@ class StochasticGradientDescent final : public NeuralNetworkOptimizer [[nodiscard]] auto summary() const -> std::string final; auto operator==(const NeuralNetworkOptimizer& optimizer) const -> bool final; - auto operator!=(const NeuralNetworkOptimizer& optimizer) const -> bool final; }; template diff --git 
a/src/neural_network/NeuralNetwork.cpp b/src/neural_network/NeuralNetwork.cpp index 2d261a82..98ba3f6f 100644 --- a/src/neural_network/NeuralNetwork.cpp +++ b/src/neural_network/NeuralNetwork.cpp @@ -233,6 +233,4 @@ auto NeuralNetwork::operator==(const NeuralNetwork& neuralNetwork) const -> bool return true; }(); } - -auto NeuralNetwork::operator!=(const NeuralNetwork& neuralNetwork) const -> bool { return !(*this == neuralNetwork); } } // namespace snn::internal diff --git a/src/neural_network/StatisticAnalysis.cpp b/src/neural_network/StatisticAnalysis.cpp index 51f69c31..b76798d0 100644 --- a/src/neural_network/StatisticAnalysis.cpp +++ b/src/neural_network/StatisticAnalysis.cpp @@ -290,25 +290,4 @@ auto StatisticAnalysis::getF1ScoreMax() const -> float { return this->f1ScoreMax auto StatisticAnalysis::getMeanAbsoluteErrorMin() const -> float { return this->meanAbsoluteErrorMin; } auto StatisticAnalysis::getRootMeanSquaredErrorMin() const -> float { return this->rootMeanSquaredErrorMin; } - -auto StatisticAnalysis::operator==(const StatisticAnalysis& other) const -> bool -{ - return this->clusters == other.clusters && this->numberOfDataWellClassified == other.numberOfDataWellClassified && - this->numberOfDataMisclassified == other.numberOfDataMisclassified && - this->globalClusteringRate == other.globalClusteringRate && - this->weightedClusteringRate == other.weightedClusteringRate && this->f1Score == other.f1Score && - this->meanAbsoluteError == other.meanAbsoluteError && - this->rootMeanSquaredError == other.rootMeanSquaredError && - this->globalClusteringRateMax == other.globalClusteringRateMax && - this->weightedClusteringRateMax == other.weightedClusteringRateMax && this->f1ScoreMax == other.f1ScoreMax && - this->meanAbsoluteErrorMin == other.meanAbsoluteErrorMin && - this->rootMeanSquaredErrorMin == other.rootMeanSquaredErrorMin && - this->globalClusteringRateIsBetterThanMax == other.globalClusteringRateIsBetterThanMax && - 
this->weightedClusteringRateIsBetterThanMax == other.weightedClusteringRateIsBetterThanMax && - this->f1ScoreIsBetterThanMax == other.f1ScoreIsBetterThanMax && - this->meanAbsoluteErrorIsBetterThanMin == other.meanAbsoluteErrorIsBetterThanMin && - this->rootMeanSquaredErrorIsBetterThanMin == other.rootMeanSquaredErrorIsBetterThanMin; -} - -auto StatisticAnalysis::operator!=(const StatisticAnalysis& other) const -> bool { return !(*this == other); } } // namespace snn::internal diff --git a/src/neural_network/StraightforwardNeuralNetwork.cpp b/src/neural_network/StraightforwardNeuralNetwork.cpp index 91c613c1..6ab0ef78 100644 --- a/src/neural_network/StraightforwardNeuralNetwork.cpp +++ b/src/neural_network/StraightforwardNeuralNetwork.cpp @@ -377,9 +377,4 @@ auto StraightforwardNeuralNetwork::operator==(const StraightforwardNeuralNetwork this->isIdle == neuralNetwork.isIdle && this->epoch == neuralNetwork.epoch && this->numberOfTrainingsBetweenTwoEvaluations == neuralNetwork.numberOfTrainingsBetweenTwoEvaluations; } - -auto StraightforwardNeuralNetwork::operator!=(const StraightforwardNeuralNetwork& neuralNetwork) const -> bool -{ - return !(*this == neuralNetwork); -} } // namespace snn diff --git a/src/neural_network/layer/Convolution1D.cpp b/src/neural_network/layer/Convolution1D.cpp index 69aa698b..31c31ff8 100644 --- a/src/neural_network/layer/Convolution1D.cpp +++ b/src/neural_network/layer/Convolution1D.cpp @@ -94,6 +94,4 @@ inline auto Convolution1D::operator==(const BaseLayer& layer) const -> bool { return this->FilterLayer::operator==(layer); } - -inline auto Convolution1D::operator!=(const BaseLayer& layer) const -> bool { return !(*this == layer); } } // namespace snn::internal diff --git a/src/neural_network/layer/Convolution2D.cpp b/src/neural_network/layer/Convolution2D.cpp index 80c3b478..b56aa20e 100644 --- a/src/neural_network/layer/Convolution2D.cpp +++ b/src/neural_network/layer/Convolution2D.cpp @@ -114,6 +114,4 @@ inline auto 
Convolution2D::operator==(const BaseLayer& layer) const -> bool return false; } } - -inline auto Convolution2D::operator!=(const BaseLayer& layer) const -> bool { return !(*this == layer); } } // namespace snn::internal diff --git a/src/neural_network/layer/FilterLayer.cpp b/src/neural_network/layer/FilterLayer.cpp index 70d7faab..28e8766a 100644 --- a/src/neural_network/layer/FilterLayer.cpp +++ b/src/neural_network/layer/FilterLayer.cpp @@ -57,6 +57,4 @@ auto FilterLayer::operator==(const BaseLayer& layer) const -> bool return false; } } - -auto FilterLayer::operator!=(const BaseLayer& layer) const -> bool { return !(*this == layer); } } // namespace snn::internal diff --git a/src/neural_network/layer/LocallyConnected1D.cpp b/src/neural_network/layer/LocallyConnected1D.cpp index af722127..78e63d43 100644 --- a/src/neural_network/layer/LocallyConnected1D.cpp +++ b/src/neural_network/layer/LocallyConnected1D.cpp @@ -154,6 +154,4 @@ inline auto LocallyConnected1D::operator==(const BaseLayer& layer) const -> bool { return this->FilterLayer::operator==(layer); } - -inline auto LocallyConnected1D::operator!=(const BaseLayer& layer) const -> bool { return !(*this == layer); } } // namespace snn::internal diff --git a/src/neural_network/layer/LocallyConnected2D.cpp b/src/neural_network/layer/LocallyConnected2D.cpp index 2f6d25a4..37caac78 100644 --- a/src/neural_network/layer/LocallyConnected2D.cpp +++ b/src/neural_network/layer/LocallyConnected2D.cpp @@ -171,6 +171,4 @@ inline auto LocallyConnected2D::operator==(const BaseLayer& layer) const -> bool return false; } } - -inline auto LocallyConnected2D::operator!=(const BaseLayer& layer) const -> bool { return !(*this == layer); } } // namespace snn::internal diff --git a/src/neural_network/layer/MaxPooling1D.cpp b/src/neural_network/layer/MaxPooling1D.cpp index 51b5d2b1..41c222d0 100644 --- a/src/neural_network/layer/MaxPooling1D.cpp +++ b/src/neural_network/layer/MaxPooling1D.cpp @@ -135,6 +135,4 @@ inline auto 
MaxPooling1D::operator==(const BaseLayer& layer) const -> bool return false; } } - -inline auto MaxPooling1D::operator!=(const BaseLayer& layer) const -> bool { return !(*this == layer); } } // namespace snn::internal diff --git a/src/neural_network/layer/MaxPooling2D.cpp b/src/neural_network/layer/MaxPooling2D.cpp index ba2c1c3d..68ae0237 100644 --- a/src/neural_network/layer/MaxPooling2D.cpp +++ b/src/neural_network/layer/MaxPooling2D.cpp @@ -146,6 +146,4 @@ inline auto MaxPooling2D::operator==(const BaseLayer& layer) const -> bool return false; } } - -inline auto MaxPooling2D::operator!=(const BaseLayer& layer) const -> bool { return !(*this == layer); } } // namespace snn::internal diff --git a/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp b/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp index d5cbf162..02c099ba 100644 --- a/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp +++ b/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp @@ -144,6 +144,4 @@ auto GatedRecurrentUnit::operator==(const GatedRecurrentUnit& neuron) const -> b this->resetGate == neuron.resetGate && this->updateGate == neuron.updateGate && this->outputGate == neuron.outputGate; } - -auto GatedRecurrentUnit::operator!=(const GatedRecurrentUnit& neuron) const -> bool { return !(*this == neuron); } -} // namespace snn::internal \ No newline at end of file +} // namespace snn::internal diff --git a/src/neural_network/layer/neuron/Neuron.cpp b/src/neural_network/layer/neuron/Neuron.cpp index c474f121..0af64423 100644 --- a/src/neural_network/layer/neuron/Neuron.cpp +++ b/src/neural_network/layer/neuron/Neuron.cpp @@ -93,6 +93,4 @@ auto Neuron::operator==(const Neuron& neuron) const -> bool this->activationFunction == neuron.activationFunction && this->outputFunction == neuron.outputFunction && *this->optimizer == *neuron.optimizer; } - -auto Neuron::operator!=(const Neuron& Neuron) const -> bool { return !(*this == Neuron); } } // namespace snn::internal diff --git 
a/src/neural_network/layer/neuron/RecurrentNeuron.cpp b/src/neural_network/layer/neuron/RecurrentNeuron.cpp index 6fa98514..29cf257f 100644 --- a/src/neural_network/layer/neuron/RecurrentNeuron.cpp +++ b/src/neural_network/layer/neuron/RecurrentNeuron.cpp @@ -86,13 +86,4 @@ void RecurrentNeuron::resetLearningVariables() this->recurrentError = 0; this->previousSum = 0; } - -auto RecurrentNeuron::operator==(const RecurrentNeuron& neuron) const -> bool -{ - return this->Neuron::operator==(neuron) && this->lastOutput == neuron.lastOutput && - this->previousOutput == neuron.previousOutput && this->recurrentError == neuron.recurrentError && - this->previousSum == neuron.previousSum; -} - -auto RecurrentNeuron::operator!=(const RecurrentNeuron& neuron) const -> bool { return !(*this == neuron); } -} // namespace snn::internal \ No newline at end of file +} // namespace snn::internal diff --git a/src/neural_network/layer/neuron/SimpleNeuron.cpp b/src/neural_network/layer/neuron/SimpleNeuron.cpp index 34aadff4..31fe644f 100644 --- a/src/neural_network/layer/neuron/SimpleNeuron.cpp +++ b/src/neural_network/layer/neuron/SimpleNeuron.cpp @@ -52,6 +52,4 @@ void SimpleNeuron::train() { this->optimizer->updateWeights(*this); } auto SimpleNeuron::isValid() const -> errorType { return this->Neuron::isValid(); } auto SimpleNeuron::operator==(const SimpleNeuron& neuron) const -> bool { return this->Neuron::operator==(neuron); } - -auto SimpleNeuron::operator!=(const SimpleNeuron& neuron) const -> bool { return !(*this == neuron); } } // namespace snn::internal diff --git a/src/neural_network/layer/neuron/activation_function/ActivationFunction.cpp b/src/neural_network/layer/neuron/activation_function/ActivationFunction.cpp index 4c83145e..aa972228 100644 --- a/src/neural_network/layer/neuron/activation_function/ActivationFunction.cpp +++ b/src/neural_network/layer/neuron/activation_function/ActivationFunction.cpp @@ -50,9 +50,4 @@ auto ActivationFunction::operator==(const 
ActivationFunction& activationFunction { return this->getType() == activationFunction.getType(); } - -auto ActivationFunction::operator!=(const ActivationFunction& activationFunction) const -> bool -{ - return !this->operator==(activationFunction); -} } // namespace snn::internal diff --git a/src/neural_network/optimizer/Dropout.cpp b/src/neural_network/optimizer/Dropout.cpp index d56f31a8..47f64727 100644 --- a/src/neural_network/optimizer/Dropout.cpp +++ b/src/neural_network/optimizer/Dropout.cpp @@ -76,6 +76,4 @@ auto Dropout::operator==(const LayerOptimizer& optimizer) const -> bool return false; } } - -auto Dropout::operator!=(const LayerOptimizer& optimizer) const -> bool { return !(*this == optimizer); } } // namespace snn::internal diff --git a/src/neural_network/optimizer/ErrorMultiplier.cpp b/src/neural_network/optimizer/ErrorMultiplier.cpp index 31471d8e..1037ffc1 100644 --- a/src/neural_network/optimizer/ErrorMultiplier.cpp +++ b/src/neural_network/optimizer/ErrorMultiplier.cpp @@ -46,6 +46,4 @@ auto ErrorMultiplier::operator==(const LayerOptimizer& optimizer) const -> bool return false; } } - -auto ErrorMultiplier::operator!=(const LayerOptimizer& optimizer) const -> bool { return !(*this == optimizer); } } // namespace snn::internal diff --git a/src/neural_network/optimizer/L1Regularization.cpp b/src/neural_network/optimizer/L1Regularization.cpp index fab48cfe..c97e2307 100644 --- a/src/neural_network/optimizer/L1Regularization.cpp +++ b/src/neural_network/optimizer/L1Regularization.cpp @@ -49,6 +49,4 @@ auto L1Regularization::operator==(const LayerOptimizer& optimizer) const -> bool return false; } } - -auto L1Regularization::operator!=(const LayerOptimizer& optimizer) const -> bool { return !(*this == optimizer); } } // namespace snn::internal diff --git a/src/neural_network/optimizer/L2Regularization.cpp b/src/neural_network/optimizer/L2Regularization.cpp index 05073553..4163a612 100644 --- a/src/neural_network/optimizer/L2Regularization.cpp +++ 
b/src/neural_network/optimizer/L2Regularization.cpp @@ -49,6 +49,4 @@ auto L2Regularization::operator==(const LayerOptimizer& optimizer) const -> bool return false; } } - -auto L2Regularization::operator!=(const LayerOptimizer& optimizer) const -> bool { return !(*this == optimizer); } -} // namespace snn::internal \ No newline at end of file +} // namespace snn::internal diff --git a/src/neural_network/optimizer/LayerOptimizer.cpp b/src/neural_network/optimizer/LayerOptimizer.cpp index 79ab2e29..a1082757 100644 --- a/src/neural_network/optimizer/LayerOptimizer.cpp +++ b/src/neural_network/optimizer/LayerOptimizer.cpp @@ -14,6 +14,4 @@ auto LayerOptimizer::operator==(const LayerOptimizer& optimizer) const -> bool return typeid(*this).hash_code() == typeid(optimizer).hash_code() && typeid(this->layer).hash_code() == typeid(optimizer.layer).hash_code(); } - -auto LayerOptimizer::operator!=(const LayerOptimizer& optimizer) const -> bool { return !(*this == optimizer); } } // namespace snn::internal diff --git a/src/neural_network/optimizer/NeuralNetworkOptimizer.cpp b/src/neural_network/optimizer/NeuralNetworkOptimizer.cpp index ab706355..c6d07363 100644 --- a/src/neural_network/optimizer/NeuralNetworkOptimizer.cpp +++ b/src/neural_network/optimizer/NeuralNetworkOptimizer.cpp @@ -8,9 +8,4 @@ auto NeuralNetworkOptimizer::operator==(const NeuralNetworkOptimizer& optimizer) { return typeid(*this).hash_code() == typeid(optimizer).hash_code(); } - -auto NeuralNetworkOptimizer::operator!=(const NeuralNetworkOptimizer& optimizer) const -> bool -{ - return !(*this == optimizer); -} } // namespace snn::internal diff --git a/src/neural_network/optimizer/Softmax.cpp b/src/neural_network/optimizer/Softmax.cpp index 7b79a0ea..95f2d74d 100644 --- a/src/neural_network/optimizer/Softmax.cpp +++ b/src/neural_network/optimizer/Softmax.cpp @@ -71,6 +71,4 @@ auto Softmax::operator==(const LayerOptimizer& optimizer) const -> bool return false; } } - -auto Softmax::operator!=(const 
LayerOptimizer& optimizer) const -> bool { return !(*this == optimizer); } } // namespace snn::internal diff --git a/src/neural_network/optimizer/StochasticGradientDescent.cpp b/src/neural_network/optimizer/StochasticGradientDescent.cpp index 7b73a220..648da6e5 100644 --- a/src/neural_network/optimizer/StochasticGradientDescent.cpp +++ b/src/neural_network/optimizer/StochasticGradientDescent.cpp @@ -107,9 +107,4 @@ auto StochasticGradientDescent::operator==(const NeuralNetworkOptimizer& optimiz return false; } } - -auto StochasticGradientDescent::operator!=(const NeuralNetworkOptimizer& optimizer) const -> bool -{ - return !(*this == optimizer); -} } // namespace snn::internal From 5dd28281cad868ad8ea87442313e032a9c0aac00 Mon Sep 17 00:00:00 2001 From: Matthieu HERNANDEZ Date: Mon, 18 Aug 2025 22:34:32 +0200 Subject: [PATCH 4/6] Cannot serialize Circular --- .../neural_network/layer/neuron/Circular.hpp | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/include/snn/neural_network/layer/neuron/Circular.hpp b/include/snn/neural_network/layer/neuron/Circular.hpp index b1f95d72..798913de 100644 --- a/include/snn/neural_network/layer/neuron/Circular.hpp +++ b/include/snn/neural_network/layer/neuron/Circular.hpp @@ -1,7 +1,4 @@ #pragma once -#include -#include -#include #include namespace snn::internal @@ -11,10 +8,6 @@ class Circular final { private: friend class Circular>; - friend class boost::serialization::access; - template - void serialize(Archive& archive, uint32_t version); - std::vector queue; size_t indexPush = 0; size_t indexGet = 0; @@ -40,16 +33,6 @@ class Circular final auto operator<=>(const Circular& other) const = default; }; -template -template -void Circular::serialize(Archive& archive, [[maybe_unused]] const uint32_t version) -{ - archive & queue; - archive & indexGet; - archive & indexPush; - archive & divider; -} - template <> void Circular::initialize(size_t size, size_t dataSize); From c588464d16f4cc505566bc9e58209e88835e1160 Mon 
Sep 17 00:00:00 2001 From: Matthieu HERNANDEZ Date: Tue, 19 Aug 2025 00:14:57 +0200 Subject: [PATCH 5/6] Rename neuron::batchSize to numberOfUses --- .../snn/neural_network/layer/LayerFactory.hpp | 25 ++++++++----------- .../neural_network/layer/neuron/Neuron.hpp | 4 +-- .../layer/neuron/NeuronModel.hpp | 2 +- src/neural_network/layer/LayerFactory.cpp | 14 +++++------ .../layer/neuron/GatedRecurrentUnit.cpp | 6 ++--- src/neural_network/layer/neuron/Neuron.cpp | 8 +++--- 6 files changed, 28 insertions(+), 31 deletions(-) diff --git a/include/snn/neural_network/layer/LayerFactory.hpp b/include/snn/neural_network/layer/LayerFactory.hpp index e32fbf45..c87a779c 100644 --- a/include/snn/neural_network/layer/LayerFactory.hpp +++ b/include/snn/neural_network/layer/LayerFactory.hpp @@ -18,7 +18,7 @@ extern auto Input(TInt... sizeOfInput) -> LayerModel .numberOfNeurons = 0, .numberOfOutputs = 0, .neuron = {.numberOfInputs = 0, - .batchSize = 0, + .numberOfUses = 0, .numberOfWeights = 0, .bias = 0, .activationFunction = activation::identity}, @@ -40,7 +40,7 @@ auto FullyConnected(int numberOfNeurons, activation activation = activation::sig .numberOfNeurons = numberOfNeurons, .numberOfOutputs = -1, .neuron = {.numberOfInputs = -1, - .batchSize = -1, + .numberOfUses = -1, .numberOfWeights = -1, .bias = 1.0F, .activationFunction = activation}, @@ -61,7 +61,7 @@ auto Recurrence(int numberOfNeurons, activation activation = activation::tanh, T .numberOfNeurons = numberOfNeurons, .numberOfOutputs = -1, .neuron = {.numberOfInputs = -1, - .batchSize = -1, + .numberOfUses = -1, .numberOfWeights = -1, .bias = 1.0F, .activationFunction = activation}, @@ -84,7 +84,7 @@ auto GruLayer(int numberOfNeurons, TOptimizer... 
optimizers) -> LayerModel .neuron = { .numberOfInputs = -1, - .batchSize = -1, + .numberOfUses = -1, .numberOfWeights = -1, .bias = 1.0F, .activationFunction = activation::tanh, @@ -106,7 +106,7 @@ auto MaxPooling(int kernelSize) -> LayerModel .numberOfNeurons = 0, .numberOfOutputs = -1, .neuron = {.numberOfInputs = 0, - .batchSize = 0, + .numberOfUses = 0, .numberOfWeights = 0, .bias = 0.0F, .activationFunction = activation::identity}, @@ -128,7 +128,7 @@ auto LocallyConnected(int numberOfLocallyConnected, int kernelSize, activation a .numberOfNeurons = -1, .numberOfOutputs = -1, .neuron = {.numberOfInputs = -1, - .batchSize = -1, + .numberOfUses = -1, .numberOfWeights = -1, .bias = almostZero, .activationFunction = activation}, @@ -149,14 +149,11 @@ auto Convolution(int numberOfConvolution, int kernelSize, activation activation .numberOfInputs = -1, .numberOfNeurons = 1, .numberOfOutputs = -1, - .neuron = - { - .numberOfInputs = -1, - .batchSize = -1, - .numberOfWeights = -1, - .bias = bias, - .activationFunction = activation, - }, + .neuron = {.numberOfInputs = -1, + .numberOfUses = -1, + .numberOfWeights = -1, + .bias = bias, + .activationFunction = activation}, .numberOfFilters = numberOfConvolution, .numberOfKernels = -1, .numberOfKernelsPerFilter = -1, diff --git a/include/snn/neural_network/layer/neuron/Neuron.hpp b/include/snn/neural_network/layer/neuron/Neuron.hpp index 2a7a105a..1594eb3a 100644 --- a/include/snn/neural_network/layer/neuron/Neuron.hpp +++ b/include/snn/neural_network/layer/neuron/Neuron.hpp @@ -18,7 +18,7 @@ class Neuron protected: int numberOfInputs{}; - int batchSize{}; + int numberOfUses{}; // Represents the number of times the neuron is used for one output of a layer. 
std::vector weights; float bias{}; @@ -65,7 +65,7 @@ void Neuron::serialize(Archive& archive, [[maybe_unused]] const uint32_t version archive.template register_type(); archive& this->optimizer; archive& this->numberOfInputs; - archive& this->batchSize; + archive& this->numberOfUses; archive& this->weights; archive& this->bias; archive& this->activationFunction; diff --git a/include/snn/neural_network/layer/neuron/NeuronModel.hpp b/include/snn/neural_network/layer/neuron/NeuronModel.hpp index 7611bd43..54e5450b 100644 --- a/include/snn/neural_network/layer/neuron/NeuronModel.hpp +++ b/include/snn/neural_network/layer/neuron/NeuronModel.hpp @@ -6,7 +6,7 @@ namespace snn struct NeuronModel { int numberOfInputs = -1; - int batchSize = -1; + int numberOfUses = -1; int numberOfWeights = -1; float bias = 1.0F; activation activationFunction{}; diff --git a/src/neural_network/layer/LayerFactory.cpp b/src/neural_network/layer/LayerFactory.cpp index 43783b08..e62cf96b 100644 --- a/src/neural_network/layer/LayerFactory.cpp +++ b/src/neural_network/layer/LayerFactory.cpp @@ -97,21 +97,21 @@ inline auto LayerFactory::build(LayerModel& model, std::vector& shapeOfInpu throw InvalidArchitectureException("Input of layer has size of 0."); } model.neuron.numberOfInputs = model.numberOfInputs; - model.neuron.batchSize = 1; + model.neuron.numberOfUses = 1; model.neuron.numberOfWeights = model.neuron.numberOfInputs + 1; // for the bias model.numberOfOutputs = model.numberOfNeurons; return std::make_unique(model, optimizer); case recurrence: model.neuron.numberOfInputs = model.numberOfInputs; - model.neuron.batchSize = 1; + model.neuron.numberOfUses = 1; model.neuron.numberOfWeights = model.neuron.numberOfInputs + 2; model.numberOfOutputs = model.numberOfNeurons; return std::make_unique(model, optimizer); case gruLayer: model.neuron.numberOfInputs = model.numberOfInputs; - model.neuron.batchSize = 1; + model.neuron.numberOfUses = 1; model.neuron.numberOfWeights = 
model.neuron.numberOfInputs + 2; model.numberOfOutputs = model.numberOfNeurons; return std::make_unique(model, optimizer); @@ -168,7 +168,7 @@ inline auto LayerFactory::build(LayerModel& model, std::vector& shapeOfInpu model.numberOfKernels = model.numberOfNeurons; model.numberOfKernelsPerFilter = model.numberOfKernels / model.numberOfFilters; model.neuron.numberOfInputs = model.kernelSize * model.shapeOfInput[C]; - model.neuron.batchSize = 1; + model.neuron.numberOfUses = 1; model.neuron.numberOfWeights = model.neuron.numberOfInputs + 1; model.numberOfOutputs = model.numberOfNeurons; return std::make_unique(model, optimizer); @@ -185,7 +185,7 @@ inline auto LayerFactory::build(LayerModel& model, std::vector& shapeOfInpu model.numberOfKernels = model.numberOfNeurons; model.numberOfKernelsPerFilter = model.numberOfKernels / model.numberOfFilters; model.neuron.numberOfInputs = model.kernelSize * model.kernelSize * model.shapeOfInput[C]; - model.neuron.batchSize = 1; + model.neuron.numberOfUses = 1; model.neuron.numberOfWeights = model.neuron.numberOfInputs + 1; model.numberOfOutputs = model.numberOfNeurons; return std::make_unique(model, optimizer); @@ -213,7 +213,7 @@ inline auto LayerFactory::build(LayerModel& model, std::vector& shapeOfInpu computeNumberOfKernelsForConvolution1D(model.numberOfFilters, model.shapeOfInput); model.numberOfKernelsPerFilter = model.numberOfKernels / model.numberOfFilters; model.neuron.numberOfInputs = model.kernelSize * model.shapeOfInput[C]; - model.neuron.batchSize = model.numberOfKernelsPerFilter; + model.neuron.numberOfUses = model.numberOfKernelsPerFilter; model.neuron.numberOfWeights = model.neuron.numberOfInputs + 1; model.numberOfOutputs = model.numberOfNeurons; return std::make_unique(model, optimizer); @@ -230,7 +230,7 @@ inline auto LayerFactory::build(LayerModel& model, std::vector& shapeOfInpu computeNumberOfKernelsForConvolution2D(model.numberOfFilters, model.shapeOfInput); model.numberOfKernelsPerFilter = 
model.numberOfKernels / model.numberOfFilters; model.neuron.numberOfInputs = model.kernelSize * model.kernelSize * model.shapeOfInput[C]; - model.neuron.batchSize = model.numberOfKernelsPerFilter; + model.neuron.numberOfUses = model.numberOfKernelsPerFilter; model.neuron.numberOfWeights = model.neuron.numberOfInputs + 1; model.numberOfOutputs = model.numberOfNeurons; return std::make_unique(model, optimizer); diff --git a/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp b/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp index 02c099ba..9fe89f42 100644 --- a/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp +++ b/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp @@ -7,11 +7,11 @@ namespace snn::internal { GatedRecurrentUnit::GatedRecurrentUnit(NeuronModel model, std::shared_ptr optimizer) : numberOfInputs(model.numberOfInputs), - resetGate({model.numberOfInputs, model.batchSize, model.numberOfWeights, model.bias, activation::sigmoid}, + resetGate({model.numberOfInputs, model.numberOfUses, model.numberOfWeights, model.bias, activation::sigmoid}, optimizer), - updateGate({model.numberOfInputs, model.batchSize, model.numberOfWeights, model.bias, activation::sigmoid}, + updateGate({model.numberOfInputs, model.numberOfUses, model.numberOfWeights, model.bias, activation::sigmoid}, optimizer), - outputGate({model.numberOfInputs, model.batchSize, model.numberOfWeights, model.bias, activation::tanh}, + outputGate({model.numberOfInputs, model.numberOfUses, model.numberOfWeights, model.bias, activation::tanh}, optimizer) { } diff --git a/src/neural_network/layer/neuron/Neuron.cpp b/src/neural_network/layer/neuron/Neuron.cpp index 0af64423..e4da9a78 100644 --- a/src/neural_network/layer/neuron/Neuron.cpp +++ b/src/neural_network/layer/neuron/Neuron.cpp @@ -8,7 +8,7 @@ namespace snn::internal { Neuron::Neuron(NeuronModel model, std::shared_ptr optimizer) : numberOfInputs(model.numberOfInputs), - batchSize(model.batchSize), + 
numberOfUses(model.numberOfUses), bias(model.bias), activationFunction(model.activationFunction), optimizer(std::move(optimizer)) @@ -79,9 +79,9 @@ void Neuron::resetLearningVariables() { this->deltaWeights.assign(this->weights.size(), 0.0F); this->errors.assign(this->numberOfInputs, 0.0F); - this->lastInputs.initialize(this->batchSize, this->numberOfInputs); - this->lastError.initialize(this->batchSize); - this->lastSum.initialize(this->batchSize); + this->lastInputs.initialize(this->numberOfUses, this->numberOfInputs); + this->lastError.initialize(this->numberOfUses); + this->lastSum.initialize(this->numberOfUses); } auto Neuron::operator==(const Neuron& neuron) const -> bool From cdfeb27c1b7e3adfce2faf91cdb5ccdec99155aa Mon Sep 17 00:00:00 2001 From: Matthieu HERNANDEZ Date: Thu, 21 Aug 2025 00:22:09 +0200 Subject: [PATCH 6/6] Add batchSize to resetLearningVariables --- include/snn/neural_network/layer/BaseLayer.hpp | 2 +- include/snn/neural_network/layer/Layer.hpp | 2 +- include/snn/neural_network/layer/Layer.tpp | 4 ++-- .../snn/neural_network/layer/neuron/Circular.hpp | 4 ++-- .../layer/neuron/GatedRecurrentUnit.hpp | 2 +- .../layer/neuron/LearningObject.hpp | 12 ++++++++++-- .../snn/neural_network/layer/neuron/Neuron.hpp | 2 +- .../layer/neuron/RecurrentNeuron.hpp | 2 +- src/neural_network/NeuralNetwork.cpp | 2 +- src/neural_network/layer/neuron/Circular.cpp | 12 ++++++------ .../layer/neuron/GatedRecurrentUnit.cpp | 8 ++++---- src/neural_network/layer/neuron/Neuron.cpp | 15 ++++++++++----- .../layer/neuron/RecurrentNeuron.cpp | 8 ++++++-- 13 files changed, 46 insertions(+), 29 deletions(-) diff --git a/include/snn/neural_network/layer/BaseLayer.hpp b/include/snn/neural_network/layer/BaseLayer.hpp index cbe66bf6..75254960 100644 --- a/include/snn/neural_network/layer/BaseLayer.hpp +++ b/include/snn/neural_network/layer/BaseLayer.hpp @@ -47,7 +47,7 @@ class BaseLayer [[nodiscard]] virtual auto isValid() const -> errorType = 0; - virtual void 
resetLearningVariables() = 0; + virtual void resetLearningVariables(int batchSize) = 0; [[nodiscard]] virtual auto summary() const -> std::string = 0; diff --git a/include/snn/neural_network/layer/Layer.hpp b/include/snn/neural_network/layer/Layer.hpp index 0dd01450..12010732 100644 --- a/include/snn/neural_network/layer/Layer.hpp +++ b/include/snn/neural_network/layer/Layer.hpp @@ -68,7 +68,7 @@ class Layer : public BaseLayer [[nodiscard]] auto isValid() const -> errorType override; - void resetLearningVariables() final; + void resetLearningVariables(int batchSize) final; auto operator==(const BaseLayer& layer) const -> bool override; }; diff --git a/include/snn/neural_network/layer/Layer.tpp b/include/snn/neural_network/layer/Layer.tpp index 61185cb2..ef225024 100644 --- a/include/snn/neural_network/layer/Layer.tpp +++ b/include/snn/neural_network/layer/Layer.tpp @@ -151,11 +151,11 @@ auto Layer::getNumberOfParameters() const -> int } template -void Layer::resetLearningVariables() +void Layer::resetLearningVariables(int batchSize) { for (auto& neuron : this->neurons) { - neuron.resetLearningVariables(); + neuron.resetLearningVariables(batchSize); } } diff --git a/include/snn/neural_network/layer/neuron/Circular.hpp b/include/snn/neural_network/layer/neuron/Circular.hpp index 798913de..8c7074c0 100644 --- a/include/snn/neural_network/layer/neuron/Circular.hpp +++ b/include/snn/neural_network/layer/neuron/Circular.hpp @@ -34,10 +34,10 @@ class Circular final }; template <> -void Circular::initialize(size_t size, size_t dataSize); +void Circular::initialize(size_t queueSize, size_t dataSize); template <> -void Circular>::initialize(size_t size, size_t dataSize); +void Circular>::initialize(size_t queueSize, size_t dataSize); template <> void Circular::reset(); diff --git a/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp b/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp index 364cc41f..bac57196 100644 --- 
a/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp +++ b/include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp @@ -50,7 +50,7 @@ class GatedRecurrentUnit final [[nodiscard]] auto getOptimizer() const -> NeuralNetworkOptimizer*; void setOptimizer(std::shared_ptr newOptimizer); - void resetLearningVariables(); + void resetLearningVariables(int batchSize); auto operator==(const GatedRecurrentUnit& neuron) const -> bool; }; diff --git a/include/snn/neural_network/layer/neuron/LearningObject.hpp b/include/snn/neural_network/layer/neuron/LearningObject.hpp index 29754c6a..e4a9eb62 100644 --- a/include/snn/neural_network/layer/neuron/LearningObject.hpp +++ b/include/snn/neural_network/layer/neuron/LearningObject.hpp @@ -1,6 +1,14 @@ #include template -concept LearningObject = requires(T t) { +concept WithoutBatchSize = requires(T t) { { t.resetLearningVariables() } -> std::same_as; -}; \ No newline at end of file +}; + +template +concept WithBatchSize = requires(T t, int batchSize) { + { t.resetLearningVariables(batchSize) } -> std::same_as; +}; + +template +concept LearningObject = WithoutBatchSize || WithBatchSize; \ No newline at end of file diff --git a/include/snn/neural_network/layer/neuron/Neuron.hpp b/include/snn/neural_network/layer/neuron/Neuron.hpp index 1594eb3a..0ef79d99 100644 --- a/include/snn/neural_network/layer/neuron/Neuron.hpp +++ b/include/snn/neural_network/layer/neuron/Neuron.hpp @@ -54,7 +54,7 @@ class Neuron [[nodiscard]] auto getOptimizer() const -> NeuralNetworkOptimizer*; void setOptimizer(std::shared_ptr newOptimizer); - void resetLearningVariables(); + void resetLearningVariables(int batchSize); auto operator==(const Neuron& neuron) const -> bool; }; diff --git a/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp b/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp index e1dc8a37..f5db8645 100644 --- a/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp +++ 
b/include/snn/neural_network/layer/neuron/RecurrentNeuron.hpp @@ -36,7 +36,7 @@ class RecurrentNeuron final : public Neuron [[nodiscard]] auto isValid() const -> errorType; - void resetLearningVariables(); + void resetLearningVariables(int batchSize); auto operator==(const RecurrentNeuron& neuron) const -> bool = default; }; diff --git a/src/neural_network/NeuralNetwork.cpp b/src/neural_network/NeuralNetwork.cpp index 98ba3f6f..83b84f63 100644 --- a/src/neural_network/NeuralNetwork.cpp +++ b/src/neural_network/NeuralNetwork.cpp @@ -214,7 +214,7 @@ void NeuralNetwork::resetLearningVariables() { for (const auto& layer : this->layers) { - layer->resetLearningVariables(); + layer->resetLearningVariables(1); } } diff --git a/src/neural_network/layer/neuron/Circular.cpp b/src/neural_network/layer/neuron/Circular.cpp index bb578788..d473d0cc 100644 --- a/src/neural_network/layer/neuron/Circular.cpp +++ b/src/neural_network/layer/neuron/Circular.cpp @@ -10,21 +10,21 @@ template class Circular; template class Circular>; template <> -void Circular::initialize(const size_t size, [[maybe_unused]] const size_t dataSize) +void Circular::initialize(const size_t queueSize, [[maybe_unused]] const size_t dataSize) { assert(dataSize == 1); this->indexPush = 0; this->indexGet = 0; - this->divider = static_cast(size); - this->queue.assign(size, 0.0F); + this->divider = static_cast(queueSize); + this->queue.assign(queueSize, 0.0F); } template <> -void Circular>::initialize(const size_t size, const size_t dataSize) +void Circular>::initialize(const size_t queueSize, const size_t dataSize) { this->indexPush = 0; this->indexGet = 0; - this->divider = static_cast(size); - this->queue.resize(size); + this->divider = static_cast(queueSize); + this->queue.resize(queueSize); for (auto& d : this->queue) { d = std::vector(dataSize, 0.0F); diff --git a/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp b/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp index 9fe89f42..9d7e6002 100644 --- 
a/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp +++ b/src/neural_network/layer/neuron/GatedRecurrentUnit.cpp @@ -129,12 +129,12 @@ void GatedRecurrentUnit::setOptimizer(std::shared_ptr ne this->outputGate.setOptimizer(newOptimizer); } -void GatedRecurrentUnit::resetLearningVariables() +void GatedRecurrentUnit::resetLearningVariables(int batchSize) { this->reset(); - this->resetGate.resetLearningVariables(); - this->updateGate.resetLearningVariables(); - this->outputGate.resetLearningVariables(); + this->resetGate.resetLearningVariables(batchSize); + this->updateGate.resetLearningVariables(batchSize); + this->outputGate.resetLearningVariables(batchSize); } auto GatedRecurrentUnit::operator==(const GatedRecurrentUnit& neuron) const -> bool diff --git a/src/neural_network/layer/neuron/Neuron.cpp b/src/neural_network/layer/neuron/Neuron.cpp index e4da9a78..c5bea622 100644 --- a/src/neural_network/layer/neuron/Neuron.cpp +++ b/src/neural_network/layer/neuron/Neuron.cpp @@ -21,7 +21,7 @@ Neuron::Neuron(NeuronModel model, std::shared_ptr optimi weight = randomInitializeWeight(model.numberOfWeights); } this->weights.back() = std::abs(this->weights.back()); - this->resetLearningVariables(); + this->resetLearningVariables(1); } auto Neuron::randomInitializeWeight(int numberOfWeights) -> float @@ -75,13 +75,18 @@ void Neuron::setOptimizer(std::shared_ptr newOptimizer) this->optimizer = std::move(newOptimizer); } -void Neuron::resetLearningVariables() +void Neuron::resetLearningVariables(int batchSize) { + if (batchSize < 1) + { + throw std::invalid_argument("The batch size must be at least 1."); + } this->deltaWeights.assign(this->weights.size(), 0.0F); this->errors.assign(this->numberOfInputs, 0.0F); - this->lastInputs.initialize(this->numberOfUses, this->numberOfInputs); - this->lastError.initialize(this->numberOfUses); - this->lastSum.initialize(this->numberOfUses); + const auto sizeOfCircular = this->numberOfUses * batchSize; + 
this->lastInputs.initialize(sizeOfCircular, this->numberOfInputs); + this->lastError.initialize(sizeOfCircular); + this->lastSum.initialize(sizeOfCircular); } auto Neuron::operator==(const Neuron& neuron) const -> bool diff --git a/src/neural_network/layer/neuron/RecurrentNeuron.cpp b/src/neural_network/layer/neuron/RecurrentNeuron.cpp index 29cf257f..edc627e1 100644 --- a/src/neural_network/layer/neuron/RecurrentNeuron.cpp +++ b/src/neural_network/layer/neuron/RecurrentNeuron.cpp @@ -78,9 +78,13 @@ auto RecurrentNeuron::isValid() const -> errorType return this->Neuron::isValid(); } -void RecurrentNeuron::resetLearningVariables() +void RecurrentNeuron::resetLearningVariables(int batchSize) { - this->Neuron::resetLearningVariables(); + if (batchSize != 1) + { + throw std::invalid_argument("The batch size should be 1 for recurrent neurons."); + } + this->Neuron::resetLearningVariables(batchSize); this->lastOutput = 0; this->previousOutput = 0; this->recurrentError = 0;