Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .clang-tidy
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
Checks: >
*,
-altera-unroll-loops,
-boost-use-ranges,
-cppcoreguidelines-non-private-member-variables-in-classes,
-fuchsia-default-arguments-calls,
Expand All @@ -19,7 +20,6 @@ Checks: >
-cert*
WarningsAsErrors: >
*,
-altera-unroll-loops,
-altera-id-dependent-backward-branch,
-bugprone-easily-swappable-parameters,
-fuchsia-overloaded-operator,
Expand Down
2 changes: 1 addition & 1 deletion examples/.clang-tidy
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
Checks: >
*,
-altera-unroll-loops,
-boost-use-ranges,
-cppcoreguidelines-non-private-member-variables-in-classes,
-fuchsia-default-arguments-calls,
Expand All @@ -23,7 +24,6 @@ Checks: >
-readability-magic-numbers
WarningsAsErrors: >
*,
-altera-unroll-loops,
-altera-id-dependent-backward-branch,
-bugprone-easily-swappable-parameters,
-fuchsia-overloaded-operator,
Expand Down
4 changes: 3 additions & 1 deletion include/snn/neural_network/NeuralNetwork.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -74,12 +74,14 @@ class NeuralNetwork : public StatisticAnalysis

[[nodiscard]] auto isValid() const -> errorType;

void resetLearningVariables();

void trainOnce(const std::vector<float>& inputs, const std::vector<float>& desired,
const std::vector<float>& weighting = {}, bool temporalReset = true);

auto operator==(const NeuralNetwork& neuralNetwork) const -> bool;
auto operator!=(const NeuralNetwork& neuralNetwork) const -> bool;
};
static_assert(LearningObject<NeuralNetwork>);

template <class Archive>
void NeuralNetwork::serialize(Archive& archive, [[maybe_unused]] const uint32_t version)
Expand Down
3 changes: 1 addition & 2 deletions include/snn/neural_network/StatisticAnalysis.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -76,8 +76,7 @@ class StatisticAnalysis
[[nodiscard]] auto getMeanAbsoluteErrorMin() const -> float;
[[nodiscard]] auto getRootMeanSquaredErrorMin() const -> float;

auto operator==(const StatisticAnalysis& other) const -> bool;
auto operator!=(const StatisticAnalysis& other) const -> bool;
auto operator==(const StatisticAnalysis& other) const -> bool = default;
};

template <class Archive>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,6 @@ class StraightforwardNeuralNetwork final : public internal::NeuralNetwork
}

auto operator==(const StraightforwardNeuralNetwork& neuralNetwork) const -> bool;
auto operator!=(const StraightforwardNeuralNetwork& neuralNetwork) const -> bool;
};

template <logLevel T>
Expand Down
1 change: 1 addition & 0 deletions include/snn/neural_network/binary_classification.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ struct binaryClassification
archive& this->trueNegative;
archive& this->falsePositive;
archive& this->falseNegative;
archive& this->totalError;
}
};
} // namespace snn::internal
3 changes: 2 additions & 1 deletion include/snn/neural_network/layer/BaseLayer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -47,9 +47,10 @@ class BaseLayer

[[nodiscard]] virtual auto isValid() const -> errorType = 0;

virtual void resetLearningVariables(int batchSize) = 0;

[[nodiscard]] virtual auto summary() const -> std::string = 0;

virtual auto operator==(const BaseLayer& layer) const -> bool = 0;
virtual auto operator!=(const BaseLayer& layer) const -> bool = 0;
};
} // namespace snn::internal
1 change: 0 additions & 1 deletion include/snn/neural_network/layer/Convolution1D.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ class Convolution1D final : public Convolution
[[nodiscard]] auto summary() const -> std::string final;

auto operator==(const BaseLayer& layer) const -> bool final;
auto operator!=(const BaseLayer& layer) const -> bool final;
};

template <class Archive>
Expand Down
1 change: 0 additions & 1 deletion include/snn/neural_network/layer/Convolution2D.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ class Convolution2D final : public Convolution
[[nodiscard]] auto summary() const -> std::string final;

auto operator==(const BaseLayer& layer) const -> bool final;
auto operator!=(const BaseLayer& layer) const -> bool final;
};

template <class Archive>
Expand Down
1 change: 0 additions & 1 deletion include/snn/neural_network/layer/FilterLayer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@ class FilterLayer : public Layer<SimpleNeuron>
[[nodiscard]] auto isValid() const -> errorType override;

auto operator==(const BaseLayer& layer) const -> bool override;
auto operator!=(const BaseLayer& layer) const -> bool override;
};

template <class Archive>
Expand Down
3 changes: 2 additions & 1 deletion include/snn/neural_network/layer/Layer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -68,8 +68,9 @@ class Layer : public BaseLayer

[[nodiscard]] auto isValid() const -> errorType override;

void resetLearningVariables(int batchSize) final;

auto operator==(const BaseLayer& layer) const -> bool override;
auto operator!=(const BaseLayer& layer) const -> bool override;
};

template <BaseNeuron N>
Expand Down
15 changes: 9 additions & 6 deletions include/snn/neural_network/layer/Layer.tpp
Original file line number Diff line number Diff line change
Expand Up @@ -150,6 +150,15 @@ auto Layer<N>::getNumberOfParameters() const -> int
return sum;
}

template <BaseNeuron N>
void Layer<N>::resetLearningVariables(int batchSize)
{
for (auto& neuron : this->neurons)
{
neuron.resetLearningVariables(batchSize);
}
}

template <BaseNeuron N>
auto Layer<N>::operator==(const BaseLayer& layer) const -> bool
{
Expand All @@ -175,10 +184,4 @@ auto Layer<N>::operator==(const BaseLayer& layer) const -> bool
return false;
}
}

template <BaseNeuron N>
auto Layer<N>::operator!=(const BaseLayer& layer) const -> bool
{
return !(*this == layer);
}
} // namespace snn::internal
25 changes: 11 additions & 14 deletions include/snn/neural_network/layer/LayerFactory.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ extern auto Input(TInt... sizeOfInput) -> LayerModel
.numberOfNeurons = 0,
.numberOfOutputs = 0,
.neuron = {.numberOfInputs = 0,
.batchSize = 0,
.numberOfUses = 0,
.numberOfWeights = 0,
.bias = 0,
.activationFunction = activation::identity},
Expand All @@ -40,7 +40,7 @@ auto FullyConnected(int numberOfNeurons, activation activation = activation::sig
.numberOfNeurons = numberOfNeurons,
.numberOfOutputs = -1,
.neuron = {.numberOfInputs = -1,
.batchSize = -1,
.numberOfUses = -1,
.numberOfWeights = -1,
.bias = 1.0F,
.activationFunction = activation},
Expand All @@ -61,7 +61,7 @@ auto Recurrence(int numberOfNeurons, activation activation = activation::tanh, T
.numberOfNeurons = numberOfNeurons,
.numberOfOutputs = -1,
.neuron = {.numberOfInputs = -1,
.batchSize = -1,
.numberOfUses = -1,
.numberOfWeights = -1,
.bias = 1.0F,
.activationFunction = activation},
Expand All @@ -84,7 +84,7 @@ auto GruLayer(int numberOfNeurons, TOptimizer... optimizers) -> LayerModel
.neuron =
{
.numberOfInputs = -1,
.batchSize = -1,
.numberOfUses = -1,
.numberOfWeights = -1,
.bias = 1.0F,
.activationFunction = activation::tanh,
Expand All @@ -106,7 +106,7 @@ auto MaxPooling(int kernelSize) -> LayerModel
.numberOfNeurons = 0,
.numberOfOutputs = -1,
.neuron = {.numberOfInputs = 0,
.batchSize = 0,
.numberOfUses = 0,
.numberOfWeights = 0,
.bias = 0.0F,
.activationFunction = activation::identity},
Expand All @@ -128,7 +128,7 @@ auto LocallyConnected(int numberOfLocallyConnected, int kernelSize, activation a
.numberOfNeurons = -1,
.numberOfOutputs = -1,
.neuron = {.numberOfInputs = -1,
.batchSize = -1,
.numberOfUses = -1,
.numberOfWeights = -1,
.bias = almostZero,
.activationFunction = activation},
Expand All @@ -149,14 +149,11 @@ auto Convolution(int numberOfConvolution, int kernelSize, activation activation
.numberOfInputs = -1,
.numberOfNeurons = 1,
.numberOfOutputs = -1,
.neuron =
{
.numberOfInputs = -1,
.batchSize = -1,
.numberOfWeights = -1,
.bias = bias,
.activationFunction = activation,
},
.neuron = {.numberOfInputs = -1,
.numberOfUses = -1,
.numberOfWeights = -1,
.bias = bias,
.activationFunction = activation},
.numberOfFilters = numberOfConvolution,
.numberOfKernels = -1,
.numberOfKernelsPerFilter = -1,
Expand Down
1 change: 0 additions & 1 deletion include/snn/neural_network/layer/LocallyConnected1D.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ class LocallyConnected1D final : public FilterLayer
[[nodiscard]] auto summary() const -> std::string final;

auto operator==(const BaseLayer& layer) const -> bool final;
auto operator!=(const BaseLayer& layer) const -> bool final;
};

template <class Archive>
Expand Down
1 change: 0 additions & 1 deletion include/snn/neural_network/layer/LocallyConnected2D.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ class LocallyConnected2D final : public FilterLayer
[[nodiscard]] auto summary() const -> std::string final;

auto operator==(const BaseLayer& layer) const -> bool final;
auto operator!=(const BaseLayer& layer) const -> bool final;
};

template <class Archive>
Expand Down
1 change: 0 additions & 1 deletion include/snn/neural_network/layer/MaxPooling1D.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@ class MaxPooling1D final : public FilterLayer
[[nodiscard]] auto summary() const -> std::string final;

auto operator==(const BaseLayer& layer) const -> bool final;
auto operator!=(const BaseLayer& layer) const -> bool final;
};

template <class Archive>
Expand Down
1 change: 0 additions & 1 deletion include/snn/neural_network/layer/MaxPooling2D.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,6 @@ class MaxPooling2D final : public FilterLayer
[[nodiscard]] auto summary() const -> std::string final;

auto operator==(const BaseLayer& layer) const -> bool final;
auto operator!=(const BaseLayer& layer) const -> bool final;
};

template <class Archive>
Expand Down
1 change: 0 additions & 1 deletion include/snn/neural_network/layer/SimpleLayer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,6 @@ class SimpleLayer : public Layer<N>
[[nodiscard]] auto isValid() const -> errorType final;

auto operator==(const BaseLayer& layer) const -> bool override;
auto operator!=(const BaseLayer& layer) const -> bool override;
};

template <BaseNeuron N>
Expand Down
6 changes: 0 additions & 6 deletions include/snn/neural_network/layer/SimpleLayer.tpp
Original file line number Diff line number Diff line change
Expand Up @@ -89,10 +89,4 @@ auto SimpleLayer<N>::operator==(const BaseLayer& layer) const -> bool
{
return Layer<N>::operator==(layer);
}

template <BaseNeuron N>
auto SimpleLayer<N>::operator!=(const BaseLayer& layer) const -> bool
{
return !(*this == layer);
}
} // namespace snn::internal
6 changes: 3 additions & 3 deletions include/snn/neural_network/layer/neuron/BaseNeuron.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
#include <vector>

#include "../../optimizer/NeuralNetworkOptimizer.hpp"
#include "LearningObject.hpp"

namespace snn::internal
{
Expand Down Expand Up @@ -32,10 +33,9 @@ concept HasCommonConstMethods = requires(const N neuron) {
{ neuron.getNumberOfInputs() } -> std::same_as<int>;

{ neuron.operator==(neuron) } -> std::same_as<bool>;
{ neuron.operator!=(neuron) } -> std::same_as<bool>;
};

template <class N>
concept BaseNeuron =
HasCommonMethods<N> && HasCommonConstMethods<N> && (HasNonTemporalOuputMethod<N> || HasTemporalOuputMethod<N>);
concept BaseNeuron = HasCommonMethods<N> && HasCommonConstMethods<N> &&
(HasNonTemporalOuputMethod<N> || HasTemporalOuputMethod<N>) && LearningObject<N>;
} // namespace snn::internal
26 changes: 8 additions & 18 deletions include/snn/neural_network/layer/neuron/Circular.hpp
Original file line number Diff line number Diff line change
@@ -1,7 +1,4 @@
#pragma once
#include <boost/serialization/access.hpp>
#include <boost/serialization/base_object.hpp>
#include <cstdint>
#include <vector>

namespace snn::internal
Expand All @@ -11,10 +8,6 @@ class Circular final
{
private:
friend class Circular<std::vector<float>>;
friend class boost::serialization::access;
template <class Archive>
void serialize(Archive& archive, uint32_t version);

std::vector<T> queue;
size_t indexPush = 0;
size_t indexGet = 0;
Expand All @@ -29,6 +22,7 @@ class Circular final
~Circular() = default;

void initialize(size_t queueSize, size_t dataSize = 1); // Should be call after the ctor.
void reset(); // Do the same as initialize.

[[nodiscard]] auto getBack() -> const T*;
[[nodiscard]] auto getSum() const -> T;
Expand All @@ -39,21 +33,17 @@ class Circular final
auto operator<=>(const Circular<T>& other) const = default;
};

template <typename T>
template <class Archive>
void Circular<T>::serialize(Archive& archive, [[maybe_unused]] const uint32_t version)
{
archive & queue;
archive & indexGet;
archive & indexPush;
archive & divider;
}
template <>
void Circular<float>::initialize(size_t queueSize, size_t dataSize);

template <>
void Circular<std::vector<float>>::initialize(size_t queueSize, size_t dataSize);

template <>
void Circular<float>::initialize(size_t size, size_t dataSize);
void Circular<float>::reset();

template <>
void Circular<std::vector<float>>::initialize(size_t size, size_t dataSize);
void Circular<std::vector<float>>::reset();

template <>
auto Circular<float>::getSum() const -> float;
Expand Down
10 changes: 3 additions & 7 deletions include/snn/neural_network/layer/neuron/GatedRecurrentUnit.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ class GatedRecurrentUnit final
int numberOfInputs{};

float previousOutput = 0;
float recurrentError = 0;
float updateGateOutput = 0;
float outputGateOutput = 0;

Expand Down Expand Up @@ -51,19 +50,16 @@ class GatedRecurrentUnit final
[[nodiscard]] auto getOptimizer() const -> NeuralNetworkOptimizer*;
void setOptimizer(std::shared_ptr<NeuralNetworkOptimizer> newOptimizer);

void resetLearningVariables(int batchSize);

auto operator==(const GatedRecurrentUnit& neuron) const -> bool;
auto operator!=(const GatedRecurrentUnit& neuron) const -> bool;
};
static_assert(BaseNeuron<GatedRecurrentUnit>);

template <class Archive>
void GatedRecurrentUnit::serialize(Archive& archive, [[maybe_unused]] const uint32_t version)
{
archive& this->errors;
archive& this->numberOfInputs;
archive& this->previousOutput;
archive& this->recurrentError;
archive& this->updateGateOutput;
archive& this->outputGateOutput;
archive& this->resetGate;
archive& this->updateGate;
archive& this->outputGate;
Expand Down
14 changes: 14 additions & 0 deletions include/snn/neural_network/layer/neuron/LearningObject.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
#pragma once
#include <concepts>

// Concepts describing objects that participate in training and can have
// their per-iteration learning state (errors, momentum buffers, ...) reset.
// A conforming type provides resetLearningVariables(), either with or
// without a batch-size argument.

// Satisfied by types whose learning state does not depend on the batch
// size (e.g. a whole network that forwards the call to its layers).
template <typename T>
concept WithoutBatchSize = requires(T t) {
    { t.resetLearningVariables() } -> std::same_as<void>;
};

// Satisfied by types that size internal accumulators per batch
// (e.g. neurons and layers taking resetLearningVariables(batchSize)).
template <typename T>
concept WithBatchSize = requires(T t, int batchSize) {
    { t.resetLearningVariables(batchSize) } -> std::same_as<void>;
};

// A LearningObject is anything trainable: either flavor of reset suffices.
template <typename T>
concept LearningObject = WithoutBatchSize<T> || WithBatchSize<T>;
Loading
Loading