summary refs log tree commit diff stats
path: root/ml/Dimension.cc
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2022-04-14 18:12:14 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2022-04-14 18:12:14 +0000
commitbb50acdcb8073654ea667b8c0272e335bd43f844 (patch)
tree1e00c8a29871426f8182658928dcb62e42d57ce8 /ml/Dimension.cc
parentReleasing debian version 1.33.1-1. (diff)
downloadnetdata-bb50acdcb8073654ea667b8c0272e335bd43f844.tar.xz
netdata-bb50acdcb8073654ea667b8c0272e335bd43f844.zip
Merging upstream version 1.34.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'ml/Dimension.cc')
-rw-r--r--  ml/Dimension.cc  15
1 file changed, 13 insertions, 2 deletions
diff --git a/ml/Dimension.cc b/ml/Dimension.cc
index c27f30bb4..290d4c743 100644
--- a/ml/Dimension.cc
+++ b/ml/Dimension.cc
@@ -125,9 +125,15 @@ MLResult TrainableDimension::trainModel() {
if (!CNs)
return MLResult::MissingData;
- SamplesBuffer SB = SamplesBuffer(CNs, N, 1, Cfg.DiffN, Cfg.SmoothN, Cfg.LagN);
+ unsigned TargetNumSamples = Cfg.MaxTrainSamples * Cfg.RandomSamplingRatio;
+ double SamplingRatio = std::min(static_cast<double>(TargetNumSamples) / N, 1.0);
+
+ SamplesBuffer SB = SamplesBuffer(CNs, N, 1, Cfg.DiffN, Cfg.SmoothN, Cfg.LagN,
+ SamplingRatio, Cfg.RandomNums);
KM.train(SB, Cfg.MaxKMeansIters);
+
Trained = true;
+ ConstantModel = true;
delete[] CNs;
return MLResult::Success;
@@ -146,6 +152,10 @@ void PredictableDimension::addValue(CalculatedNumber Value, bool Exists) {
}
std::rotate(std::begin(CNs), std::begin(CNs) + 1, std::end(CNs));
+
+ if (CNs[N - 1] != Value)
+ ConstantModel = false;
+
CNs[N - 1] = Value;
}
@@ -157,7 +167,8 @@ std::pair<MLResult, bool> PredictableDimension::predict() {
CalculatedNumber *TmpCNs = new CalculatedNumber[N * (Cfg.LagN + 1)]();
std::memcpy(TmpCNs, CNs.data(), N * sizeof(CalculatedNumber));
- SamplesBuffer SB = SamplesBuffer(TmpCNs, N, 1, Cfg.DiffN, Cfg.SmoothN, Cfg.LagN);
+ SamplesBuffer SB = SamplesBuffer(TmpCNs, N, 1, Cfg.DiffN, Cfg.SmoothN, Cfg.LagN,
+ 1.0, Cfg.RandomNums);
AnomalyScore = computeAnomalyScore(SB);
delete[] TmpCNs;