Diffstat (limited to 'src/lib/stats/tests')
-rw-r--r--   src/lib/stats/tests/Makefile.am                35
-rw-r--r--   src/lib/stats/tests/Makefile.in              1017
-rw-r--r--   src/lib/stats/tests/context_unittest.cc       126
-rw-r--r--   src/lib/stats/tests/observation_unittest.cc   619
-rw-r--r--   src/lib/stats/tests/run_unittests.cc           18
-rw-r--r--   src/lib/stats/tests/stats_mgr_unittest.cc    1117
6 files changed, 2932 insertions, 0 deletions
diff --git a/src/lib/stats/tests/Makefile.am b/src/lib/stats/tests/Makefile.am
new file mode 100644
index 0000000..c8f53aa
--- /dev/null
+++ b/src/lib/stats/tests/Makefile.am
@@ -0,0 +1,35 @@
+SUBDIRS = .
+
+AM_CPPFLAGS = -I$(top_builddir)/src/lib -I$(top_srcdir)/src/lib
+AM_CPPFLAGS += $(BOOST_INCLUDES)
+
+AM_CXXFLAGS = $(KEA_CXXFLAGS)
+
+if USE_STATIC_LINK
+AM_LDFLAGS = -static
+endif
+
+if HAVE_GTEST
+
+TESTS = libstats_unittests
+
+libstats_unittests_SOURCES = run_unittests.cc
+libstats_unittests_SOURCES += observation_unittest.cc
+libstats_unittests_SOURCES += context_unittest.cc
+libstats_unittests_SOURCES += stats_mgr_unittest.cc
+
+libstats_unittests_CPPFLAGS = $(AM_CPPFLAGS) $(GTEST_INCLUDES)
+libstats_unittests_LDFLAGS = $(AM_LDFLAGS) $(GTEST_LDFLAGS)
+libstats_unittests_CXXFLAGS = $(AM_CXXFLAGS)
+
+libstats_unittests_LDADD = $(top_builddir)/src/lib/stats/libkea-stats.la
+libstats_unittests_LDADD += $(top_builddir)/src/lib/cc/libkea-cc.la
+libstats_unittests_LDADD += $(top_builddir)/src/lib/asiolink/libkea-asiolink.la
+libstats_unittests_LDADD += $(top_builddir)/src/lib/log/libkea-log.la
+libstats_unittests_LDADD += $(top_builddir)/src/lib/util/libkea-util.la
+libstats_unittests_LDADD += $(top_builddir)/src/lib/exceptions/libkea-exceptions.la
+libstats_unittests_LDADD += $(LOG4CPLUS_LIBS) $(GTEST_LDADD) $(BOOST_LIBS)
+
+endif
+
+noinst_PROGRAMS = $(TESTS)
diff --git a/src/lib/stats/tests/Makefile.in b/src/lib/stats/tests/Makefile.in
new file mode 100644
index 0000000..1c78036
--- /dev/null
+++ b/src/lib/stats/tests/Makefile.in
@@ -0,0 +1,1017 @@
+# Makefile.in generated by automake 1.16.1 from Makefile.am.
+# @configure_input@
+
+# Copyright (C) 1994-2018 Free Software Foundation, Inc.
+
+# This Makefile.in is free software; the Free Software Foundation
+# gives unlimited permission to copy and/or distribute it,
+# with or without modifications, as long as this notice is preserved.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
+# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+# PARTICULAR PURPOSE.
+
+@SET_MAKE@
+
+VPATH = @srcdir@
+am__is_gnu_make = { \
+ if test -z '$(MAKELEVEL)'; then \
+ false; \
+ elif test -n '$(MAKE_HOST)'; then \
+ true; \
+ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
+ true; \
+ else \
+ false; \
+ fi; \
+}
+am__make_running_with_option = \
+ case $${target_option-} in \
+ ?) ;; \
+ *) echo "am__make_running_with_option: internal error: invalid" \
+ "target option '$${target_option-}' specified" >&2; \
+ exit 1;; \
+ esac; \
+ has_opt=no; \
+ sane_makeflags=$$MAKEFLAGS; \
+ if $(am__is_gnu_make); then \
+ sane_makeflags=$$MFLAGS; \
+ else \
+ case $$MAKEFLAGS in \
+ *\\[\ \ ]*) \
+ bs=\\; \
+ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
+ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
+ esac; \
+ fi; \
+ skip_next=no; \
+ strip_trailopt () \
+ { \
+ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
+ }; \
+ for flg in $$sane_makeflags; do \
+ test $$skip_next = yes && { skip_next=no; continue; }; \
+ case $$flg in \
+ *=*|--*) continue;; \
+ -*I) strip_trailopt 'I'; skip_next=yes;; \
+ -*I?*) strip_trailopt 'I';; \
+ -*O) strip_trailopt 'O'; skip_next=yes;; \
+ -*O?*) strip_trailopt 'O';; \
+ -*l) strip_trailopt 'l'; skip_next=yes;; \
+ -*l?*) strip_trailopt 'l';; \
+ -[dEDm]) skip_next=yes;; \
+ -[JT]) skip_next=yes;; \
+ esac; \
+ case $$flg in \
+ *$$target_option*) has_opt=yes; break;; \
+ esac; \
+ done; \
+ test $$has_opt = yes
+am__make_dryrun = (target_option=n; $(am__make_running_with_option))
+am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
+pkgdatadir = $(datadir)/@PACKAGE@
+pkgincludedir = $(includedir)/@PACKAGE@
+pkglibdir = $(libdir)/@PACKAGE@
+pkglibexecdir = $(libexecdir)/@PACKAGE@
+am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
+install_sh_DATA = $(install_sh) -c -m 644
+install_sh_PROGRAM = $(install_sh) -c
+install_sh_SCRIPT = $(install_sh) -c
+INSTALL_HEADER = $(INSTALL_DATA)
+transform = $(program_transform_name)
+NORMAL_INSTALL = :
+PRE_INSTALL = :
+POST_INSTALL = :
+NORMAL_UNINSTALL = :
+PRE_UNINSTALL = :
+POST_UNINSTALL = :
+build_triplet = @build@
+host_triplet = @host@
+@HAVE_GTEST_TRUE@TESTS = libstats_unittests$(EXEEXT)
+noinst_PROGRAMS = $(am__EXEEXT_1)
+subdir = src/lib/stats/tests
+ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
+am__aclocal_m4_deps = $(top_srcdir)/m4macros/ax_boost_for_kea.m4 \
+ $(top_srcdir)/m4macros/ax_cpp11.m4 \
+ $(top_srcdir)/m4macros/ax_crypto.m4 \
+ $(top_srcdir)/m4macros/ax_find_library.m4 \
+ $(top_srcdir)/m4macros/ax_gssapi.m4 \
+ $(top_srcdir)/m4macros/ax_gtest.m4 \
+ $(top_srcdir)/m4macros/ax_isc_rpath.m4 \
+ $(top_srcdir)/m4macros/ax_sysrepo.m4 \
+ $(top_srcdir)/m4macros/libtool.m4 \
+ $(top_srcdir)/m4macros/ltoptions.m4 \
+ $(top_srcdir)/m4macros/ltsugar.m4 \
+ $(top_srcdir)/m4macros/ltversion.m4 \
+ $(top_srcdir)/m4macros/lt~obsolete.m4 \
+ $(top_srcdir)/configure.ac
+am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
+ $(ACLOCAL_M4)
+DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
+mkinstalldirs = $(install_sh) -d
+CONFIG_HEADER = $(top_builddir)/config.h
+CONFIG_CLEAN_FILES =
+CONFIG_CLEAN_VPATH_FILES =
+@HAVE_GTEST_TRUE@am__EXEEXT_1 = libstats_unittests$(EXEEXT)
+PROGRAMS = $(noinst_PROGRAMS)
+am__libstats_unittests_SOURCES_DIST = run_unittests.cc \
+ observation_unittest.cc context_unittest.cc \
+ stats_mgr_unittest.cc
+@HAVE_GTEST_TRUE@am_libstats_unittests_OBJECTS = \
+@HAVE_GTEST_TRUE@ libstats_unittests-run_unittests.$(OBJEXT) \
+@HAVE_GTEST_TRUE@ libstats_unittests-observation_unittest.$(OBJEXT) \
+@HAVE_GTEST_TRUE@ libstats_unittests-context_unittest.$(OBJEXT) \
+@HAVE_GTEST_TRUE@ libstats_unittests-stats_mgr_unittest.$(OBJEXT)
+libstats_unittests_OBJECTS = $(am_libstats_unittests_OBJECTS)
+am__DEPENDENCIES_1 =
+@HAVE_GTEST_TRUE@libstats_unittests_DEPENDENCIES = $(top_builddir)/src/lib/stats/libkea-stats.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/cc/libkea-cc.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/asiolink/libkea-asiolink.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/log/libkea-log.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/util/libkea-util.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/exceptions/libkea-exceptions.la \
+@HAVE_GTEST_TRUE@ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \
+@HAVE_GTEST_TRUE@ $(am__DEPENDENCIES_1)
+AM_V_lt = $(am__v_lt_@AM_V@)
+am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@)
+am__v_lt_0 = --silent
+am__v_lt_1 =
+libstats_unittests_LINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX \
+ $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) --mode=link $(CXXLD) \
+ $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) \
+ $(libstats_unittests_LDFLAGS) $(LDFLAGS) -o $@
+AM_V_P = $(am__v_P_@AM_V@)
+am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
+am__v_P_0 = false
+am__v_P_1 = :
+AM_V_GEN = $(am__v_GEN_@AM_V@)
+am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
+am__v_GEN_0 = @echo " GEN " $@;
+am__v_GEN_1 =
+AM_V_at = $(am__v_at_@AM_V@)
+am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
+am__v_at_0 = @
+am__v_at_1 =
+DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)
+depcomp = $(SHELL) $(top_srcdir)/depcomp
+am__maybe_remake_depfiles = depfiles
+am__depfiles_remade = \
+ ./$(DEPDIR)/libstats_unittests-context_unittest.Po \
+ ./$(DEPDIR)/libstats_unittests-observation_unittest.Po \
+ ./$(DEPDIR)/libstats_unittests-run_unittests.Po \
+ ./$(DEPDIR)/libstats_unittests-stats_mgr_unittest.Po
+am__mv = mv -f
+CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \
+ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS)
+LTCXXCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
+ $(LIBTOOLFLAGS) --mode=compile $(CXX) $(DEFS) \
+ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \
+ $(AM_CXXFLAGS) $(CXXFLAGS)
+AM_V_CXX = $(am__v_CXX_@AM_V@)
+am__v_CXX_ = $(am__v_CXX_@AM_DEFAULT_V@)
+am__v_CXX_0 = @echo " CXX " $@;
+am__v_CXX_1 =
+CXXLD = $(CXX)
+CXXLINK = $(LIBTOOL) $(AM_V_lt) --tag=CXX $(AM_LIBTOOLFLAGS) \
+ $(LIBTOOLFLAGS) --mode=link $(CXXLD) $(AM_CXXFLAGS) \
+ $(CXXFLAGS) $(AM_LDFLAGS) $(LDFLAGS) -o $@
+AM_V_CXXLD = $(am__v_CXXLD_@AM_V@)
+am__v_CXXLD_ = $(am__v_CXXLD_@AM_DEFAULT_V@)
+am__v_CXXLD_0 = @echo " CXXLD " $@;
+am__v_CXXLD_1 =
+SOURCES = $(libstats_unittests_SOURCES)
+DIST_SOURCES = $(am__libstats_unittests_SOURCES_DIST)
+RECURSIVE_TARGETS = all-recursive check-recursive cscopelist-recursive \
+ ctags-recursive dvi-recursive html-recursive info-recursive \
+ install-data-recursive install-dvi-recursive \
+ install-exec-recursive install-html-recursive \
+ install-info-recursive install-pdf-recursive \
+ install-ps-recursive install-recursive installcheck-recursive \
+ installdirs-recursive pdf-recursive ps-recursive \
+ tags-recursive uninstall-recursive
+am__can_run_installinfo = \
+ case $$AM_UPDATE_INFO_DIR in \
+ n|no|NO) false;; \
+ *) (install-info --version) >/dev/null 2>&1;; \
+ esac
+RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \
+ distclean-recursive maintainer-clean-recursive
+am__recursive_targets = \
+ $(RECURSIVE_TARGETS) \
+ $(RECURSIVE_CLEAN_TARGETS) \
+ $(am__extra_recursive_targets)
+AM_RECURSIVE_TARGETS = $(am__recursive_targets:-recursive=) TAGS CTAGS \
+ distdir distdir-am
+am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
+# Read a list of newline-separated strings from the standard input,
+# and print each of them once, without duplicates. Input order is
+# *not* preserved.
+am__uniquify_input = $(AWK) '\
+ BEGIN { nonempty = 0; } \
+ { items[$$0] = 1; nonempty = 1; } \
+ END { if (nonempty) { for (i in items) print i; }; } \
+'
+# Make sure the list of sources is unique. This is necessary because,
+# e.g., the same source file might be shared among _SOURCES variables
+# for different programs/libraries.
+am__define_uniq_tagged_files = \
+ list='$(am__tagged_files)'; \
+ unique=`for i in $$list; do \
+ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \
+ done | $(am__uniquify_input)`
+ETAGS = etags
+CTAGS = ctags
+am__tty_colors_dummy = \
+ mgn= red= grn= lgn= blu= brg= std=; \
+ am__color_tests=no
+am__tty_colors = { \
+ $(am__tty_colors_dummy); \
+ if test "X$(AM_COLOR_TESTS)" = Xno; then \
+ am__color_tests=no; \
+ elif test "X$(AM_COLOR_TESTS)" = Xalways; then \
+ am__color_tests=yes; \
+ elif test "X$$TERM" != Xdumb && { test -t 1; } 2>/dev/null; then \
+ am__color_tests=yes; \
+ fi; \
+ if test $$am__color_tests = yes; then \
+ red=''; \
+ grn=''; \
+ lgn=''; \
+ blu=''; \
+ mgn=''; \
+ brg=''; \
+ std=''; \
+ fi; \
+}
+DIST_SUBDIRS = $(SUBDIRS)
+am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp
+DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
+am__relativize = \
+ dir0=`pwd`; \
+ sed_first='s,^\([^/]*\)/.*$$,\1,'; \
+ sed_rest='s,^[^/]*/*,,'; \
+ sed_last='s,^.*/\([^/]*\)$$,\1,'; \
+ sed_butlast='s,/*[^/]*$$,,'; \
+ while test -n "$$dir1"; do \
+ first=`echo "$$dir1" | sed -e "$$sed_first"`; \
+ if test "$$first" != "."; then \
+ if test "$$first" = ".."; then \
+ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \
+ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \
+ else \
+ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \
+ if test "$$first2" = "$$first"; then \
+ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \
+ else \
+ dir2="../$$dir2"; \
+ fi; \
+ dir0="$$dir0"/"$$first"; \
+ fi; \
+ fi; \
+ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \
+ done; \
+ reldir="$$dir2"
+ACLOCAL = @ACLOCAL@
+AMTAR = @AMTAR@
+AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
+AR = @AR@
+ASCIIDOC = @ASCIIDOC@
+AUTOCONF = @AUTOCONF@
+AUTOHEADER = @AUTOHEADER@
+AUTOMAKE = @AUTOMAKE@
+AWK = @AWK@
+BOOST_INCLUDES = @BOOST_INCLUDES@
+BOOST_LIBS = @BOOST_LIBS@
+BOTAN_TOOL = @BOTAN_TOOL@
+CC = @CC@
+CCDEPMODE = @CCDEPMODE@
+CFLAGS = @CFLAGS@
+CONTRIB_DIR = @CONTRIB_DIR@
+CPP = @CPP@
+CPPFLAGS = @CPPFLAGS@
+CRYPTO_CFLAGS = @CRYPTO_CFLAGS@
+CRYPTO_INCLUDES = @CRYPTO_INCLUDES@
+CRYPTO_LDFLAGS = @CRYPTO_LDFLAGS@
+CRYPTO_LIBS = @CRYPTO_LIBS@
+CRYPTO_PACKAGE = @CRYPTO_PACKAGE@
+CRYPTO_RPATH = @CRYPTO_RPATH@
+CXX = @CXX@
+CXXCPP = @CXXCPP@
+CXXDEPMODE = @CXXDEPMODE@
+CXXFLAGS = @CXXFLAGS@
+CYGPATH_W = @CYGPATH_W@
+DEFS = @DEFS@
+DEPDIR = @DEPDIR@
+DISTCHECK_BOOST_CONFIGURE_FLAG = @DISTCHECK_BOOST_CONFIGURE_FLAG@
+DISTCHECK_CONTRIB_CONFIGURE_FLAG = @DISTCHECK_CONTRIB_CONFIGURE_FLAG@
+DISTCHECK_CRYPTO_CONFIGURE_FLAG = @DISTCHECK_CRYPTO_CONFIGURE_FLAG@
+DISTCHECK_GTEST_CONFIGURE_FLAG = @DISTCHECK_GTEST_CONFIGURE_FLAG@
+DISTCHECK_KEA_SHELL_CONFIGURE_FLAG = @DISTCHECK_KEA_SHELL_CONFIGURE_FLAG@
+DISTCHECK_LOG4CPLUS_CONFIGURE_FLAG = @DISTCHECK_LOG4CPLUS_CONFIGURE_FLAG@
+DISTCHECK_PERFDHCP_CONFIGURE_FLAG = @DISTCHECK_PERFDHCP_CONFIGURE_FLAG@
+DISTCHECK_PREMIUM_CONFIGURE_FLAG = @DISTCHECK_PREMIUM_CONFIGURE_FLAG@
+DLLTOOL = @DLLTOOL@
+DSYMUTIL = @DSYMUTIL@
+DUMPBIN = @DUMPBIN@
+ECHO_C = @ECHO_C@
+ECHO_N = @ECHO_N@
+ECHO_T = @ECHO_T@
+EGREP = @EGREP@
+EXEEXT = @EXEEXT@
+FGREP = @FGREP@
+GENHTML = @GENHTML@
+GREP = @GREP@
+GSSAPI_CFLAGS = @GSSAPI_CFLAGS@
+GSSAPI_LIBS = @GSSAPI_LIBS@
+GTEST_CONFIG = @GTEST_CONFIG@
+GTEST_INCLUDES = @GTEST_INCLUDES@
+GTEST_LDADD = @GTEST_LDADD@
+GTEST_LDFLAGS = @GTEST_LDFLAGS@
+GTEST_SOURCE = @GTEST_SOURCE@
+HAVE_SYSREPO = @HAVE_SYSREPO@
+INSTALL = @INSTALL@
+INSTALL_DATA = @INSTALL_DATA@
+INSTALL_PROGRAM = @INSTALL_PROGRAM@
+INSTALL_SCRIPT = @INSTALL_SCRIPT@
+INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
+KEA_CXXFLAGS = @KEA_CXXFLAGS@
+KEA_SRCID = @KEA_SRCID@
+KRB5_CONFIG = @KRB5_CONFIG@
+LCOV = @LCOV@
+LD = @LD@
+LDFLAGS = @LDFLAGS@
+LEX = @LEX@
+LEXLIB = @LEXLIB@
+LEX_OUTPUT_ROOT = @LEX_OUTPUT_ROOT@
+LIBOBJS = @LIBOBJS@
+LIBS = @LIBS@
+LIBTOOL = @LIBTOOL@
+LIPO = @LIPO@
+LN_S = @LN_S@
+LOG4CPLUS_INCLUDES = @LOG4CPLUS_INCLUDES@
+LOG4CPLUS_LIBS = @LOG4CPLUS_LIBS@
+LTLIBOBJS = @LTLIBOBJS@
+LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
+MAKEINFO = @MAKEINFO@
+MANIFEST_TOOL = @MANIFEST_TOOL@
+MKDIR_P = @MKDIR_P@
+MYSQL_CPPFLAGS = @MYSQL_CPPFLAGS@
+MYSQL_LIBS = @MYSQL_LIBS@
+NM = @NM@
+NMEDIT = @NMEDIT@
+OBJDUMP = @OBJDUMP@
+OBJEXT = @OBJEXT@
+OTOOL = @OTOOL@
+OTOOL64 = @OTOOL64@
+PACKAGE = @PACKAGE@
+PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
+PACKAGE_NAME = @PACKAGE_NAME@
+PACKAGE_STRING = @PACKAGE_STRING@
+PACKAGE_TARNAME = @PACKAGE_TARNAME@
+PACKAGE_URL = @PACKAGE_URL@
+PACKAGE_VERSION = @PACKAGE_VERSION@
+PACKAGE_VERSION_TYPE = @PACKAGE_VERSION_TYPE@
+PATH_SEPARATOR = @PATH_SEPARATOR@
+PDFLATEX = @PDFLATEX@
+PERL = @PERL@
+PGSQL_CPPFLAGS = @PGSQL_CPPFLAGS@
+PGSQL_LIBS = @PGSQL_LIBS@
+PKGPYTHONDIR = @PKGPYTHONDIR@
+PKG_CONFIG = @PKG_CONFIG@
+PLANTUML = @PLANTUML@
+PREMIUM_DIR = @PREMIUM_DIR@
+PYTHON = @PYTHON@
+PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@
+PYTHON_PLATFORM = @PYTHON_PLATFORM@
+PYTHON_PREFIX = @PYTHON_PREFIX@
+PYTHON_VERSION = @PYTHON_VERSION@
+RANLIB = @RANLIB@
+SED = @SED@
+SEP = @SEP@
+SET_MAKE = @SET_MAKE@
+SHELL = @SHELL@
+SPHINXBUILD = @SPHINXBUILD@
+SRPD_PLUGINS_PATH = @SRPD_PLUGINS_PATH@
+SR_REPO_PATH = @SR_REPO_PATH@
+STRIP = @STRIP@
+SYSREPOCPP_VERSION = @SYSREPOCPP_VERSION@
+SYSREPO_CPPFLAGS = @SYSREPO_CPPFLAGS@
+SYSREPO_INCLUDEDIR = @SYSREPO_INCLUDEDIR@
+SYSREPO_LIBS = @SYSREPO_LIBS@
+SYSREPO_VERSION = @SYSREPO_VERSION@
+USE_LCOV = @USE_LCOV@
+VALGRIND = @VALGRIND@
+VERSION = @VERSION@
+WARNING_GCC_44_STRICT_ALIASING_CFLAG = @WARNING_GCC_44_STRICT_ALIASING_CFLAG@
+YACC = @YACC@
+abs_builddir = @abs_builddir@
+abs_srcdir = @abs_srcdir@
+abs_top_builddir = @abs_top_builddir@
+abs_top_srcdir = @abs_top_srcdir@
+ac_ct_AR = @ac_ct_AR@
+ac_ct_CC = @ac_ct_CC@
+ac_ct_CXX = @ac_ct_CXX@
+ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
+am__include = @am__include@
+am__leading_dot = @am__leading_dot@
+am__quote = @am__quote@
+am__tar = @am__tar@
+am__untar = @am__untar@
+bindir = @bindir@
+build = @build@
+build_alias = @build_alias@
+build_cpu = @build_cpu@
+build_os = @build_os@
+build_vendor = @build_vendor@
+builddir = @builddir@
+datadir = @datadir@
+datarootdir = @datarootdir@
+docdir = @docdir@
+dvidir = @dvidir@
+exec_prefix = @exec_prefix@
+host = @host@
+host_alias = @host_alias@
+host_cpu = @host_cpu@
+host_os = @host_os@
+host_vendor = @host_vendor@
+htmldir = @htmldir@
+includedir = @includedir@
+infodir = @infodir@
+install_sh = @install_sh@
+libdir = @libdir@
+libexecdir = @libexecdir@
+localedir = @localedir@
+localstatedir = @localstatedir@
+mandir = @mandir@
+mkdir_p = @mkdir_p@
+oldincludedir = @oldincludedir@
+pdfdir = @pdfdir@
+pkgpyexecdir = @pkgpyexecdir@
+pkgpythondir = @pkgpythondir@
+prefix = @prefix@
+program_transform_name = @program_transform_name@
+psdir = @psdir@
+pyexecdir = @pyexecdir@
+pythondir = @pythondir@
+runstatedir = @runstatedir@
+sbindir = @sbindir@
+sharedstatedir = @sharedstatedir@
+srcdir = @srcdir@
+sysconfdir = @sysconfdir@
+target_alias = @target_alias@
+top_build_prefix = @top_build_prefix@
+top_builddir = @top_builddir@
+top_srcdir = @top_srcdir@
+SUBDIRS = .
+AM_CPPFLAGS = -I$(top_builddir)/src/lib -I$(top_srcdir)/src/lib \
+ $(BOOST_INCLUDES)
+AM_CXXFLAGS = $(KEA_CXXFLAGS)
+@USE_STATIC_LINK_TRUE@AM_LDFLAGS = -static
+@HAVE_GTEST_TRUE@libstats_unittests_SOURCES = run_unittests.cc \
+@HAVE_GTEST_TRUE@ observation_unittest.cc context_unittest.cc \
+@HAVE_GTEST_TRUE@ stats_mgr_unittest.cc
+@HAVE_GTEST_TRUE@libstats_unittests_CPPFLAGS = $(AM_CPPFLAGS) $(GTEST_INCLUDES)
+@HAVE_GTEST_TRUE@libstats_unittests_LDFLAGS = $(AM_LDFLAGS) $(GTEST_LDFLAGS)
+@HAVE_GTEST_TRUE@libstats_unittests_CXXFLAGS = $(AM_CXXFLAGS)
+@HAVE_GTEST_TRUE@libstats_unittests_LDADD = $(top_builddir)/src/lib/stats/libkea-stats.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/cc/libkea-cc.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/asiolink/libkea-asiolink.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/log/libkea-log.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/util/libkea-util.la \
+@HAVE_GTEST_TRUE@ $(top_builddir)/src/lib/exceptions/libkea-exceptions.la \
+@HAVE_GTEST_TRUE@ $(LOG4CPLUS_LIBS) $(GTEST_LDADD) \
+@HAVE_GTEST_TRUE@ $(BOOST_LIBS)
+all: all-recursive
+
+.SUFFIXES:
+.SUFFIXES: .cc .lo .o .obj
+$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
+ @for dep in $?; do \
+ case '$(am__configure_deps)' in \
+ *$$dep*) \
+ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
+ && { if test -f $@; then exit 0; else break; fi; }; \
+ exit 1;; \
+ esac; \
+ done; \
+ echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/lib/stats/tests/Makefile'; \
+ $(am__cd) $(top_srcdir) && \
+ $(AUTOMAKE) --foreign src/lib/stats/tests/Makefile
+Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
+ @case '$?' in \
+ *config.status*) \
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
+ *) \
+ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \
+ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \
+ esac;
+
+$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+
+$(top_srcdir)/configure: $(am__configure_deps)
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(ACLOCAL_M4): $(am__aclocal_m4_deps)
+ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
+$(am__aclocal_m4_deps):
+
+clean-noinstPROGRAMS:
+ @list='$(noinst_PROGRAMS)'; test -n "$$list" || exit 0; \
+ echo " rm -f" $$list; \
+ rm -f $$list || exit $$?; \
+ test -n "$(EXEEXT)" || exit 0; \
+ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \
+ echo " rm -f" $$list; \
+ rm -f $$list
+
+libstats_unittests$(EXEEXT): $(libstats_unittests_OBJECTS) $(libstats_unittests_DEPENDENCIES) $(EXTRA_libstats_unittests_DEPENDENCIES)
+ @rm -f libstats_unittests$(EXEEXT)
+ $(AM_V_CXXLD)$(libstats_unittests_LINK) $(libstats_unittests_OBJECTS) $(libstats_unittests_LDADD) $(LIBS)
+
+mostlyclean-compile:
+ -rm -f *.$(OBJEXT)
+
+distclean-compile:
+ -rm -f *.tab.c
+
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libstats_unittests-context_unittest.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libstats_unittests-observation_unittest.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libstats_unittests-run_unittests.Po@am__quote@ # am--include-marker
+@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/libstats_unittests-stats_mgr_unittest.Po@am__quote@ # am--include-marker
+
+$(am__depfiles_remade):
+ @$(MKDIR_P) $(@D)
+ @echo '# dummy' >$@-t && $(am__mv) $@-t $@
+
+am--depfiles: $(am__depfiles_remade)
+
+.cc.o:
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ $<
+
+.cc.obj:
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'`
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'`
+
+.cc.lo:
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $<
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(LTCXXCOMPILE) -c -o $@ $<
+
+libstats_unittests-run_unittests.o: run_unittests.cc
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -MT libstats_unittests-run_unittests.o -MD -MP -MF $(DEPDIR)/libstats_unittests-run_unittests.Tpo -c -o libstats_unittests-run_unittests.o `test -f 'run_unittests.cc' || echo '$(srcdir)/'`run_unittests.cc
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libstats_unittests-run_unittests.Tpo $(DEPDIR)/libstats_unittests-run_unittests.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='run_unittests.cc' object='libstats_unittests-run_unittests.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -c -o libstats_unittests-run_unittests.o `test -f 'run_unittests.cc' || echo '$(srcdir)/'`run_unittests.cc
+
+libstats_unittests-run_unittests.obj: run_unittests.cc
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -MT libstats_unittests-run_unittests.obj -MD -MP -MF $(DEPDIR)/libstats_unittests-run_unittests.Tpo -c -o libstats_unittests-run_unittests.obj `if test -f 'run_unittests.cc'; then $(CYGPATH_W) 'run_unittests.cc'; else $(CYGPATH_W) '$(srcdir)/run_unittests.cc'; fi`
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libstats_unittests-run_unittests.Tpo $(DEPDIR)/libstats_unittests-run_unittests.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='run_unittests.cc' object='libstats_unittests-run_unittests.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -c -o libstats_unittests-run_unittests.obj `if test -f 'run_unittests.cc'; then $(CYGPATH_W) 'run_unittests.cc'; else $(CYGPATH_W) '$(srcdir)/run_unittests.cc'; fi`
+
+libstats_unittests-observation_unittest.o: observation_unittest.cc
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -MT libstats_unittests-observation_unittest.o -MD -MP -MF $(DEPDIR)/libstats_unittests-observation_unittest.Tpo -c -o libstats_unittests-observation_unittest.o `test -f 'observation_unittest.cc' || echo '$(srcdir)/'`observation_unittest.cc
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libstats_unittests-observation_unittest.Tpo $(DEPDIR)/libstats_unittests-observation_unittest.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='observation_unittest.cc' object='libstats_unittests-observation_unittest.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -c -o libstats_unittests-observation_unittest.o `test -f 'observation_unittest.cc' || echo '$(srcdir)/'`observation_unittest.cc
+
+libstats_unittests-observation_unittest.obj: observation_unittest.cc
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -MT libstats_unittests-observation_unittest.obj -MD -MP -MF $(DEPDIR)/libstats_unittests-observation_unittest.Tpo -c -o libstats_unittests-observation_unittest.obj `if test -f 'observation_unittest.cc'; then $(CYGPATH_W) 'observation_unittest.cc'; else $(CYGPATH_W) '$(srcdir)/observation_unittest.cc'; fi`
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libstats_unittests-observation_unittest.Tpo $(DEPDIR)/libstats_unittests-observation_unittest.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='observation_unittest.cc' object='libstats_unittests-observation_unittest.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -c -o libstats_unittests-observation_unittest.obj `if test -f 'observation_unittest.cc'; then $(CYGPATH_W) 'observation_unittest.cc'; else $(CYGPATH_W) '$(srcdir)/observation_unittest.cc'; fi`
+
+libstats_unittests-context_unittest.o: context_unittest.cc
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -MT libstats_unittests-context_unittest.o -MD -MP -MF $(DEPDIR)/libstats_unittests-context_unittest.Tpo -c -o libstats_unittests-context_unittest.o `test -f 'context_unittest.cc' || echo '$(srcdir)/'`context_unittest.cc
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libstats_unittests-context_unittest.Tpo $(DEPDIR)/libstats_unittests-context_unittest.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='context_unittest.cc' object='libstats_unittests-context_unittest.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -c -o libstats_unittests-context_unittest.o `test -f 'context_unittest.cc' || echo '$(srcdir)/'`context_unittest.cc
+
+libstats_unittests-context_unittest.obj: context_unittest.cc
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -MT libstats_unittests-context_unittest.obj -MD -MP -MF $(DEPDIR)/libstats_unittests-context_unittest.Tpo -c -o libstats_unittests-context_unittest.obj `if test -f 'context_unittest.cc'; then $(CYGPATH_W) 'context_unittest.cc'; else $(CYGPATH_W) '$(srcdir)/context_unittest.cc'; fi`
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libstats_unittests-context_unittest.Tpo $(DEPDIR)/libstats_unittests-context_unittest.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='context_unittest.cc' object='libstats_unittests-context_unittest.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -c -o libstats_unittests-context_unittest.obj `if test -f 'context_unittest.cc'; then $(CYGPATH_W) 'context_unittest.cc'; else $(CYGPATH_W) '$(srcdir)/context_unittest.cc'; fi`
+
+libstats_unittests-stats_mgr_unittest.o: stats_mgr_unittest.cc
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -MT libstats_unittests-stats_mgr_unittest.o -MD -MP -MF $(DEPDIR)/libstats_unittests-stats_mgr_unittest.Tpo -c -o libstats_unittests-stats_mgr_unittest.o `test -f 'stats_mgr_unittest.cc' || echo '$(srcdir)/'`stats_mgr_unittest.cc
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libstats_unittests-stats_mgr_unittest.Tpo $(DEPDIR)/libstats_unittests-stats_mgr_unittest.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='stats_mgr_unittest.cc' object='libstats_unittests-stats_mgr_unittest.o' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -c -o libstats_unittests-stats_mgr_unittest.o `test -f 'stats_mgr_unittest.cc' || echo '$(srcdir)/'`stats_mgr_unittest.cc
+
+libstats_unittests-stats_mgr_unittest.obj: stats_mgr_unittest.cc
+@am__fastdepCXX_TRUE@ $(AM_V_CXX)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -MT libstats_unittests-stats_mgr_unittest.obj -MD -MP -MF $(DEPDIR)/libstats_unittests-stats_mgr_unittest.Tpo -c -o libstats_unittests-stats_mgr_unittest.obj `if test -f 'stats_mgr_unittest.cc'; then $(CYGPATH_W) 'stats_mgr_unittest.cc'; else $(CYGPATH_W) '$(srcdir)/stats_mgr_unittest.cc'; fi`
+@am__fastdepCXX_TRUE@ $(AM_V_at)$(am__mv) $(DEPDIR)/libstats_unittests-stats_mgr_unittest.Tpo $(DEPDIR)/libstats_unittests-stats_mgr_unittest.Po
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ $(AM_V_CXX)source='stats_mgr_unittest.cc' object='libstats_unittests-stats_mgr_unittest.obj' libtool=no @AMDEPBACKSLASH@
+@AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+@am__fastdepCXX_FALSE@ $(AM_V_CXX@am__nodep@)$(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(libstats_unittests_CPPFLAGS) $(CPPFLAGS) $(libstats_unittests_CXXFLAGS) $(CXXFLAGS) -c -o libstats_unittests-stats_mgr_unittest.obj `if test -f 'stats_mgr_unittest.cc'; then $(CYGPATH_W) 'stats_mgr_unittest.cc'; else $(CYGPATH_W) '$(srcdir)/stats_mgr_unittest.cc'; fi`
+
+mostlyclean-libtool:
+ -rm -f *.lo
+
+clean-libtool:
+ -rm -rf .libs _libs
+
+# This directory's subdirectories are mostly independent; you can cd
+# into them and run 'make' without going through this Makefile.
+# To change the values of 'make' variables: instead of editing Makefiles,
+# (1) if the variable is set in 'config.status', edit 'config.status'
+# (which will cause the Makefiles to be regenerated when you run 'make');
+# (2) otherwise, pass the desired values on the 'make' command line.
+$(am__recursive_targets):
+ @fail=; \
+ if $(am__make_keepgoing); then \
+ failcom='fail=yes'; \
+ else \
+ failcom='exit 1'; \
+ fi; \
+ dot_seen=no; \
+ target=`echo $@ | sed s/-recursive//`; \
+ case "$@" in \
+ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \
+ *) list='$(SUBDIRS)' ;; \
+ esac; \
+ for subdir in $$list; do \
+ echo "Making $$target in $$subdir"; \
+ if test "$$subdir" = "."; then \
+ dot_seen=yes; \
+ local_target="$$target-am"; \
+ else \
+ local_target="$$target"; \
+ fi; \
+ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \
+ || eval $$failcom; \
+ done; \
+ if test "$$dot_seen" = "no"; then \
+ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \
+ fi; test -z "$$fail"
+
+ID: $(am__tagged_files)
+ $(am__define_uniq_tagged_files); mkid -fID $$unique
+tags: tags-recursive
+TAGS: tags
+
+tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+ set x; \
+ here=`pwd`; \
+ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \
+ include_option=--etags-include; \
+ empty_fix=.; \
+ else \
+ include_option=--include; \
+ empty_fix=; \
+ fi; \
+ list='$(SUBDIRS)'; for subdir in $$list; do \
+ if test "$$subdir" = .; then :; else \
+ test ! -f $$subdir/TAGS || \
+ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \
+ fi; \
+ done; \
+ $(am__define_uniq_tagged_files); \
+ shift; \
+ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \
+ test -n "$$unique" || unique=$$empty_fix; \
+ if test $$# -gt 0; then \
+ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+ "$$@" $$unique; \
+ else \
+ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \
+ $$unique; \
+ fi; \
+ fi
+ctags: ctags-recursive
+
+CTAGS: ctags
+ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files)
+ $(am__define_uniq_tagged_files); \
+ test -z "$(CTAGS_ARGS)$$unique" \
+ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \
+ $$unique
+
+GTAGS:
+ here=`$(am__cd) $(top_builddir) && pwd` \
+ && $(am__cd) $(top_srcdir) \
+ && gtags -i $(GTAGS_ARGS) "$$here"
+cscopelist: cscopelist-recursive
+
+cscopelist-am: $(am__tagged_files)
+ list='$(am__tagged_files)'; \
+ case "$(srcdir)" in \
+ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \
+ *) sdir=$(subdir)/$(srcdir) ;; \
+ esac; \
+ for i in $$list; do \
+ if test -f "$$i"; then \
+ echo "$(subdir)/$$i"; \
+ else \
+ echo "$$sdir/$$i"; \
+ fi; \
+ done >> $(top_builddir)/cscope.files
+
+distclean-tags:
+ -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags
+
+check-TESTS: $(TESTS)
+ @failed=0; all=0; xfail=0; xpass=0; skip=0; \
+ srcdir=$(srcdir); export srcdir; \
+ list=' $(TESTS) '; \
+ $(am__tty_colors); \
+ if test -n "$$list"; then \
+ for tst in $$list; do \
+ if test -f ./$$tst; then dir=./; \
+ elif test -f $$tst; then dir=; \
+ else dir="$(srcdir)/"; fi; \
+ if $(TESTS_ENVIRONMENT) $${dir}$$tst $(AM_TESTS_FD_REDIRECT); then \
+ all=`expr $$all + 1`; \
+ case " $(XFAIL_TESTS) " in \
+ *[\ \ ]$$tst[\ \ ]*) \
+ xpass=`expr $$xpass + 1`; \
+ failed=`expr $$failed + 1`; \
+ col=$$red; res=XPASS; \
+ ;; \
+ *) \
+ col=$$grn; res=PASS; \
+ ;; \
+ esac; \
+ elif test $$? -ne 77; then \
+ all=`expr $$all + 1`; \
+ case " $(XFAIL_TESTS) " in \
+ *[\ \ ]$$tst[\ \ ]*) \
+ xfail=`expr $$xfail + 1`; \
+ col=$$lgn; res=XFAIL; \
+ ;; \
+ *) \
+ failed=`expr $$failed + 1`; \
+ col=$$red; res=FAIL; \
+ ;; \
+ esac; \
+ else \
+ skip=`expr $$skip + 1`; \
+ col=$$blu; res=SKIP; \
+ fi; \
+ echo "$${col}$$res$${std}: $$tst"; \
+ done; \
+ if test "$$all" -eq 1; then \
+ tests="test"; \
+ All=""; \
+ else \
+ tests="tests"; \
+ All="All "; \
+ fi; \
+ if test "$$failed" -eq 0; then \
+ if test "$$xfail" -eq 0; then \
+ banner="$$All$$all $$tests passed"; \
+ else \
+ if test "$$xfail" -eq 1; then failures=failure; else failures=failures; fi; \
+ banner="$$All$$all $$tests behaved as expected ($$xfail expected $$failures)"; \
+ fi; \
+ else \
+ if test "$$xpass" -eq 0; then \
+ banner="$$failed of $$all $$tests failed"; \
+ else \
+ if test "$$xpass" -eq 1; then passes=pass; else passes=passes; fi; \
+ banner="$$failed of $$all $$tests did not behave as expected ($$xpass unexpected $$passes)"; \
+ fi; \
+ fi; \
+ dashes="$$banner"; \
+ skipped=""; \
+ if test "$$skip" -ne 0; then \
+ if test "$$skip" -eq 1; then \
+ skipped="($$skip test was not run)"; \
+ else \
+ skipped="($$skip tests were not run)"; \
+ fi; \
+ test `echo "$$skipped" | wc -c` -le `echo "$$banner" | wc -c` || \
+ dashes="$$skipped"; \
+ fi; \
+ report=""; \
+ if test "$$failed" -ne 0 && test -n "$(PACKAGE_BUGREPORT)"; then \
+ report="Please report to $(PACKAGE_BUGREPORT)"; \
+ test `echo "$$report" | wc -c` -le `echo "$$banner" | wc -c` || \
+ dashes="$$report"; \
+ fi; \
+ dashes=`echo "$$dashes" | sed s/./=/g`; \
+ if test "$$failed" -eq 0; then \
+ col="$$grn"; \
+ else \
+ col="$$red"; \
+ fi; \
+ echo "$${col}$$dashes$${std}"; \
+ echo "$${col}$$banner$${std}"; \
+ test -z "$$skipped" || echo "$${col}$$skipped$${std}"; \
+ test -z "$$report" || echo "$${col}$$report$${std}"; \
+ echo "$${col}$$dashes$${std}"; \
+ test "$$failed" -eq 0; \
+ else :; fi
+
+distdir: $(BUILT_SOURCES)
+ $(MAKE) $(AM_MAKEFLAGS) distdir-am
+
+distdir-am: $(DISTFILES)
+ @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
+ list='$(DISTFILES)'; \
+ dist_files=`for file in $$list; do echo $$file; done | \
+ sed -e "s|^$$srcdirstrip/||;t" \
+ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
+ case $$dist_files in \
+ */*) $(MKDIR_P) `echo "$$dist_files" | \
+ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
+ sort -u` ;; \
+ esac; \
+ for file in $$dist_files; do \
+ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
+ if test -d $$d/$$file; then \
+ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
+ if test -d "$(distdir)/$$file"; then \
+ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+ fi; \
+ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
+ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
+ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
+ fi; \
+ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
+ else \
+ test -f "$(distdir)/$$file" \
+ || cp -p $$d/$$file "$(distdir)/$$file" \
+ || exit 1; \
+ fi; \
+ done
+ @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \
+ if test "$$subdir" = .; then :; else \
+ $(am__make_dryrun) \
+ || test -d "$(distdir)/$$subdir" \
+ || $(MKDIR_P) "$(distdir)/$$subdir" \
+ || exit 1; \
+ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \
+ $(am__relativize); \
+ new_distdir=$$reldir; \
+ dir1=$$subdir; dir2="$(top_distdir)"; \
+ $(am__relativize); \
+ new_top_distdir=$$reldir; \
+ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \
+ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \
+ ($(am__cd) $$subdir && \
+ $(MAKE) $(AM_MAKEFLAGS) \
+ top_distdir="$$new_top_distdir" \
+ distdir="$$new_distdir" \
+ am__remove_distdir=: \
+ am__skip_length_check=: \
+ am__skip_mode_fix=: \
+ distdir) \
+ || exit 1; \
+ fi; \
+ done
+check-am: all-am
+ $(MAKE) $(AM_MAKEFLAGS) check-TESTS
+check: check-recursive
+all-am: Makefile $(PROGRAMS)
+installdirs: installdirs-recursive
+installdirs-am:
+install: install-recursive
+install-exec: install-exec-recursive
+install-data: install-data-recursive
+uninstall: uninstall-recursive
+
+install-am: all-am
+ @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
+
+installcheck: installcheck-recursive
+install-strip:
+ if test -z '$(STRIP)'; then \
+ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+ install; \
+ else \
+ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
+ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
+ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
+ fi
+mostlyclean-generic:
+
+clean-generic:
+
+distclean-generic:
+ -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
+ -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
+
+maintainer-clean-generic:
+ @echo "This command is intended for maintainers to use"
+ @echo "it deletes files that may require special tools to rebuild."
+clean: clean-recursive
+
+clean-am: clean-generic clean-libtool clean-noinstPROGRAMS \
+ mostlyclean-am
+
+distclean: distclean-recursive
+ -rm -f ./$(DEPDIR)/libstats_unittests-context_unittest.Po
+ -rm -f ./$(DEPDIR)/libstats_unittests-observation_unittest.Po
+ -rm -f ./$(DEPDIR)/libstats_unittests-run_unittests.Po
+ -rm -f ./$(DEPDIR)/libstats_unittests-stats_mgr_unittest.Po
+ -rm -f Makefile
+distclean-am: clean-am distclean-compile distclean-generic \
+ distclean-tags
+
+dvi: dvi-recursive
+
+dvi-am:
+
+html: html-recursive
+
+html-am:
+
+info: info-recursive
+
+info-am:
+
+install-data-am:
+
+install-dvi: install-dvi-recursive
+
+install-dvi-am:
+
+install-exec-am:
+
+install-html: install-html-recursive
+
+install-html-am:
+
+install-info: install-info-recursive
+
+install-info-am:
+
+install-man:
+
+install-pdf: install-pdf-recursive
+
+install-pdf-am:
+
+install-ps: install-ps-recursive
+
+install-ps-am:
+
+installcheck-am:
+
+maintainer-clean: maintainer-clean-recursive
+ -rm -f ./$(DEPDIR)/libstats_unittests-context_unittest.Po
+ -rm -f ./$(DEPDIR)/libstats_unittests-observation_unittest.Po
+ -rm -f ./$(DEPDIR)/libstats_unittests-run_unittests.Po
+ -rm -f ./$(DEPDIR)/libstats_unittests-stats_mgr_unittest.Po
+ -rm -f Makefile
+maintainer-clean-am: distclean-am maintainer-clean-generic
+
+mostlyclean: mostlyclean-recursive
+
+mostlyclean-am: mostlyclean-compile mostlyclean-generic \
+ mostlyclean-libtool
+
+pdf: pdf-recursive
+
+pdf-am:
+
+ps: ps-recursive
+
+ps-am:
+
+uninstall-am:
+
+.MAKE: $(am__recursive_targets) check-am install-am install-strip
+
+.PHONY: $(am__recursive_targets) CTAGS GTAGS TAGS all all-am \
+ am--depfiles check check-TESTS check-am clean clean-generic \
+ clean-libtool clean-noinstPROGRAMS cscopelist-am ctags \
+ ctags-am distclean distclean-compile distclean-generic \
+ distclean-libtool distclean-tags distdir dvi dvi-am html \
+ html-am info info-am install install-am install-data \
+ install-data-am install-dvi install-dvi-am install-exec \
+ install-exec-am install-html install-html-am install-info \
+ install-info-am install-man install-pdf install-pdf-am \
+ install-ps install-ps-am install-strip installcheck \
+ installcheck-am installdirs installdirs-am maintainer-clean \
+ maintainer-clean-generic mostlyclean mostlyclean-compile \
+ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
+ tags tags-am uninstall uninstall-am
+
+.PRECIOUS: Makefile
+
+
+# Tell versions [3.59,3.63) of GNU make to not export all variables.
+# Otherwise a system limit (for SysV at least) may be exceeded.
+.NOEXPORT:
diff --git a/src/lib/stats/tests/context_unittest.cc b/src/lib/stats/tests/context_unittest.cc
new file mode 100644
index 0000000..92c754c
--- /dev/null
+++ b/src/lib/stats/tests/context_unittest.cc
@@ -0,0 +1,126 @@
+// Copyright (C) 2015-2020 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#include <config.h>
+
+#include <stats/context.h>
+#include <gtest/gtest.h>
+#include <util/chrono_time_utils.h>
+#include <string>
+
+using namespace isc::data;
+using namespace isc::stats;
+using namespace std;
+using namespace std::chrono;
+
+// Basic test that checks get, add, del methods
+TEST(ContextTest, basic) {
+
+    // Let's create a few observations. Using floating point,
+    // as they're the easiest to initialize.
+ ObservationPtr a(new Observation("alpha", 1.11));
+ ObservationPtr b(new Observation("beta", 2.22));
+ ObservationPtr c(new Observation("gamma", 3.33));
+ string expected_a = a->getJSON()->str();
+ string expected_b = b->getJSON()->str();
+ string expected_c = c->getJSON()->str();
+
+
+ // Context where we will store the observations.
+ StatContext ctx;
+
+ // By default the context does not hold any statistics.
+ EXPECT_EQ(0, ctx.size());
+
+ // It should be possible to add 'a' statistic
+ EXPECT_NO_THROW(ctx.add(a));
+
+ // We can't add a duplicate.
+ EXPECT_THROW(ctx.add(a), DuplicateStat);
+
+ // It should be ok to add other statistics
+ EXPECT_NO_THROW(ctx.add(b));
+ EXPECT_NO_THROW(ctx.add(c));
+
+ // By now we should have 3 statistics recorded
+ EXPECT_EQ(3, ctx.size());
+
+ // Let's try to retrieve them
+ ObservationPtr from_ctx;
+ EXPECT_NO_THROW(from_ctx = ctx.get("alpha"));
+ ASSERT_TRUE(from_ctx);
+ EXPECT_EQ(expected_a, from_ctx->getJSON()->str());
+
+ EXPECT_NO_THROW(from_ctx = ctx.get("beta"));
+ ASSERT_TRUE(from_ctx);
+ EXPECT_EQ(expected_b, from_ctx->getJSON()->str());
+
+ EXPECT_NO_THROW(from_ctx = ctx.get("gamma"));
+ ASSERT_TRUE(from_ctx);
+ EXPECT_EQ(expected_c, from_ctx->getJSON()->str());
+
+    // Let's try to retrieve a non-existing stat
+ EXPECT_NO_THROW(from_ctx = ctx.get("delta"));
+ EXPECT_FALSE(from_ctx);
+
+ // Now delete one of the stats...
+ EXPECT_TRUE(ctx.del("beta"));
+
+ // ... and check that it's really gone.
+ EXPECT_FALSE(ctx.get("beta"));
+
+    // An attempt to delete a non-existing stat should fail.
+ EXPECT_FALSE(ctx.del("beta"));
+
+ ConstElementPtr result;
+ EXPECT_NO_THROW(result = ctx.getAll());
+
+ ASSERT_TRUE(result);
+ ElementPtr expected_result = Element::createMap();
+ expected_result->set("alpha", a->getJSON());
+ expected_result->set("gamma", c->getJSON());
+ EXPECT_EQ(result->str(), expected_result->str());
+
+ // Reset all statistics.
+ EXPECT_NO_THROW(ctx.resetAll());
+
+ EXPECT_NO_THROW(from_ctx = ctx.get("alpha"));
+ ASSERT_TRUE(from_ctx);
+ EXPECT_NE(expected_a, from_ctx->getJSON()->str());
+ EXPECT_EQ(0.0, a->getFloat().first);
+
+ EXPECT_NO_THROW(from_ctx = ctx.get("gamma"));
+ ASSERT_TRUE(from_ctx);
+ EXPECT_NE(expected_c, from_ctx->getJSON()->str());
+ EXPECT_EQ(0.0, c->getFloat().first);
+
+    // Set the max sample count for all statistics
+ EXPECT_NO_THROW(ctx.setMaxSampleCountAll(50));
+
+ EXPECT_NO_THROW(from_ctx = ctx.get("alpha"));
+ ASSERT_TRUE(from_ctx);
+ EXPECT_EQ(from_ctx->getMaxSampleCount().second, 50);
+
+ EXPECT_NO_THROW(from_ctx = ctx.get("gamma"));
+ ASSERT_TRUE(from_ctx);
+ EXPECT_EQ(from_ctx->getMaxSampleCount().second, 50);
+
+    // Set the max sample age for all statistics
+ const StatsDuration& dur(minutes(4) + seconds(5) + milliseconds(3));
+ EXPECT_NO_THROW(ctx.setMaxSampleAgeAll(dur));
+
+ EXPECT_NO_THROW(from_ctx = ctx.get("alpha"));
+ ASSERT_TRUE(from_ctx);
+ EXPECT_EQ(from_ctx->getMaxSampleAge().second, dur);
+
+ EXPECT_NO_THROW(from_ctx = ctx.get("gamma"));
+ ASSERT_TRUE(from_ctx);
+ EXPECT_EQ(from_ctx->getMaxSampleAge().second, dur);
+
+ // Clear all statistics.
+ EXPECT_NO_THROW(ctx.clear());
+ EXPECT_EQ(0, ctx.size());
+}
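The test above exercises the public StatContext interface (add(), get(), del(), getAll(), resetAll(), setMaxSampleCountAll(), setMaxSampleAgeAll() and clear()). For reference, a minimal standalone sketch of the basic add/get/del cycle follows; it is an illustration only, not part of the patch, it uses only calls exercised by context_unittest.cc, and the statistic name "pkt-rate" is invented for the example.

    // Illustration only -- not part of the patch. Relies solely on the
    // StatContext/Observation calls used in context_unittest.cc above.
    #include <stats/context.h>
    #include <iostream>

    using namespace isc::stats;

    int main() {
        StatContext ctx;

        // Register a floating-point observation under an arbitrary name.
        ObservationPtr rate(new Observation("pkt-rate", 42.5));
        ctx.add(rate);

        // Look it up by name; get() returns a null pointer for unknown names.
        ObservationPtr found = ctx.get("pkt-rate");
        if (found) {
            std::cout << found->getJSON()->str() << std::endl;
        }

        // del() returns false when the name is not present.
        ctx.del("pkt-rate");
        return (ctx.size() == 0 ? 0 : 1);
    }
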
diff --git a/src/lib/stats/tests/observation_unittest.cc b/src/lib/stats/tests/observation_unittest.cc
new file mode 100644
index 0000000..ad10368
--- /dev/null
+++ b/src/lib/stats/tests/observation_unittest.cc
@@ -0,0 +1,619 @@
+// Copyright (C) 2015-2021 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#include <config.h>
+
+#include <stats/observation.h>
+#include <exceptions/exceptions.h>
+#include <util/chrono_time_utils.h>
+#include <boost/shared_ptr.hpp>
+#include <gtest/gtest.h>
+
+#include <iostream>
+#include <sstream>
+
+#include <unistd.h>
+
+using namespace isc;
+using namespace isc::stats;
+using namespace std::chrono;
+
+namespace {
+
+static const StatsDuration& dur1234(hours(1) + minutes(2) + seconds(3) +
+ milliseconds(4));
+static const StatsDuration& dur5678(hours(5) + minutes(6) + seconds(7) +
+ milliseconds(8));
+static const StatsDuration& dur681012(hours(6) + minutes(8) + seconds(10) +
+ milliseconds(12));
+static const StatsDuration& dur453(minutes(4) + seconds(5) + milliseconds(3));
+
+// This test verifies that the number of seconds can be retrieved.
+TEST(StatsDurationTest, toSeconds) {
+ StatsDuration dur = StatsDuration::zero();
+ dur += hours(1) + minutes(1) + seconds(1) + milliseconds(1);
+ EXPECT_EQ(3661, toSeconds(dur));
+}
+
+/// @brief Test class for Observation
+///
+/// This simple fixture class initializes four observations:
+/// a (integer), b (float), c (time duration) and d (string).
+class ObservationTest : public ::testing::Test {
+public:
+
+ /// @brief Constructor
+ /// Initializes four observations.
+ ObservationTest() :
+ a("alpha", static_cast<int64_t>(1234)), // integer
+ b("beta", 12.34), // float
+ c("gamma", dur1234), // duration
+ d("delta", "1234") { // string
+ }
+
+ Observation a;
+ Observation b;
+ Observation c;
+ Observation d;
+};
+
+// Basic tests for the Observation constructors. This test checks whether
+// parameters passed to the constructor initialize the object properly.
+TEST_F(ObservationTest, constructor) {
+ EXPECT_EQ(Observation::STAT_INTEGER, a.getType());
+ EXPECT_EQ(Observation::STAT_FLOAT, b.getType());
+ EXPECT_EQ(Observation::STAT_DURATION, c.getType());
+ EXPECT_EQ(Observation::STAT_STRING, d.getType());
+
+ EXPECT_EQ(1234, a.getInteger().first);
+ EXPECT_EQ(12.34, b.getFloat().first);
+ EXPECT_EQ(dur1234, c.getDuration().first);
+ EXPECT_EQ("1234", d.getString().first);
+
+ // Let's check that attempting to get a different type
+ // than used will cause an exception.
+ EXPECT_THROW(a.getFloat(), InvalidStatType);
+ EXPECT_THROW(a.getDuration(), InvalidStatType);
+ EXPECT_THROW(a.getString(), InvalidStatType);
+
+ EXPECT_THROW(b.getInteger(), InvalidStatType);
+ EXPECT_THROW(b.getDuration(), InvalidStatType);
+ EXPECT_THROW(b.getString(), InvalidStatType);
+
+ EXPECT_THROW(c.getInteger(), InvalidStatType);
+ EXPECT_THROW(c.getFloat(), InvalidStatType);
+ EXPECT_THROW(c.getString(), InvalidStatType);
+
+ EXPECT_THROW(d.getInteger(), InvalidStatType);
+ EXPECT_THROW(d.getFloat(), InvalidStatType);
+ EXPECT_THROW(d.getDuration(), InvalidStatType);
+}
+
+// This test checks whether it is possible to set a statistic to an absolute
+// value for all given types.
+TEST_F(ObservationTest, setValue) {
+ EXPECT_NO_THROW(a.setValue(static_cast<int64_t>(5678)));
+ EXPECT_NO_THROW(b.setValue(56e+78));
+ EXPECT_NO_THROW(c.setValue(dur5678));
+ EXPECT_NO_THROW(d.setValue("fiveSixSevenEight"));
+
+
+ EXPECT_EQ(5678, a.getInteger().first);
+ EXPECT_EQ(56e+78, b.getFloat().first);
+ EXPECT_EQ(dur5678, c.getDuration().first);
+ EXPECT_EQ("fiveSixSevenEight", d.getString().first);
+
+    // Now check that setting a value of a different type
+    // throws an exception.
+ EXPECT_THROW(a.setValue(56e+78), InvalidStatType);
+ EXPECT_THROW(a.setValue(dur5678), InvalidStatType);
+ EXPECT_THROW(a.setValue("fiveSixSevenEight"), InvalidStatType);
+
+ EXPECT_THROW(b.setValue(static_cast<int64_t>(5678)), InvalidStatType);
+ EXPECT_THROW(b.setValue(dur5678), InvalidStatType);
+ EXPECT_THROW(b.setValue("fiveSixSevenEight"), InvalidStatType);
+
+ EXPECT_THROW(c.setValue(static_cast<int64_t>(5678)), InvalidStatType);
+ EXPECT_THROW(c.setValue(56e+78), InvalidStatType);
+ EXPECT_THROW(c.setValue("fiveSixSevenEight"), InvalidStatType);
+
+ EXPECT_THROW(d.setValue(static_cast<int64_t>(5678)), InvalidStatType);
+ EXPECT_THROW(d.setValue(56e+78), InvalidStatType);
+ EXPECT_THROW(d.setValue(dur5678), InvalidStatType);
+}
+
+// This test checks whether it is possible to add a value to an existing
+// counter.
+TEST_F(ObservationTest, addValue) {
+ // Note: all Observations were set to 1234, 12.34 or similar in
+ // ObservationTest constructor.
+
+ EXPECT_NO_THROW(a.addValue(static_cast<int64_t>(5678)));
+ EXPECT_NO_THROW(b.addValue(56.78));
+ EXPECT_NO_THROW(c.addValue(dur5678));
+ EXPECT_NO_THROW(d.addValue("fiveSixSevenEight"));
+
+ EXPECT_EQ(6912, a.getInteger().first);
+ EXPECT_EQ(69.12, b.getFloat().first);
+ EXPECT_EQ(dur681012, c.getDuration().first);
+ EXPECT_EQ("1234fiveSixSevenEight", d.getString().first);
+
+ ASSERT_EQ(a.getSize(), 2);
+ ASSERT_EQ(b.getSize(), 2);
+ ASSERT_EQ(c.getSize(), 2);
+ ASSERT_EQ(d.getSize(), 2);
+}
+
+// This test checks if collecting more than one sample
+// works well.
+TEST_F(ObservationTest, moreThanOne) {
+ // Arrays of 4 types of samples
+ int64_t int_samples[3] = {1234, 6912, 5678};
+ double float_samples[3] = {12.34, 69.12, 56e+78};
+ StatsDuration duration_samples[3] = {dur1234,
+ dur681012, dur5678};
+ std::string string_samples[3] = {"1234", "1234fiveSixSevenEight", "fiveSixSevenEight"};
+
+ EXPECT_NO_THROW(a.addValue(static_cast<int64_t>(5678)));
+ EXPECT_NO_THROW(b.addValue(56.78));
+ EXPECT_NO_THROW(c.addValue(dur5678));
+ EXPECT_NO_THROW(d.addValue("fiveSixSevenEight"));
+
+ EXPECT_NO_THROW(a.setValue(static_cast<int64_t>(5678)));
+ EXPECT_NO_THROW(b.setValue(56e+78));
+ EXPECT_NO_THROW(c.setValue(dur5678));
+ EXPECT_NO_THROW(d.setValue("fiveSixSevenEight"));
+
+ ASSERT_EQ(a.getSize(), 3);
+ ASSERT_EQ(b.getSize(), 3);
+ ASSERT_EQ(c.getSize(), 3);
+ ASSERT_EQ(d.getSize(), 3);
+
+ ASSERT_NO_THROW(a.getIntegers());
+ ASSERT_NO_THROW(b.getFloats());
+ ASSERT_NO_THROW(c.getDurations());
+ ASSERT_NO_THROW(d.getStrings());
+
+ std::list<IntegerSample> samples_int = a.getIntegers(); // List of all integer samples
+ std::list<FloatSample> samples_float = b.getFloats(); // List of all float samples
+ std::list<DurationSample> samples_dur = c.getDurations(); // List of all duration samples
+ std::list<StringSample> samples_str = d.getStrings(); // List of all string samples
+
+    uint32_t i = 2; // Index pointing to the last element of the sample arrays
+
+ for (std::list<IntegerSample>::iterator it = samples_int.begin(); it != samples_int.end(); ++it) {
+ EXPECT_EQ(int_samples[i], static_cast<int64_t>((*it).first));
+ --i;
+ }
+ i = 2;
+ for (std::list<FloatSample>::iterator it = samples_float.begin(); it != samples_float.end(); ++it) {
+ EXPECT_EQ(float_samples[i], (*it).first);
+ --i;
+ }
+ i = 2;
+ for (std::list<DurationSample>::iterator it = samples_dur.begin(); it != samples_dur.end(); ++it) {
+ EXPECT_EQ(duration_samples[i], (*it).first);
+ --i;
+ }
+ i = 2;
+ for (std::list<StringSample>::iterator it = samples_str.begin(); it != samples_str.end(); ++it) {
+ EXPECT_EQ(string_samples[i], (*it).first);
+ --i;
+ }
+}
+
+// This test checks whether the reported size of the storage
+// matches the expected value.
+TEST_F(ObservationTest, getSize) {
+ // Check if size of storages is equal to 1
+ ASSERT_EQ(a.getSize(), 1);
+ ASSERT_EQ(b.getSize(), 1);
+ ASSERT_EQ(c.getSize(), 1);
+ ASSERT_EQ(d.getSize(), 1);
+
+ a.addValue(static_cast<int64_t>(5678));
+ b.addValue(56.78);
+ c.addValue(dur5678);
+ d.addValue("fiveSixSevenEight");
+
+ EXPECT_NO_THROW(a.getSize());
+ EXPECT_NO_THROW(b.getSize());
+ EXPECT_NO_THROW(c.getSize());
+ EXPECT_NO_THROW(d.getSize());
+
+ // Check if size of storages is equal to 2
+ ASSERT_EQ(a.getSize(), 2);
+ ASSERT_EQ(b.getSize(), 2);
+ ASSERT_EQ(c.getSize(), 2);
+ ASSERT_EQ(d.getSize(), 2);
+
+ a.setValue(static_cast<int64_t>(5678));
+ b.setValue(56e+78);
+ c.setValue(dur5678);
+ d.setValue("fiveSixSevenEight");
+
+ EXPECT_NO_THROW(a.getSize());
+ EXPECT_NO_THROW(b.getSize());
+ EXPECT_NO_THROW(c.getSize());
+ EXPECT_NO_THROW(d.getSize());
+
+ // Check if size of storages is equal to 3
+ ASSERT_EQ(a.getSize(), 3);
+ ASSERT_EQ(b.getSize(), 3);
+ ASSERT_EQ(c.getSize(), 3);
+ ASSERT_EQ(d.getSize(), 3);
+}
+
+// Checks whether setting the maximum sample count works properly
+TEST_F(ObservationTest, setCountLimit) {
+    // Prepare 22 test samples for each type of storage
+ int64_t int_samples[22] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13,
+ 14, 15, 16, 17, 18, 19, 20, 21};
+ double float_samples[22] = {0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0,
+ 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0, 19.0,
+ 20.0, 21.0};
+ std::string string_samples[22] = {"a", "b", "c", "d", "e", "f", "g", "h",
+ "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u",
+ "v"};
+ StatsDuration duration_samples[22];
+
+ for (uint32_t i = 0; i < 22; ++i) {
+ duration_samples[i] = milliseconds(i);
+ }
+
+    // By default max_sample_count is set to 20 and max_sample_age is disabled.
+    // Add 21 samples to each type of Observation.
+ for (uint32_t i = 0; i < 21; ++i) {
+ a.setValue(int_samples[i]);
+ }
+ for (uint32_t i = 0; i < 21; ++i) {
+ b.setValue(float_samples[i]);
+ }
+ for (uint32_t i = 0; i < 21; ++i) {
+ c.setValue(duration_samples[i]);
+ }
+ for (uint32_t i = 0; i < 21; ++i) {
+ d.setValue(string_samples[i]);
+ }
+
+ // Getting all 4 types of samples after inserting 21 values
+ std::list<IntegerSample> samples_int = a.getIntegers();
+ std::list<FloatSample> samples_float = b.getFloats();
+ std::list<DurationSample> samples_duration = c.getDurations();
+ std::list<StringSample> samples_string = d.getStrings();
+
+    // Check that each storage holds only 20 samples (the oldest one was discarded)
+ ASSERT_EQ(a.getSize(), 20);
+ ASSERT_EQ(b.getSize(), 20);
+ ASSERT_EQ(c.getSize(), 20);
+ ASSERT_EQ(d.getSize(), 20);
+
+ // And whether stored values are correct
+    uint32_t i = 20; // index of the newest stored sample in the arrays of test samples
+ for (std::list<IntegerSample>::iterator it = samples_int.begin(); it != samples_int.end(); ++it) {
+ EXPECT_EQ((*it).first, int_samples[i]);
+ --i;
+ }
+    i = 20; // index of the newest stored sample in the arrays of test samples
+ for (std::list<FloatSample>::iterator it = samples_float.begin(); it != samples_float.end(); ++it) {
+ EXPECT_EQ((*it).first, float_samples[i]);
+ --i;
+ }
+    i = 20; // index of the newest stored sample in the arrays of test samples
+ for (std::list<DurationSample>::iterator it = samples_duration.begin(); it != samples_duration.end(); ++it) {
+ EXPECT_EQ((*it).first, duration_samples[i]);
+ --i;
+ }
+    i = 20; // index of the newest stored sample in the arrays of test samples
+ for (std::list<StringSample>::iterator it = samples_string.begin(); it != samples_string.end(); ++it) {
+ EXPECT_EQ((*it).first, string_samples[i]);
+ --i;
+ }
+
+    // Shrink the maximum sample count
+ ASSERT_NO_THROW(a.setMaxSampleCount(10));
+ ASSERT_NO_THROW(b.setMaxSampleCount(10));
+ ASSERT_NO_THROW(c.setMaxSampleCount(10));
+ ASSERT_NO_THROW(d.setMaxSampleCount(10));
+
+ samples_int = a.getIntegers();
+ samples_float = b.getFloats();
+ samples_duration = c.getDurations();
+ samples_string = d.getStrings();
+
+    // Check that each storage now holds only 10 samples
+ ASSERT_EQ(a.getSize(), 10);
+ ASSERT_EQ(b.getSize(), 10);
+ ASSERT_EQ(c.getSize(), 10);
+ ASSERT_EQ(d.getSize(), 10);
+
+ // And whether storages contain only the 10 newest values
+    i = 20; // index of the newest stored sample in the arrays of test samples
+ for (std::list<IntegerSample>::iterator it = samples_int.begin(); it != samples_int.end(); ++it) {
+ EXPECT_EQ((*it).first, int_samples[i]);
+ --i;
+ }
+    i = 20; // index of the newest stored sample in the arrays of test samples
+ for (std::list<FloatSample>::iterator it = samples_float.begin(); it != samples_float.end(); ++it) {
+ EXPECT_EQ((*it).first, float_samples[i]);
+ --i;
+ }
+    i = 20; // index of the newest stored sample in the arrays of test samples
+ for (std::list<DurationSample>::iterator it = samples_duration.begin(); it != samples_duration.end(); ++it) {
+ EXPECT_EQ((*it).first, duration_samples[i]);
+ --i;
+ }
+    i = 20; // index of the newest stored sample in the arrays of test samples
+ for (std::list<StringSample>::iterator it = samples_string.begin(); it != samples_string.end(); ++it) {
+ EXPECT_EQ((*it).first, string_samples[i]);
+ --i;
+ }
+
+    // Increase max_sample_count again
+ ASSERT_NO_THROW(a.setMaxSampleCount(50));
+ ASSERT_NO_THROW(b.setMaxSampleCount(50));
+ ASSERT_NO_THROW(c.setMaxSampleCount(50));
+ ASSERT_NO_THROW(d.setMaxSampleCount(50));
+
+    // Check that the sizes did not change; raising the limit alone does not add samples
+ ASSERT_EQ(a.getSize(), 10);
+ ASSERT_EQ(b.getSize(), 10);
+ ASSERT_EQ(c.getSize(), 10);
+ ASSERT_EQ(d.getSize(), 10);
+
+ // Add new values to each type of Observation
+ a.setValue(static_cast<int64_t>(21));
+ b.setValue(21.0);
+ c.setValue(milliseconds(21));
+ d.setValue("v");
+
+ samples_int = a.getIntegers();
+ samples_float = b.getFloats();
+ samples_duration = c.getDurations();
+ samples_string = d.getStrings();
+
+ ASSERT_EQ(a.getSize(), 11);
+ ASSERT_EQ(b.getSize(), 11);
+ ASSERT_EQ(c.getSize(), 11);
+ ASSERT_EQ(d.getSize(), 11);
+
+    i = 21; // index of the last element in the arrays of test samples
+ for (std::list<IntegerSample>::iterator it = samples_int.begin(); it != samples_int.end(); ++it) {
+ EXPECT_EQ((*it).first, int_samples[i]);
+ --i;
+ }
+    i = 21; // index of the last element in the arrays of test samples
+ for (std::list<FloatSample>::iterator it = samples_float.begin(); it != samples_float.end(); ++it) {
+ EXPECT_EQ((*it).first, float_samples[i]);
+ --i;
+ }
+    i = 21; // index of the last element in the arrays of test samples
+ for (std::list<DurationSample>::iterator it = samples_duration.begin(); it != samples_duration.end(); ++it) {
+ EXPECT_EQ((*it).first, duration_samples[i]);
+ --i;
+ }
+    i = 21; // index of the last element in the arrays of test samples
+ for (std::list<StringSample>::iterator it = samples_string.begin(); it != samples_string.end(); ++it) {
+ EXPECT_EQ((*it).first, string_samples[i]);
+ --i;
+ }
+
+}
+
+// Checks whether setting age limits works properly
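+// Samples older than max_sample_age are pruned when a new value is recorded,
+// which is exercised below with a 1-second limit and sleep() calls.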
+TEST_F(ObservationTest, setAgeLimit) {
+ // Set max_sample_age to 1 second
+ ASSERT_NO_THROW(c.setMaxSampleAge(seconds(1)));
+ // Add some value
+ c.setValue(milliseconds(5));
+ // Wait 1 second
+ sleep(1);
+ // and add new value
+ c.setValue(milliseconds(3));
+
+ // get the list of all samples
+ std::list<DurationSample> samples_duration = c.getDurations();
+ // check whether the size of samples is equal to 1
+ ASSERT_EQ(c.getSize(), 1);
+ // and whether it contains an expected value
+ EXPECT_EQ((*samples_duration.begin()).first, milliseconds(3));
+
+    // Wait 1 second to ensure the previously set value expires
+ sleep(1);
+ // add 10 new values
+ for (uint32_t i = 0; i < 10; ++i) {
+ c.setValue(milliseconds(i));
+ }
+    // change max_sample_age to a smaller value
+ ASSERT_NO_THROW(c.setMaxSampleAge(milliseconds(300)));
+
+ samples_duration = c.getDurations();
+ // check whether the size of samples is equal to 10
+ ASSERT_EQ(c.getSize(), 10);
+
+ // and whether it contains expected values
+ uint32_t i = 9;
+ for (std::list<DurationSample>::iterator it = samples_duration.begin(); it != samples_duration.end(); ++it) {
+ EXPECT_EQ((*it).first, milliseconds(i));
+ --i;
+ }
+}
+
+// Test checks whether we can get max_sample_age_ and max_sample_count_
+// properly.
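+// Both getters return a pair: .first tells whether the limit is active and
+// .second holds the configured value (a count or a duration).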
+TEST_F(ObservationTest, getLimits) {
+ // First checks whether getting default values works properly
+ EXPECT_EQ(a.getMaxSampleAge().first, false);
+ EXPECT_EQ(b.getMaxSampleAge().first, false);
+ EXPECT_EQ(c.getMaxSampleAge().first, false);
+ EXPECT_EQ(d.getMaxSampleAge().first, false);
+
+ EXPECT_EQ(a.getMaxSampleCount().first, true);
+ EXPECT_EQ(b.getMaxSampleCount().first, true);
+ EXPECT_EQ(c.getMaxSampleCount().first, true);
+ EXPECT_EQ(d.getMaxSampleCount().first, true);
+
+ EXPECT_EQ(a.getMaxSampleCount().second, 20);
+ EXPECT_EQ(b.getMaxSampleCount().second, 20);
+ EXPECT_EQ(c.getMaxSampleCount().second, 20);
+ EXPECT_EQ(d.getMaxSampleCount().second, 20);
+
+    // change the limit to a time duration
+ ASSERT_NO_THROW(a.setMaxSampleAge(dur453));
+ ASSERT_NO_THROW(b.setMaxSampleAge(dur453));
+ ASSERT_NO_THROW(c.setMaxSampleAge(dur453));
+ ASSERT_NO_THROW(d.setMaxSampleAge(dur453));
+
+ EXPECT_EQ(a.getMaxSampleAge().first, true);
+ EXPECT_EQ(b.getMaxSampleAge().first, true);
+ EXPECT_EQ(c.getMaxSampleAge().first, true);
+ EXPECT_EQ(d.getMaxSampleAge().first, true);
+
+ EXPECT_EQ(a.getMaxSampleAge().second, dur453);
+ EXPECT_EQ(b.getMaxSampleAge().second, dur453);
+ EXPECT_EQ(c.getMaxSampleAge().second, dur453);
+ EXPECT_EQ(d.getMaxSampleAge().second, dur453);
+
+ EXPECT_EQ(a.getMaxSampleCount().first, false);
+ EXPECT_EQ(b.getMaxSampleCount().first, false);
+ EXPECT_EQ(c.getMaxSampleCount().first, false);
+ EXPECT_EQ(d.getMaxSampleCount().first, false);
+
+ EXPECT_EQ(a.getMaxSampleCount().second, 20);
+ EXPECT_EQ(b.getMaxSampleCount().second, 20);
+ EXPECT_EQ(c.getMaxSampleCount().second, 20);
+ EXPECT_EQ(d.getMaxSampleCount().second, 20);
+}
+
+// Limit defaults are tested with StatsMgr.
+
+// Test checks whether timing is reported properly.
+TEST_F(ObservationTest, timers) {
+ auto before = SampleClock::now();
+ b.setValue(123.0); // Set it to a random value and record the time.
+
+    // Allow a bit of imprecision. This test allows 500ms, which should be
+    // enough even when running on virtual machines.
+ auto after = before + milliseconds(500);
+
+ // Now wait some time. We want to confirm that the timestamp recorded is the
+    // time the observation took place, not the current time.
+ sleep(1);
+
+ FloatSample sample = b.getFloat();
+
+    // Let's check that the timestamp is within the [before, after] range:
+    // before <= sample-time <= after
+ EXPECT_TRUE(before <= sample.second);
+ EXPECT_TRUE(sample.second <= after);
+}
+
+// Checks whether an integer statistic can generate proper JSON structures.
+// See https://gitlab.isc.org/isc-projects/kea/wikis/designs/Stats-design
+// for details.
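+// A statistic is rendered as a JSON list of [ value, "timestamp" ] pairs,
+// with the newest sample first.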
+TEST_F(ObservationTest, integerToJSON) {
+ // String which contains first added sample
+ std::string first_sample = ", [ 1234, \"" +
+ isc::util::clockToText(a.getInteger().second) + "\" ] ]";
+
+ a.setValue(static_cast<int64_t>(1234));
+
+ std::string exp = "[ [ 1234, \"" +
+ isc::util::clockToText(a.getInteger().second) + "\" ]" + first_sample;
+
+ std::cout << a.getJSON()->str() << std::endl;
+ EXPECT_EQ(exp, a.getJSON()->str());
+}
+
+// Checks whether a floating point statistic can generate proper JSON
+// structures. See
+// https://gitlab.isc.org/isc-projects/kea/wikis/designs/Stats-design
+// for details.
+TEST_F(ObservationTest, floatToJSON) {
+ // String which contains first added sample
+ std::string first_sample = ", [ 12.34, \"" +
+ isc::util::clockToText(b.getFloat().second) + "\" ] ]";
+
+ // Let's use a value that converts easily to floating point.
+ // No need to deal with infinite fractions in binary systems.
+
+ b.setValue(1234.5);
+
+ std::string exp = "[ [ 1234.5, \"" +
+ isc::util::clockToText(b.getFloat().second) + "\" ]" + first_sample;
+
+ std::cout << b.getJSON()->str() << std::endl;
+ EXPECT_EQ(exp, b.getJSON()->str());
+}
+
+// Checks whether a time duration statistic can generate proper JSON structures.
+// See https://gitlab.isc.org/isc-projects/kea/wikis/designs/Stats-design for
+// details.
+TEST_F(ObservationTest, durationToJSON) {
+ // String which contains first added sample
+ std::string first_sample = ", [ \"01:02:03.004000\", \"" +
+ isc::util::clockToText(c.getDuration().second) + "\" ] ]";
+
+ // 1 hour 2 minutes 3 seconds and 4 milliseconds
+ c.setValue(dur1234);
+
+ std::string exp = "[ [ \"01:02:03.004000\", \"" +
+ isc::util::clockToText(c.getDuration().second) + "\" ]" + first_sample;
+
+ std::cout << c.getJSON()->str() << std::endl;
+ EXPECT_EQ(exp, c.getJSON()->str());
+}
+
+// Checks whether a string statistic can generate proper JSON structures.
+// See https://gitlab.isc.org/isc-projects/kea/wikis/designs/Stats-design
+// for details.
+TEST_F(ObservationTest, stringToJSON) {
+ // String which contains first added sample
+ std::string first_sample = ", [ \"1234\", \"" +
+ isc::util::clockToText(d.getString().second) + "\" ] ]";
+
+ d.setValue("Lorem ipsum dolor sit amet");
+
+ std::string exp = "[ [ \"Lorem ipsum dolor sit amet\", \"" +
+ isc::util::clockToText(d.getString().second) + "\" ]" + first_sample;
+
+ std::cout << d.getJSON()->str() << std::endl;
+ EXPECT_EQ(exp, d.getJSON()->str());
+}
+
+// Checks whether reset() resets the statistics properly.
+TEST_F(ObservationTest, reset) {
+ EXPECT_NO_THROW(a.addValue(static_cast<int64_t>(5678)));
+ EXPECT_NO_THROW(b.addValue(56.78));
+ EXPECT_NO_THROW(c.addValue(dur5678));
+ EXPECT_NO_THROW(d.addValue("fiveSixSevenEight"));
+
+ a.reset(); // integer
+ b.reset(); // float
+ c.reset(); // duration
+ d.reset(); // string
+
+ EXPECT_EQ(0, a.getInteger().first);
+ EXPECT_EQ(0.0, b.getFloat().first);
+ EXPECT_EQ(StatsDuration::zero(), c.getDuration().first);
+ EXPECT_EQ("", d.getString().first);
+
+ ASSERT_EQ(a.getSize(), 1);
+ ASSERT_EQ(b.getSize(), 1);
+ ASSERT_EQ(c.getSize(), 1);
+ ASSERT_EQ(d.getSize(), 1);
+}
+
+// Checks whether an observation can keep its name.
+TEST_F(ObservationTest, names) {
+ EXPECT_EQ("alpha", a.getName());
+ EXPECT_EQ("beta", b.getName());
+ EXPECT_EQ("gamma", c.getName());
+ EXPECT_EQ("delta", d.getName());
+}
+
+}
diff --git a/src/lib/stats/tests/run_unittests.cc b/src/lib/stats/tests/run_unittests.cc
new file mode 100644
index 0000000..9d621ae
--- /dev/null
+++ b/src/lib/stats/tests/run_unittests.cc
@@ -0,0 +1,18 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#include <config.h>
+
+#include <gtest/gtest.h>
+
+int
+main(int argc, char* argv[]) {
+ ::testing::InitGoogleTest(&argc, argv);
+
+ int result = RUN_ALL_TESTS();
+
+ return (result);
+}
diff --git a/src/lib/stats/tests/stats_mgr_unittest.cc b/src/lib/stats/tests/stats_mgr_unittest.cc
new file mode 100644
index 0000000..734d134
--- /dev/null
+++ b/src/lib/stats/tests/stats_mgr_unittest.cc
@@ -0,0 +1,1117 @@
+// Copyright (C) 2015-2020 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#include <config.h>
+
+#include <stats/stats_mgr.h>
+#include <exceptions/exceptions.h>
+#include <cc/data.h>
+#include <cc/command_interpreter.h>
+#include <util/chrono_time_utils.h>
+#include <boost/shared_ptr.hpp>
+#include <gtest/gtest.h>
+
+#include <iostream>
+#include <sstream>
+
+using namespace isc;
+using namespace isc::data;
+using namespace isc::stats;
+using namespace isc::config;
+using namespace std::chrono;
+
+namespace {
+
+static const StatsDuration& dur1234(hours(1) + minutes(2) + seconds(3) +
+ milliseconds(4));
+static const StatsDuration& dur5678(hours(5) + minutes(6) + seconds(7) +
+ milliseconds(8));
+static const StatsDuration& dur1245(hours(1) + minutes(2) + seconds(45));
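+// i.e. 01:02:03.004, 05:06:07.008 and 01:02:45 (3765 seconds) respectively.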
+
+/// @brief Fixture class for StatsMgr testing
+///
+/// Very simple class that makes sure that StatsMgr is indeed instantiated
+/// before the test and any statistics are wiped out after it.
+class StatsMgrTest : public ::testing::Test {
+public:
+ /// @brief Constructor
+ /// Makes sure that the Statistics Manager is instantiated.
+ StatsMgrTest() {
+ StatsMgr::instance();
+ StatsMgr::instance().removeAll();
+ }
+
+ /// @brief Destructor
+ /// Removes all statistics and restores class defaults.
+ ~StatsMgrTest() {
+ StatsMgr::instance().removeAll();
+ StatsMgr::instance().setMaxSampleAgeDefault(StatsDuration::zero());
+ StatsMgr::instance().setMaxSampleCountDefault(20);
+ }
+};
+
+// Basic test for statistics manager interface.
+TEST_F(StatsMgrTest, basic) {
+ // Getting an instance
+ EXPECT_NO_THROW(StatsMgr::instance());
+
+ // Check that there are no statistics recorded by default.
+ EXPECT_EQ(0, StatsMgr::instance().count());
+}
+
+// Test checks whether it's possible to record and later report
+// an integer statistic.
+TEST_F(StatsMgrTest, integerStat) {
+ EXPECT_NO_THROW(StatsMgr::instance().setValue("alpha",
+ static_cast<int64_t>(1234)));
+
+ ObservationPtr alpha;
+ EXPECT_NO_THROW(alpha = StatsMgr::instance().getObservation("alpha"));
+ ASSERT_TRUE(alpha);
+
+ std::string exp = "{ \"alpha\": [ [ 1234, \"" +
+ isc::util::clockToText(alpha->getInteger().second) + "\" ] ] }";
+
+ EXPECT_EQ(exp, StatsMgr::instance().get("alpha")->str());
+}
+
+// Test checks whether it's possible to record and later report
+// a floating point statistic.
+TEST_F(StatsMgrTest, floatStat) {
+ EXPECT_NO_THROW(StatsMgr::instance().setValue("beta", 12.34));
+
+ ObservationPtr beta;
+ EXPECT_NO_THROW(beta = StatsMgr::instance().getObservation("beta"));
+ ASSERT_TRUE(beta);
+
+ std::string exp = "{ \"beta\": [ [ 12.34, \"" +
+ isc::util::clockToText(beta->getFloat().second) + "\" ] ] }";
+
+ EXPECT_EQ(exp, StatsMgr::instance().get("beta")->str());
+}
+
+// Test checks whether it's possible to record and later report
+// a duration statistic.
+TEST_F(StatsMgrTest, durationStat) {
+ EXPECT_NO_THROW(StatsMgr::instance().setValue("gamma", dur1234));
+
+ ObservationPtr gamma;
+ EXPECT_NO_THROW(gamma = StatsMgr::instance().getObservation("gamma"));
+ ASSERT_TRUE(gamma);
+
+ std::string exp = "{ \"gamma\": [ [ \"01:02:03.004000\", \"" +
+ isc::util::clockToText(gamma->getDuration().second) + "\" ] ] }";
+
+ EXPECT_EQ(exp, StatsMgr::instance().get("gamma")->str());
+}
+
+// Test checks whether it's possible to record and later report
+// a string statistic.
+TEST_F(StatsMgrTest, stringStat) {
+ EXPECT_NO_THROW(StatsMgr::instance().setValue("delta",
+ "Lorem ipsum"));
+
+ ObservationPtr delta;
+ EXPECT_NO_THROW(delta = StatsMgr::instance().getObservation("delta"));
+ ASSERT_TRUE(delta);
+
+ std::string exp = "{ \"delta\": [ [ \"Lorem ipsum\", \"" +
+ isc::util::clockToText(delta->getString().second) + "\" ] ] }";
+
+ EXPECT_EQ(exp, StatsMgr::instance().get("delta")->str());
+}
+
+// Basic test of getSize function.
+TEST_F(StatsMgrTest, getSize) {
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+ EXPECT_NO_THROW(StatsMgr::instance().getSize("alpha"));
+ EXPECT_NO_THROW(StatsMgr::instance().getSize("beta"));
+ EXPECT_NO_THROW(StatsMgr::instance().getSize("gamma"));
+ EXPECT_NO_THROW(StatsMgr::instance().getSize("delta"));
+
+ EXPECT_EQ(StatsMgr::instance().getSize("alpha"), 1);
+ EXPECT_EQ(StatsMgr::instance().getSize("beta"), 1);
+ EXPECT_EQ(StatsMgr::instance().getSize("gamma"), 1);
+ EXPECT_EQ(StatsMgr::instance().getSize("delta"), 1);
+}
+
+// Test checks whether setting age limit and count limit works properly.
+TEST_F(StatsMgrTest, setLimits) {
+    // Initialize an integer type observation
+ StatsMgr::instance().setValue("foo", static_cast<int64_t>(1));
+
+ EXPECT_NO_THROW(StatsMgr::instance().setMaxSampleAge("foo",
+ seconds(1)));
+
+ for (uint32_t i = 0; i < 10; ++i) {
+ if (i == 5) {
+ sleep(1); // wait one second to force exceeding the time limit
+ }
+ StatsMgr::instance().setValue("foo", static_cast<int64_t>(i));
+ }
+
+ EXPECT_EQ(StatsMgr::instance().getSize("foo"), 5);
+ EXPECT_NO_THROW(StatsMgr::instance().setMaxSampleCount("foo", 100));
+
+ for (int64_t i = 0; i < 200; ++i) {
+ StatsMgr::instance().setValue("foo", i);
+ }
+
+ EXPECT_EQ(StatsMgr::instance().getSize("foo"), 100);
+}
+
+// Test checks whether setting age limit and count limit to existing
+// statistics works properly.
+TEST_F(StatsMgrTest, setLimitsAll) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+ // check the setting of time limit to existing statistics
+ EXPECT_NO_THROW(StatsMgr::instance().setMaxSampleAgeAll(seconds(1)));
+
+ // check if time limit was set properly and whether count limit is disabled
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().second,
+ seconds(1));
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().second,
+ seconds(1));
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().second,
+ seconds(1));
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().second,
+ seconds(1));
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().first, false);
+
+ // check the setting of count limit to existing statistics
+ EXPECT_NO_THROW(StatsMgr::instance().setMaxSampleCountAll(1200));
+
+    // check if the count limit was set properly and whether the age limit is disabled
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().second, 1200);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().second, 1200);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().second, 1200);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().second, 1200);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().first, false);
+}
+
+// Test checks whether setting default age limit and count limit works
+// properly.
+TEST_F(StatsMgrTest, setLimitsDefault) {
+ ASSERT_EQ(StatsMgr::instance().getMaxSampleCountDefault(), 20);
+ ASSERT_EQ(StatsMgr::instance().getMaxSampleAgeDefault(), StatsDuration::zero());
+
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", seconds(1234));
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+    // check that the defaults were applied
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().second, 20);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().second, StatsDuration::zero());
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().second, 20);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().second, StatsDuration::zero());
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().second, 20);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().second, StatsDuration::zero());
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().second, 20);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().second, StatsDuration::zero());
+
+    // Retry with different default limits.
+ EXPECT_NO_THROW(StatsMgr::instance().setMaxSampleCountDefault(10));
+ EXPECT_NO_THROW(StatsMgr::instance().setMaxSampleAgeDefault(seconds(5)));
+ ASSERT_EQ(StatsMgr::instance().getMaxSampleCountDefault(), 10);
+ ASSERT_EQ(StatsMgr::instance().getMaxSampleAgeDefault(), seconds(5));
+
+ // Check the existing statistics were not updated.
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().second, 20);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().second, StatsDuration::zero());
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().second, 20);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().second, StatsDuration::zero());
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().second, 20);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().second, StatsDuration::zero());
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().second, 20);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().second, StatsDuration::zero());
+
+ // Remove all test statistics.
+ EXPECT_NO_THROW(StatsMgr::instance().removeAll());
+
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", seconds(1234));
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().second, seconds(5));
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().second, seconds(5));
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().second, seconds(5));
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().second, seconds(5));
+
+    // Retry with the count limit disabled.
+ EXPECT_NO_THROW(StatsMgr::instance().setMaxSampleCountDefault(0));
+ ASSERT_EQ(StatsMgr::instance().getMaxSampleCountDefault(), 0);
+
+ // Check the existing statistics were not updated.
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().second, seconds(5));
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().second, seconds(5));
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().second, seconds(5));
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().second, seconds(5));
+
+ // Remove all test statistics.
+ EXPECT_NO_THROW(StatsMgr::instance().removeAll());
+
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", seconds(1234));
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().second, seconds(5));
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().second, seconds(5));
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().second, seconds(5));
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().first, false);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().second, 10);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().second, seconds(5));
+
+ EXPECT_NO_THROW(StatsMgr::instance().setMaxSampleCountDefault(20));
+ EXPECT_NO_THROW(StatsMgr::instance().setMaxSampleAgeDefault(StatsDuration::zero()));
+}
+
+// This test checks whether a single (get("foo")) and all (getAll())
+// statistics are reported properly.
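+// A single statistic is reported as { "name": [ [ value, "timestamp" ], ... ] },
+// while getAll() returns one such list per statistic name.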
+TEST_F(StatsMgrTest, getGetAll) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem");
+
+    // The string representations of the first added samples
+ std::string alpha_first = ", [ 1234, \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("alpha")
+ ->getInteger().second) + "\" ] ]";
+ std::string beta_first = ", [ 12.34, \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("beta")
+ ->getFloat().second) + "\" ] ]";
+ std::string gamma_first = ", [ \"01:02:03.004000\", \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("gamma")
+ ->getDuration().second) + "\" ] ]";
+ std::string delta_first = ", [ \"Lorem\", \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("delta")
+ ->getString().second) + "\" ] ]";
+
+ // Now add some values to them
+ StatsMgr::instance().addValue("alpha", static_cast<int64_t>(5678));
+ StatsMgr::instance().addValue("beta", 56.78);
+ StatsMgr::instance().addValue("gamma", dur5678);
+ StatsMgr::instance().addValue("delta", " ipsum");
+
+ // There should be 4 statistics reported
+ EXPECT_EQ(4, StatsMgr::instance().count());
+
+ // Now check whether they can be reported back
+ ConstElementPtr rep_alpha = StatsMgr::instance().get("alpha");
+ ConstElementPtr rep_beta = StatsMgr::instance().get("beta");
+ ConstElementPtr rep_gamma = StatsMgr::instance().get("gamma");
+ ConstElementPtr rep_delta = StatsMgr::instance().get("delta");
+
+ ASSERT_TRUE(rep_alpha);
+ ASSERT_TRUE(rep_beta);
+ ASSERT_TRUE(rep_gamma);
+ ASSERT_TRUE(rep_delta);
+
+ std::string exp_str_alpha = "[ [ 6912, \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("alpha")
+ ->getInteger().second) + "\" ]" + alpha_first;
+ std::string exp_str_beta = "[ [ 69.12, \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("beta")
+ ->getFloat().second) + "\" ]" + beta_first;
+ std::string exp_str_gamma = "[ [ \"06:08:10.012000\", \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("gamma")
+ ->getDuration().second) + "\" ]" + gamma_first;
+ std::string exp_str_delta = "[ [ \"Lorem ipsum\", \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("delta")
+ ->getString().second) + "\" ]" + delta_first;
+
+ // Check that individual stats are reported properly
+ EXPECT_EQ("{ \"alpha\": " + exp_str_alpha + " }", rep_alpha->str());
+ EXPECT_EQ("{ \"beta\": " + exp_str_beta + " }", rep_beta->str());
+ EXPECT_EQ("{ \"gamma\": " + exp_str_gamma + " }", rep_gamma->str());
+ EXPECT_EQ("{ \"delta\": " + exp_str_delta + " }", rep_delta->str());
+
+ // Check that non-existent metric is not reported.
+ EXPECT_EQ("{ }", StatsMgr::instance().get("epsilon")->str());
+
+ // Check that all of them can be reported at once
+ ConstElementPtr rep_all = StatsMgr::instance().getAll();
+ ASSERT_TRUE(rep_all);
+
+ // Verifying this is a bit more involved, as we don't know whether the
+ // order would be preserved or not.
+ EXPECT_EQ(4, rep_all->size());
+ ASSERT_TRUE(rep_all->get("alpha"));
+ ASSERT_TRUE(rep_all->get("beta"));
+ ASSERT_TRUE(rep_all->get("delta"));
+ ASSERT_TRUE(rep_all->get("gamma"));
+ EXPECT_FALSE(rep_all->get("epsilon"));
+
+ EXPECT_EQ(exp_str_alpha, rep_all->get("alpha")->str());
+ EXPECT_EQ(exp_str_beta, rep_all->get("beta")->str());
+ EXPECT_EQ(exp_str_gamma, rep_all->get("gamma")->str());
+ EXPECT_EQ(exp_str_delta, rep_all->get("delta")->str());
+}
+
+// This test checks whether existing statistics can be reset.
+TEST_F(StatsMgrTest, reset) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+ // This should reset alpha to 0
+ EXPECT_NO_THROW(StatsMgr::instance().reset("alpha"));
+ EXPECT_EQ(0,
+ StatsMgr::instance().getObservation("alpha")->getInteger().first);
+
+ // The other stats should remain untouched
+ EXPECT_EQ(12.34,
+ StatsMgr::instance().getObservation("beta")->getFloat().first);
+ EXPECT_EQ(dur1234,
+ StatsMgr::instance().getObservation("gamma")->getDuration().first);
+ EXPECT_EQ("Lorem ipsum",
+ StatsMgr::instance().getObservation("delta")->getString().first);
+
+ // Now let's wipe them, too.
+ EXPECT_NO_THROW(StatsMgr::instance().reset("beta"));
+ EXPECT_NO_THROW(StatsMgr::instance().reset("gamma"));
+ EXPECT_NO_THROW(StatsMgr::instance().reset("delta"));
+ EXPECT_EQ(0.0,
+ StatsMgr::instance().getObservation("beta")->getFloat().first);
+ EXPECT_EQ(StatsDuration::zero(),
+ StatsMgr::instance().getObservation("gamma")->getDuration().first);
+ EXPECT_EQ("",
+ StatsMgr::instance().getObservation("delta")->getString().first);
+
+ // Resetting statistics should not remove them
+ EXPECT_EQ(4, StatsMgr::instance().count());
+}
+
+// This test checks whether all existing statistics can be reset at once.
+TEST_F(StatsMgrTest, resetAll) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+    // This should reset all statistics to their default (zero or empty) values
+ EXPECT_NO_THROW(StatsMgr::instance().resetAll());
+ EXPECT_EQ(0,
+ StatsMgr::instance().getObservation("alpha")->getInteger().first);
+ EXPECT_EQ(0.0,
+ StatsMgr::instance().getObservation("beta")->getFloat().first);
+ EXPECT_EQ(StatsDuration::zero(),
+ StatsMgr::instance().getObservation("gamma")->getDuration().first);
+ EXPECT_EQ("",
+ StatsMgr::instance().getObservation("delta")->getString().first);
+
+ // Resetting all statistics should not remove them
+ EXPECT_EQ(4, StatsMgr::instance().count());
+}
+
+// This test checks whether all statistics can be removed at once.
+TEST_F(StatsMgrTest, removeAll) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+    // This should remove all statistics
+ EXPECT_NO_THROW(StatsMgr::instance().removeAll());
+
+    // Removing all statistics should leave none behind
+ EXPECT_EQ(0, StatsMgr::instance().count());
+
+ // There should be no such statistics anymore
+ EXPECT_EQ("{ }", StatsMgr::instance().get("alpha")->str());
+ EXPECT_EQ("{ }", StatsMgr::instance().get("beta")->str());
+ EXPECT_EQ("{ }", StatsMgr::instance().get("gamma")->str());
+ EXPECT_EQ("{ }", StatsMgr::instance().get("delta")->str());
+
+    // getObservation() should not find them either
+ EXPECT_FALSE(StatsMgr::instance().getObservation("alpha"));
+ EXPECT_FALSE(StatsMgr::instance().getObservation("beta"));
+ EXPECT_FALSE(StatsMgr::instance().getObservation("gamma"));
+ EXPECT_FALSE(StatsMgr::instance().getObservation("delta"));
+}
+
+// This is a performance benchmark that checks how long it takes
+// to increment a single statistic a million times.
+//
+// Data points:
+// It took 00:00:00.363709 (363ms) on a late 2013 Mac with Mac OS X 10.9.5.
+TEST_F(StatsMgrTest, DISABLED_performanceSingleAdd) {
+ StatsMgr::instance().removeAll();
+
+ uint32_t cycles = 1000000;
+
+ auto before = SampleClock::now();
+ for (uint32_t i = 0; i < cycles; ++i) {
+ StatsMgr::instance().addValue("metric1", 0.1 * i);
+ }
+ auto after = SampleClock::now();
+
+ auto dur = after - before;
+
+ std::cout << "Incrementing a single statistic " << cycles << " times took: "
+ << isc::util::durationToText(dur) << std::endl;
+}
+
+// This is a performance benchmark that checks how long it takes
+// to set the absolute value of a single statistic a million times.
+//
+// Data points:
+// It took 00:00:00.361003 (361ms) on a late 2013 Mac with Mac OS X 10.9.5.
+TEST_F(StatsMgrTest, DISABLED_performanceSingleSet) {
+ StatsMgr::instance().removeAll();
+
+ uint32_t cycles = 1000000;
+
+ auto before = SampleClock::now();
+ for (uint32_t i = 0; i < cycles; ++i) {
+ StatsMgr::instance().setValue("metric1", 0.1 * i);
+ }
+ auto after = SampleClock::now();
+
+ auto dur = after - before;
+
+ std::cout << "Setting a single statistic " << cycles << " times took: "
+ << isc::util::durationToText(dur) << std::endl;
+}
+
+// This is a performance benchmark that checks how long it takes to
+// increment one statistic a million times when 1000 other statistics
+// are present.
+//
+// Data points:
+// 00:00:00.436943 (436ms) on a late 2013 Mac with Mac OS X 10.9.5
+TEST_F(StatsMgrTest, DISABLED_performanceMultipleAdd) {
+ StatsMgr::instance().removeAll();
+
+ uint32_t cycles = 1000000;
+ uint32_t stats = 1000;
+
+ for (uint32_t i = 0; i < stats; ++i) {
+ std::stringstream tmp;
+ tmp << "statistic" << i;
+ StatsMgr::instance().setValue(tmp.str(), static_cast<int64_t>(i));
+ }
+
+ auto before = SampleClock::now();
+ for (uint32_t i = 0; i < cycles; ++i) {
+ StatsMgr::instance().addValue("metric1", static_cast<int64_t>(i));
+ }
+ auto after = SampleClock::now();
+
+ auto dur = after - before;
+
+ std::cout << "Incrementing one of " << stats << " statistics " << cycles
+ << " times took: " << isc::util::durationToText(dur) << std::endl;
+}
+
+// This is a performance benchmark that checks how long it takes to
+// set one statistic to a given value a million times when 1000 other
+// statistics are present.
+//
+// Data points:
+// 00:00:00.424518 (424ms) on a late 2013 Mac with Mac OS X 10.9.5
+TEST_F(StatsMgrTest, DISABLED_performanceMultipleSet) {
+ StatsMgr::instance().removeAll();
+
+ uint32_t cycles = 1000000;
+ uint32_t stats = 1000;
+
+ for (uint32_t i = 0; i < stats; ++i) {
+ std::stringstream tmp;
+ tmp << "statistic" << i;
+ StatsMgr::instance().setValue(tmp.str(), static_cast<int64_t>(i));
+ }
+
+ auto before = SampleClock::now();
+ for (uint32_t i = 0; i < cycles; ++i) {
+ StatsMgr::instance().setValue("metric1", static_cast<int64_t>(i));
+ }
+ auto after = SampleClock::now();
+
+ auto dur = after - before;
+
+ std::cout << "Setting one of " << stats << " statistics " << cycles
+ << " times took: " << isc::util::durationToText(dur) << std::endl;
+}
+
+// Test checks whether a statistic name can be generated using various
+// index types.
+TEST_F(StatsMgrTest, generateName) {
+    // generateName is a templated method, so in principle anything printable
+    // to a stream can be used as an index. However, in practice only integers
+    // and possibly strings will be used.
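+    // The generated name has the form "<prefix>[<index>].<name>", as checked below.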
+
+    // Let's test an integer as the index.
+ EXPECT_EQ("subnet[123].pkt4-received",
+ StatsMgr::generateName("subnet", 123, "pkt4-received"));
+
+    // Let's test a string as the index.
+ EXPECT_EQ("subnet[foo].pkt4-received",
+ StatsMgr::generateName("subnet", "foo", "pkt4-received"));
+}
+
+// Test checks if statistic-get handler is able to return specified statistic.
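+// A successful response has the form { "arguments": <statistic>, "result": 0 }.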
+TEST_F(StatsMgrTest, commandStatisticGet) {
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+
+ ElementPtr params = Element::createMap();
+ params->set("name", Element::create("alpha"));
+
+ ConstElementPtr rsp = StatsMgr::instance().statisticGetHandler("statistic-get",
+ params);
+
+ ObservationPtr alpha;
+ EXPECT_NO_THROW(alpha = StatsMgr::instance().getObservation("alpha"));
+ ASSERT_TRUE(alpha);
+
+ std::string exp = "{ \"alpha\": [ [ 1234, \"" +
+ isc::util::clockToText(alpha->getInteger().second) + "\" ] ] }";
+
+ EXPECT_EQ("{ \"arguments\": " + exp + ", \"result\": 0 }", rsp->str());
+}
+
+// Test checks if statistic-get is able to handle:
+// - a request without parameters
+// - a request with missing statistic name
+// - a request for non-existing statistic.
+TEST_F(StatsMgrTest, commandStatisticGetNegative) {
+ // Case 1: a request without parameters
+ ConstElementPtr rsp = StatsMgr::instance().statisticGetHandler("statistic-get",
+ ElementPtr());
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 2: a request with missing statistic name
+ ElementPtr params = Element::createMap();
+ rsp = StatsMgr::instance().statisticGetHandler("statistic-get", params);
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 3: a request for non-existing statistic
+ params->set("name", Element::create("alpha"));
+ rsp = StatsMgr::instance().statisticGetHandler("statistic-get", params);
+ EXPECT_EQ("{ \"arguments\": { }, \"result\": 0 }", rsp->str());
+}
+
+// This test checks whether statistic-get-all command returns all statistics
+// correctly.
+TEST_F(StatsMgrTest, commandGetAll) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+ // Now get them. They're used to generate expected output
+ ConstElementPtr rep_alpha = StatsMgr::instance().get("alpha");
+ ConstElementPtr rep_beta = StatsMgr::instance().get("beta");
+ ConstElementPtr rep_gamma = StatsMgr::instance().get("gamma");
+ ConstElementPtr rep_delta = StatsMgr::instance().get("delta");
+
+ ASSERT_TRUE(rep_alpha);
+ ASSERT_TRUE(rep_beta);
+ ASSERT_TRUE(rep_gamma);
+ ASSERT_TRUE(rep_delta);
+
+ std::string exp_str_alpha = "[ [ 1234, \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("alpha")
+ ->getInteger().second) + "\" ] ]";
+ std::string exp_str_beta = "[ [ 12.34, \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("beta")
+ ->getFloat().second) + "\" ] ]";
+ std::string exp_str_gamma = "[ [ \"01:02:03.004000\", \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("gamma")
+ ->getDuration().second) + "\" ] ]";
+ std::string exp_str_delta = "[ [ \"Lorem ipsum\", \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("delta")
+ ->getString().second) + "\" ] ]";
+
+ // Check that all of them can be reported at once
+ ConstElementPtr rsp = StatsMgr::instance().statisticGetAllHandler(
+ "statistic-get-all", ElementPtr());
+ ASSERT_TRUE(rsp);
+ int status_code;
+ ConstElementPtr rep_all = parseAnswer(status_code, rsp);
+ ASSERT_EQ(0, status_code);
+ ASSERT_TRUE(rep_all);
+
+ // Verifying this is a bit more involved, as we don't know whether the
+ // order would be preserved or not.
+ EXPECT_EQ(4, rep_all->size());
+ ASSERT_TRUE(rep_all->get("alpha"));
+ ASSERT_TRUE(rep_all->get("beta"));
+ ASSERT_TRUE(rep_all->get("delta"));
+ ASSERT_TRUE(rep_all->get("gamma"));
+ EXPECT_FALSE(rep_all->get("epsilon"));
+
+ EXPECT_EQ(exp_str_alpha, rep_all->get("alpha")->str());
+ EXPECT_EQ(exp_str_beta, rep_all->get("beta")->str());
+ EXPECT_EQ(exp_str_gamma, rep_all->get("gamma")->str());
+ EXPECT_EQ(exp_str_delta, rep_all->get("delta")->str());
+}
+
+// Test checks if statistic-reset handler is able to reset specified statistic.
+TEST_F(StatsMgrTest, commandStatisticReset) {
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+
+ ElementPtr params = Element::createMap();
+ params->set("name", Element::create("alpha"));
+
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticResetHandler("statistic-reset", params);
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(CONTROL_RESULT_SUCCESS, status_code);
+
+ ObservationPtr alpha;
+ EXPECT_NO_THROW(alpha = StatsMgr::instance().getObservation("alpha"));
+ ASSERT_TRUE(alpha);
+
+ // Check that it was indeed reset
+ EXPECT_EQ(0, alpha->getInteger().first);
+}
+
+// Test checks if statistic-reset is able to handle:
+// - a request without parameters
+// - a request with missing statistic name
+// - a request for non-existing statistic.
+TEST_F(StatsMgrTest, commandStatisticResetNegative) {
+ // Case 1: a request without parameters
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticResetHandler("statistic-reset", ElementPtr());
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 2: a request with missing statistic name
+ ElementPtr params = Element::createMap();
+ rsp = StatsMgr::instance().statisticResetHandler("statistic-reset", params);
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 3: a request for non-existing statistic
+ params->set("name", Element::create("alpha"));
+ rsp = StatsMgr::instance().statisticResetHandler("statistic-reset", params);
+ EXPECT_EQ("{ \"result\": 1, \"text\": \"No 'alpha' statistic found\" }",
+ rsp->str());
+}
+
+// This test checks whether statistic-reset-all command really resets all
+// statistics correctly.
+TEST_F(StatsMgrTest, commandResetAll) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+ // Now get them. They're used to generate expected output
+ ConstElementPtr rep_alpha = StatsMgr::instance().get("alpha");
+ ConstElementPtr rep_beta = StatsMgr::instance().get("beta");
+ ConstElementPtr rep_gamma = StatsMgr::instance().get("gamma");
+ ConstElementPtr rep_delta = StatsMgr::instance().get("delta");
+
+ ASSERT_TRUE(rep_alpha);
+ ASSERT_TRUE(rep_beta);
+ ASSERT_TRUE(rep_gamma);
+ ASSERT_TRUE(rep_delta);
+
+ std::string exp_str_alpha = "[ [ 1234, \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("alpha")
+ ->getInteger().second) + "\" ] ]";
+ std::string exp_str_beta = "[ [ 12.34, \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("beta")
+ ->getFloat().second) + "\" ] ]";
+ std::string exp_str_gamma = "[ [ \"01:02:03.004000\", \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("gamma")
+ ->getDuration().second) + "\" ] ]";
+ std::string exp_str_delta = "[ [ \"Lorem ipsum\", \"" +
+ isc::util::clockToText(StatsMgr::instance().getObservation("delta")
+ ->getString().second) + "\" ] ]";
+
+ // Check that all of them can be reset at once
+ ConstElementPtr rsp = StatsMgr::instance().statisticResetAllHandler(
+ "statistic-reset-all", ElementPtr());
+ ASSERT_TRUE(rsp);
+ int status_code;
+ ConstElementPtr rep_all = parseAnswer(status_code, rsp);
+ ASSERT_EQ(0, status_code);
+ ASSERT_TRUE(rep_all);
+
+ // Check that they're indeed reset
+ EXPECT_EQ(0,
+ StatsMgr::instance().getObservation("alpha")->getInteger().first);
+ EXPECT_EQ(0.0f,
+ StatsMgr::instance().getObservation("beta")->getFloat().first);
+ EXPECT_EQ(StatsDuration::zero(),
+ StatsMgr::instance().getObservation("gamma")->getDuration().first);
+ EXPECT_EQ("",
+ StatsMgr::instance().getObservation("delta")->getString().first);
+}
+
+// Test checks if statistic-remove handler is able to remove a statistic.
+TEST_F(StatsMgrTest, commandStatisticRemove) {
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+
+ ElementPtr params = Element::createMap();
+ params->set("name", Element::create("alpha"));
+
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticRemoveHandler("statistic-remove", params);
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(CONTROL_RESULT_SUCCESS, status_code);
+
+ // It should be gone.
+ EXPECT_FALSE(StatsMgr::instance().getObservation("alpha"));
+ EXPECT_EQ(0, StatsMgr::instance().count());
+}
+
+// Test checks if statistic-remove is able to handle:
+// - a request without parameters
+// - a request with missing statistic name
+// - a request for non-existing statistic.
+TEST_F(StatsMgrTest, commandStatisticRemoveNegative) {
+ // Case 1: a request without parameters
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticRemoveHandler("statistic-remove", ElementPtr());
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 2: a request with missing statistic name
+ ElementPtr params = Element::createMap();
+ rsp = StatsMgr::instance().statisticRemoveHandler("statistic-remove", params);
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 3: a request for non-existing statistic
+ params->set("name", Element::create("alpha"));
+ rsp = StatsMgr::instance().statisticRemoveHandler("statistic-remove", params);
+ EXPECT_EQ("{ \"result\": 1, \"text\": \"No 'alpha' statistic found\" }",
+ rsp->str());
+}
+
+// This test checks whether the statistic-remove-all command really removes all
+// statistics correctly.
+TEST_F(StatsMgrTest, commandRemoveAll) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+    // Check that all of them can be removed at once
+ ConstElementPtr rsp = StatsMgr::instance().statisticRemoveAllHandler(
+ "statistic-remove-all", ElementPtr());
+ ASSERT_TRUE(rsp);
+ int status_code;
+ ConstElementPtr rep_all = parseAnswer(status_code, rsp);
+ ASSERT_EQ(0, status_code);
+ ASSERT_TRUE(rep_all);
+ std::string exp = "\"Warning: statistic-remove-all command is deprecated.";
+ exp += " All statistics removed.\"";
+ EXPECT_EQ(exp, rep_all->str());
+
+ EXPECT_FALSE(StatsMgr::instance().getObservation("alpha"));
+ EXPECT_FALSE(StatsMgr::instance().getObservation("beta"));
+ EXPECT_FALSE(StatsMgr::instance().getObservation("gamma"));
+ EXPECT_FALSE(StatsMgr::instance().getObservation("delta"));
+ EXPECT_EQ(0, StatsMgr::instance().count());
+}
+
+// This test checks whether the statistic-sample-age-set command really sets
+// the max_sample_age_ limit correctly.
+TEST_F(StatsMgrTest, commandSetMaxSampleAge) {
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+
+ ElementPtr params = Element::createMap();
+ params->set("name", Element::create("alpha"));
+ params->set("duration", Element::create(1245)); // minutes(20) + seconds(45)
+
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticSetMaxSampleAgeHandler("statistic-sample-age-set", params);
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(CONTROL_RESULT_SUCCESS, status_code);
+
+ // check if time limit was set properly and whether count limit is disabled
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().second,
+ minutes(20) + seconds(45));
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, false);
+}
+
+// Test checks if statistic-sample-age-set is able to handle:
+// - a request without parameters
+// - a request without duration parameter
+// - a request with missing statistic name
+// - a request for non-existing statistic.
+TEST_F(StatsMgrTest, commandSetMaxSampleAgeNegative) {
+ // Case 1: a request without parameters
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticSetMaxSampleAgeHandler("statistic-sample-age-set", ElementPtr());
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 2: a request without duration parameter
+ ElementPtr params = Element::createMap();
+ params->set("name", Element::create("alpha"));
+ rsp = StatsMgr::instance().statisticSetMaxSampleAgeHandler("statistic-sample-age-set", params);
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 3: a request with missing statistic name
+ params = Element::createMap();
+ params->set("duration", Element::create(100));
+ rsp = StatsMgr::instance().statisticSetMaxSampleAgeHandler("statistic-sample-age-set", params);
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 4: a request for non-existing statistic
+ params->set("name", Element::create("alpha"));
+ rsp = StatsMgr::instance().statisticSetMaxSampleAgeHandler("statistic-sample-age-set", params);
+ EXPECT_EQ("{ \"result\": 1, \"text\": \"No 'alpha' statistic found\" }",
+ rsp->str());
+}
+
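+// This test checks whether statisticSetMaxSampleAgeAllHandler sets the default
+// and per-statistic max_sample_age_ limits and disables the count limits.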
+TEST_F(StatsMgrTest, commandSetMaxSampleAgeAll) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+ ElementPtr params = Element::createMap();
+ params->set("duration", Element::create(3765)); // set duration to 3765 seconds
+
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticSetMaxSampleAgeAllHandler(params);
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(CONTROL_RESULT_SUCCESS, status_code);
+
+ // check defaults
+ EXPECT_EQ(StatsMgr::instance().getMaxSampleAgeDefault(), seconds(3765));
+ EXPECT_EQ(StatsMgr::instance().getMaxSampleCountDefault(), 0);
+
+ // check if time limit was set properly and whether count limit is disabled
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().second,
+ dur1245);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().second,
+ dur1245);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().second,
+ dur1245);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().second,
+ dur1245);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().first, false);
+}
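+
+// For reference, a sketch of the corresponding control-channel command,
+// assuming the usual { "command", "arguments" } wrapper; only "duration"
+// is passed, since the limit applies to every statistic:
+//
+//   {
+//       "command": "statistic-sample-age-set-all",
+//       "arguments": { "duration": 3765 }
+//   }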
+
+// This test checks whether the statistic-sample-count-set command sets the
+// max_sample_count_ limit correctly.
+TEST_F(StatsMgrTest, commandSetMaxSampleCount) {
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+
+ ElementPtr params = Element::createMap();
+ params->set("name", Element::create("alpha"));
+ params->set("max-samples", Element::create(15));
+
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticSetMaxSampleCountHandler("statistic-sample-count-set", params);
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(CONTROL_RESULT_SUCCESS, status_code);
+
+ // check if the count limit was set properly and whether the time limit is disabled
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().second, 15);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, false);
+}
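+
+// For reference, a sketch of the command exercised above, assuming the
+// usual { "command", "arguments" } wrapper:
+//
+//   {
+//       "command": "statistic-sample-count-set",
+//       "arguments": {
+//           "name": "alpha",
+//           "max-samples": 15
+//       }
+//   }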
+
+// This test checks that statistic-sample-count-set properly handles:
+// - a request without parameters
+// - a request without the max-samples parameter
+// - a request with a missing statistic name
+// - a request for a non-existing statistic.
+TEST_F(StatsMgrTest, commandSetMaxSampleCountNegative) {
+ // Case 1: a request without parameters
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticSetMaxSampleCountHandler("statistic-sample-count-set", ElementPtr());
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 2: a request without max-samples parameter
+ ElementPtr params = Element::createMap();
+ params->set("name", Element::create("alpha"));
+ rsp = StatsMgr::instance().statisticSetMaxSampleCountHandler("statistic-sample-count-set", params);
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 3: a request with missing statistic name
+ params = Element::createMap();
+ params->set("max-samples", Element::create(10));
+ rsp = StatsMgr::instance().statisticSetMaxSampleCountHandler("statistic-sample-count-set", params);
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+
+ // Case 4: a request for non-existing statistic
+ params->set("name", Element::create("alpha"));
+ rsp = StatsMgr::instance().statisticSetMaxSampleCountHandler("statistic-sample-count-set", params);
+ EXPECT_EQ("{ \"result\": 1, \"text\": \"No 'alpha' statistic found\" }",
+ rsp->str());
+}
+
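+// This test checks whether the statistic-sample-count-set-all command sets the
+// max_sample_count_ limit correctly for all statistics.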
+TEST_F(StatsMgrTest, commandSetMaxSampleCountAll) {
+ // Set a couple of statistics
+ StatsMgr::instance().setValue("alpha", static_cast<int64_t>(1234));
+ StatsMgr::instance().setValue("beta", 12.34);
+ StatsMgr::instance().setValue("gamma", dur1234);
+ StatsMgr::instance().setValue("delta", "Lorem ipsum");
+
+ ElementPtr params = Element::createMap();
+ params->set("max-samples", Element::create(200));
+
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticSetMaxSampleCountAllHandler(params);
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(CONTROL_RESULT_SUCCESS, status_code);
+
+ // check default
+ EXPECT_EQ(StatsMgr::instance().getMaxSampleCountDefault(), 200);
+
+ // check if the count limit was set properly and whether the time limit is disabled
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleCount().second, 200);
+ EXPECT_EQ(StatsMgr::instance().getObservation("alpha")->getMaxSampleAge().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleCount().second, 200);
+ EXPECT_EQ(StatsMgr::instance().getObservation("beta")->getMaxSampleAge().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleCount().second, 200);
+ EXPECT_EQ(StatsMgr::instance().getObservation("gamma")->getMaxSampleAge().first, false);
+
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().first, true);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleCount().second, 200);
+ EXPECT_EQ(StatsMgr::instance().getObservation("delta")->getMaxSampleAge().first, false);
+}
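+
+// For reference, a sketch of the corresponding control-channel command,
+// assuming the usual { "command", "arguments" } wrapper:
+//
+//   {
+//       "command": "statistic-sample-count-set-all",
+//       "arguments": { "max-samples": 200 }
+//   }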
+
+// This test checks that statistic-sample-count-set-all fails when max-samples
+// is zero.
+TEST_F(StatsMgrTest, commandSetMaxSampleCountAllZero) {
+ ElementPtr params = Element::createMap();
+ params->set("max-samples", Element::create(0));
+
+ ConstElementPtr rsp =
+ StatsMgr::instance().statisticSetMaxSampleCountAllHandler(params);
+ int status_code;
+ ASSERT_NO_THROW(parseAnswer(status_code, rsp));
+ EXPECT_EQ(status_code, CONTROL_RESULT_ERROR);
+}
+
+}