author     Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-07 19:33:14 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>   2024-04-07 19:33:14 +0000
commit     36d22d82aa202bb199967e9512281e9a53db42c9
tree       105e8c98ddea1c1e4784a60a5a6410fa416be2de  /security/nss/gtests/google_test/gtest/test
parent     Initial commit.
Adding upstream version 115.7.0esr.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'security/nss/gtests/google_test/gtest/test')
84 files changed, 31160 insertions, 0 deletions
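Nearly every file in this diff follows one pattern: a C++ helper binary whose target name ends in `_` exercises a single Google Test feature, and a companion Python script locates that binary through gtest_test_utils, runs it as a subprocess, and asserts on its output or exit status. The condensed sketch below is illustrative only and is not part of the committed files; it shows that driver pattern using the color-test helper added here, with a simplified stand-in for the checks the real googletest-color-test.py performs.

import gtest_test_utils

# Resolve the helper binary added by this commit; the real driver,
# googletest-color-test.py, performs many more checks.
COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-color-test_')


class ColorHelperSmokeTest(gtest_test_utils.TestCase):

  def testHelperReportsColorDecision(self):
    # The helper prints "YES" (and exits 1) when Google Test would use
    # colors, or "NO" (and exits 0) otherwise.
    p = gtest_test_utils.Subprocess([COMMAND, '--gtest_color=yes'])
    self.assertTrue(p.exited)
    self.assertTrue(p.output.strip() in ('YES', 'NO'))


if __name__ == '__main__':
  gtest_test_utils.Main()

The real drivers in this commit layer environment-variable handling and many more assertions on top of this skeleton.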
diff --git a/security/nss/gtests/google_test/gtest/test/BUILD.bazel b/security/nss/gtests/google_test/gtest/test/BUILD.bazel new file mode 100644 index 0000000000..b06a00a106 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/BUILD.bazel @@ -0,0 +1,590 @@ +# Copyright 2017 Google Inc. +# All Rights Reserved. +# +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# Bazel BUILD for The Google C++ Testing Framework (Google Test) + +load("@rules_cc//cc:defs.bzl", "cc_binary", "cc_test") +load("@rules_python//python:defs.bzl", "py_library", "py_test") + +licenses(["notice"]) + +package(default_visibility = ["//:__subpackages__"]) + +#on windows exclude gtest-tuple.h +cc_test( + name = "gtest_all_test", + size = "small", + srcs = glob( + include = [ + "gtest-*.cc", + "googletest-*.cc", + "*.h", + "googletest/include/gtest/**/*.h", + ], + exclude = [ + "gtest-unittest-api_test.cc", + "googletest/src/gtest-all.cc", + "gtest_all_test.cc", + "gtest-death-test_ex_test.cc", + "gtest-listener_test.cc", + "gtest-unittest-api_test.cc", + "googletest-param-test-test.cc", + "googletest-param-test2-test.cc", + "googletest-catch-exceptions-test_.cc", + "googletest-color-test_.cc", + "googletest-env-var-test_.cc", + "googletest-failfast-unittest_.cc", + "googletest-filter-unittest_.cc", + "googletest-global-environment-unittest_.cc", + "googletest-break-on-failure-unittest_.cc", + "googletest-listener-test.cc", + "googletest-output-test_.cc", + "googletest-list-tests-unittest_.cc", + "googletest-shuffle-test_.cc", + "googletest-setuptestsuite-test_.cc", + "googletest-uninitialized-test_.cc", + "googletest-death-test_ex_test.cc", + "googletest-param-test-test", + "googletest-throw-on-failure-test_.cc", + "googletest-param-test-invalid-name1-test_.cc", + "googletest-param-test-invalid-name2-test_.cc", + ], + ) + select({ + "//:windows": [], + "//conditions:default": [], + }), + copts = select({ + "//:windows": ["-DGTEST_USE_OWN_TR1_TUPLE=0"], + "//conditions:default": ["-DGTEST_USE_OWN_TR1_TUPLE=1"], + }) + select({ + # Ensure MSVC treats source files as UTF-8 encoded. 
+ "//:msvc_compiler": ["-utf-8"], + "//conditions:default": [], + }), + includes = [ + "googletest", + "googletest/include", + "googletest/include/internal", + "googletest/test", + ], + linkopts = select({ + "//:windows": [], + "//conditions:default": ["-pthread"], + }), + deps = ["//:gtest_main"], +) + +# Tests death tests. +cc_test( + name = "googletest-death-test-test", + size = "medium", + srcs = ["googletest-death-test-test.cc"], + deps = ["//:gtest_main"], +) + +cc_test( + name = "gtest_test_macro_stack_footprint_test", + size = "small", + srcs = ["gtest_test_macro_stack_footprint_test.cc"], + deps = ["//:gtest"], +) + +#These googletest tests have their own main() +cc_test( + name = "googletest-listener-test", + size = "small", + srcs = ["googletest-listener-test.cc"], + deps = ["//:gtest_main"], +) + +cc_test( + name = "gtest-unittest-api_test", + size = "small", + srcs = [ + "gtest-unittest-api_test.cc", + ], + deps = [ + "//:gtest", + ], +) + +cc_test( + name = "googletest-param-test-test", + size = "small", + srcs = [ + "googletest-param-test-test.cc", + "googletest-param-test-test.h", + "googletest-param-test2-test.cc", + ], + deps = ["//:gtest"], +) + +cc_test( + name = "gtest_unittest", + size = "small", + srcs = ["gtest_unittest.cc"], + shard_count = 2, + deps = ["//:gtest_main"], +) + +# Py tests + +py_library( + name = "gtest_test_utils", + testonly = 1, + srcs = ["gtest_test_utils.py"], + imports = ["."], +) + +cc_binary( + name = "gtest_help_test_", + testonly = 1, + srcs = ["gtest_help_test_.cc"], + deps = ["//:gtest_main"], +) + +py_test( + name = "gtest_help_test", + size = "small", + srcs = ["gtest_help_test.py"], + data = [":gtest_help_test_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-output-test_", + testonly = 1, + srcs = ["googletest-output-test_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-output-test", + size = "small", + srcs = ["googletest-output-test.py"], + args = select({ + "//:has_absl": [], + "//conditions:default": ["--no_stacktrace_support"], + }), + data = [ + "googletest-output-test-golden-lin.txt", + ":googletest-output-test_", + ], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-color-test_", + testonly = 1, + srcs = ["googletest-color-test_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-color-test", + size = "small", + srcs = ["googletest-color-test.py"], + data = [":googletest-color-test_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-env-var-test_", + testonly = 1, + srcs = ["googletest-env-var-test_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-env-var-test", + size = "medium", + srcs = ["googletest-env-var-test.py"], + data = [":googletest-env-var-test_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-failfast-unittest_", + testonly = 1, + srcs = ["googletest-failfast-unittest_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-failfast-unittest", + size = "medium", + srcs = ["googletest-failfast-unittest.py"], + data = [":googletest-failfast-unittest_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-filter-unittest_", + testonly = 1, + srcs = ["googletest-filter-unittest_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-filter-unittest", + size = "medium", + srcs = ["googletest-filter-unittest.py"], + data = [":googletest-filter-unittest_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = 
"googletest-global-environment-unittest_", + testonly = 1, + srcs = ["googletest-global-environment-unittest_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-global-environment-unittest", + size = "medium", + srcs = ["googletest-global-environment-unittest.py"], + data = [":googletest-global-environment-unittest_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-break-on-failure-unittest_", + testonly = 1, + srcs = ["googletest-break-on-failure-unittest_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-break-on-failure-unittest", + size = "small", + srcs = ["googletest-break-on-failure-unittest.py"], + data = [":googletest-break-on-failure-unittest_"], + deps = [":gtest_test_utils"], +) + +cc_test( + name = "gtest_assert_by_exception_test", + size = "small", + srcs = ["gtest_assert_by_exception_test.cc"], + deps = ["//:gtest"], +) + +cc_binary( + name = "googletest-throw-on-failure-test_", + testonly = 1, + srcs = ["googletest-throw-on-failure-test_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-throw-on-failure-test", + size = "small", + srcs = ["googletest-throw-on-failure-test.py"], + data = [":googletest-throw-on-failure-test_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-list-tests-unittest_", + testonly = 1, + srcs = ["googletest-list-tests-unittest_.cc"], + deps = ["//:gtest"], +) + +cc_test( + name = "gtest_skip_test", + size = "small", + srcs = ["gtest_skip_test.cc"], + deps = ["//:gtest_main"], +) + +cc_test( + name = "gtest_skip_in_environment_setup_test", + size = "small", + srcs = ["gtest_skip_in_environment_setup_test.cc"], + deps = ["//:gtest_main"], +) + +py_test( + name = "gtest_skip_check_output_test", + size = "small", + srcs = ["gtest_skip_check_output_test.py"], + data = [":gtest_skip_test"], + deps = [":gtest_test_utils"], +) + +py_test( + name = "gtest_skip_environment_check_output_test", + size = "small", + srcs = ["gtest_skip_environment_check_output_test.py"], + data = [ + ":gtest_skip_in_environment_setup_test", + ], + deps = [":gtest_test_utils"], +) + +py_test( + name = "googletest-list-tests-unittest", + size = "small", + srcs = ["googletest-list-tests-unittest.py"], + data = [":googletest-list-tests-unittest_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-shuffle-test_", + srcs = ["googletest-shuffle-test_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-shuffle-test", + size = "small", + srcs = ["googletest-shuffle-test.py"], + data = [":googletest-shuffle-test_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-catch-exceptions-no-ex-test_", + testonly = 1, + srcs = ["googletest-catch-exceptions-test_.cc"], + deps = ["//:gtest_main"], +) + +cc_binary( + name = "googletest-catch-exceptions-ex-test_", + testonly = 1, + srcs = ["googletest-catch-exceptions-test_.cc"], + copts = ["-fexceptions"], + deps = ["//:gtest_main"], +) + +py_test( + name = "googletest-catch-exceptions-test", + size = "small", + srcs = ["googletest-catch-exceptions-test.py"], + data = [ + ":googletest-catch-exceptions-ex-test_", + ":googletest-catch-exceptions-no-ex-test_", + ], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "gtest_xml_output_unittest_", + testonly = 1, + srcs = ["gtest_xml_output_unittest_.cc"], + deps = ["//:gtest"], +) + +cc_test( + name = "gtest_no_test_unittest", + size = "small", + srcs = ["gtest_no_test_unittest.cc"], + deps = ["//:gtest"], +) + +py_test( + name = 
"gtest_xml_output_unittest", + size = "small", + srcs = [ + "gtest_xml_output_unittest.py", + "gtest_xml_test_utils.py", + ], + args = select({ + "//:has_absl": [], + "//conditions:default": ["--no_stacktrace_support"], + }), + data = [ + # We invoke gtest_no_test_unittest to verify the XML output + # when the test program contains no test definition. + ":gtest_no_test_unittest", + ":gtest_xml_output_unittest_", + ], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "gtest_xml_outfile1_test_", + testonly = 1, + srcs = ["gtest_xml_outfile1_test_.cc"], + deps = ["//:gtest_main"], +) + +cc_binary( + name = "gtest_xml_outfile2_test_", + testonly = 1, + srcs = ["gtest_xml_outfile2_test_.cc"], + deps = ["//:gtest_main"], +) + +py_test( + name = "gtest_xml_outfiles_test", + size = "small", + srcs = [ + "gtest_xml_outfiles_test.py", + "gtest_xml_test_utils.py", + ], + data = [ + ":gtest_xml_outfile1_test_", + ":gtest_xml_outfile2_test_", + ], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-setuptestsuite-test_", + testonly = 1, + srcs = ["googletest-setuptestsuite-test_.cc"], + deps = ["//:gtest_main"], +) + +py_test( + name = "googletest-setuptestsuite-test", + size = "medium", + srcs = ["googletest-setuptestsuite-test.py"], + data = [":googletest-setuptestsuite-test_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "googletest-uninitialized-test_", + testonly = 1, + srcs = ["googletest-uninitialized-test_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-uninitialized-test", + size = "medium", + srcs = ["googletest-uninitialized-test.py"], + data = ["googletest-uninitialized-test_"], + deps = [":gtest_test_utils"], +) + +cc_binary( + name = "gtest_testbridge_test_", + testonly = 1, + srcs = ["gtest_testbridge_test_.cc"], + deps = ["//:gtest_main"], +) + +# Tests that filtering via testbridge works +py_test( + name = "gtest_testbridge_test", + size = "small", + srcs = ["gtest_testbridge_test.py"], + data = [":gtest_testbridge_test_"], + deps = [":gtest_test_utils"], +) + +py_test( + name = "googletest-json-outfiles-test", + size = "small", + srcs = [ + "googletest-json-outfiles-test.py", + "gtest_json_test_utils.py", + ], + data = [ + ":gtest_xml_outfile1_test_", + ":gtest_xml_outfile2_test_", + ], + deps = [":gtest_test_utils"], +) + +py_test( + name = "googletest-json-output-unittest", + size = "medium", + srcs = [ + "googletest-json-output-unittest.py", + "gtest_json_test_utils.py", + ], + args = select({ + "//:has_absl": [], + "//conditions:default": ["--no_stacktrace_support"], + }), + data = [ + # We invoke gtest_no_test_unittest to verify the JSON output + # when the test program contains no test definition. + ":gtest_no_test_unittest", + ":gtest_xml_output_unittest_", + ], + deps = [":gtest_test_utils"], +) + +# Verifies interaction of death tests and exceptions. 
+cc_test( + name = "googletest-death-test_ex_catch_test", + size = "medium", + srcs = ["googletest-death-test_ex_test.cc"], + copts = ["-fexceptions"], + defines = ["GTEST_ENABLE_CATCH_EXCEPTIONS_=1"], + deps = ["//:gtest"], +) + +cc_binary( + name = "googletest-param-test-invalid-name1-test_", + testonly = 1, + srcs = ["googletest-param-test-invalid-name1-test_.cc"], + deps = ["//:gtest"], +) + +cc_binary( + name = "googletest-param-test-invalid-name2-test_", + testonly = 1, + srcs = ["googletest-param-test-invalid-name2-test_.cc"], + deps = ["//:gtest"], +) + +py_test( + name = "googletest-param-test-invalid-name1-test", + size = "small", + srcs = ["googletest-param-test-invalid-name1-test.py"], + data = [":googletest-param-test-invalid-name1-test_"], + tags = [ + "no_test_msvc2015", + "no_test_msvc2017", + ], + deps = [":gtest_test_utils"], +) + +py_test( + name = "googletest-param-test-invalid-name2-test", + size = "small", + srcs = ["googletest-param-test-invalid-name2-test.py"], + data = [":googletest-param-test-invalid-name2-test_"], + tags = [ + "no_test_msvc2015", + "no_test_msvc2017", + ], + deps = [":gtest_test_utils"], +) diff --git a/security/nss/gtests/google_test/gtest/test/googletest-break-on-failure-unittest.py b/security/nss/gtests/google_test/gtest/test/googletest-break-on-failure-unittest.py new file mode 100755 index 0000000000..a5dfbc693b --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-break-on-failure-unittest.py @@ -0,0 +1,208 @@ +#!/usr/bin/env python +# +# Copyright 2006, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unit test for Google Test's break-on-failure mode. + +A user can ask Google Test to seg-fault when an assertion fails, using +either the GTEST_BREAK_ON_FAILURE environment variable or the +--gtest_break_on_failure flag. This script tests such functionality +by invoking googletest-break-on-failure-unittest_ (a program written with +Google Test) with different environments and command line flags. +""" + +import os +import gtest_test_utils + +# Constants. 
+ +IS_WINDOWS = os.name == 'nt' + +# The environment variable for enabling/disabling the break-on-failure mode. +BREAK_ON_FAILURE_ENV_VAR = 'GTEST_BREAK_ON_FAILURE' + +# The command line flag for enabling/disabling the break-on-failure mode. +BREAK_ON_FAILURE_FLAG = 'gtest_break_on_failure' + +# The environment variable for enabling/disabling the throw-on-failure mode. +THROW_ON_FAILURE_ENV_VAR = 'GTEST_THROW_ON_FAILURE' + +# The environment variable for enabling/disabling the catch-exceptions mode. +CATCH_EXCEPTIONS_ENV_VAR = 'GTEST_CATCH_EXCEPTIONS' + +# Path to the googletest-break-on-failure-unittest_ program. +EXE_PATH = gtest_test_utils.GetTestExecutablePath( + 'googletest-break-on-failure-unittest_') + + +environ = gtest_test_utils.environ +SetEnvVar = gtest_test_utils.SetEnvVar + +# Tests in this file run a Google-Test-based test program and expect it +# to terminate prematurely. Therefore they are incompatible with +# the premature-exit-file protocol by design. Unset the +# premature-exit filepath to prevent Google Test from creating +# the file. +SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None) + + +def Run(command): + """Runs a command; returns 1 if it was killed by a signal, or 0 otherwise.""" + + p = gtest_test_utils.Subprocess(command, env=environ) + if p.terminated_by_signal: + return 1 + else: + return 0 + + +# The tests. + + +class GTestBreakOnFailureUnitTest(gtest_test_utils.TestCase): + """Tests using the GTEST_BREAK_ON_FAILURE environment variable or + the --gtest_break_on_failure flag to turn assertion failures into + segmentation faults. + """ + + def RunAndVerify(self, env_var_value, flag_value, expect_seg_fault): + """Runs googletest-break-on-failure-unittest_ and verifies that it does + (or does not) have a seg-fault. + + Args: + env_var_value: value of the GTEST_BREAK_ON_FAILURE environment + variable; None if the variable should be unset. + flag_value: value of the --gtest_break_on_failure flag; + None if the flag should not be present. + expect_seg_fault: 1 if the program is expected to generate a seg-fault; + 0 otherwise. + """ + + SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, env_var_value) + + if env_var_value is None: + env_var_value_msg = ' is not set' + else: + env_var_value_msg = '=' + env_var_value + + if flag_value is None: + flag = '' + elif flag_value == '0': + flag = '--%s=0' % BREAK_ON_FAILURE_FLAG + else: + flag = '--%s' % BREAK_ON_FAILURE_FLAG + + command = [EXE_PATH] + if flag: + command.append(flag) + + if expect_seg_fault: + should_or_not = 'should' + else: + should_or_not = 'should not' + + has_seg_fault = Run(command) + + SetEnvVar(BREAK_ON_FAILURE_ENV_VAR, None) + + msg = ('when %s%s, an assertion failure in "%s" %s cause a seg-fault.' 
% + (BREAK_ON_FAILURE_ENV_VAR, env_var_value_msg, ' '.join(command), + should_or_not)) + self.assert_(has_seg_fault == expect_seg_fault, msg) + + def testDefaultBehavior(self): + """Tests the behavior of the default mode.""" + + self.RunAndVerify(env_var_value=None, + flag_value=None, + expect_seg_fault=0) + + def testEnvVar(self): + """Tests using the GTEST_BREAK_ON_FAILURE environment variable.""" + + self.RunAndVerify(env_var_value='0', + flag_value=None, + expect_seg_fault=0) + self.RunAndVerify(env_var_value='1', + flag_value=None, + expect_seg_fault=1) + + def testFlag(self): + """Tests using the --gtest_break_on_failure flag.""" + + self.RunAndVerify(env_var_value=None, + flag_value='0', + expect_seg_fault=0) + self.RunAndVerify(env_var_value=None, + flag_value='1', + expect_seg_fault=1) + + def testFlagOverridesEnvVar(self): + """Tests that the flag overrides the environment variable.""" + + self.RunAndVerify(env_var_value='0', + flag_value='0', + expect_seg_fault=0) + self.RunAndVerify(env_var_value='0', + flag_value='1', + expect_seg_fault=1) + self.RunAndVerify(env_var_value='1', + flag_value='0', + expect_seg_fault=0) + self.RunAndVerify(env_var_value='1', + flag_value='1', + expect_seg_fault=1) + + def testBreakOnFailureOverridesThrowOnFailure(self): + """Tests that gtest_break_on_failure overrides gtest_throw_on_failure.""" + + SetEnvVar(THROW_ON_FAILURE_ENV_VAR, '1') + try: + self.RunAndVerify(env_var_value=None, + flag_value='1', + expect_seg_fault=1) + finally: + SetEnvVar(THROW_ON_FAILURE_ENV_VAR, None) + + if IS_WINDOWS: + def testCatchExceptionsDoesNotInterfere(self): + """Tests that gtest_catch_exceptions doesn't interfere.""" + + SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, '1') + try: + self.RunAndVerify(env_var_value='1', + flag_value='1', + expect_seg_fault=1) + finally: + SetEnvVar(CATCH_EXCEPTIONS_ENV_VAR, None) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-break-on-failure-unittest_.cc b/security/nss/gtests/google_test/gtest/test/googletest-break-on-failure-unittest_.cc new file mode 100644 index 0000000000..f84957a2d0 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-break-on-failure-unittest_.cc @@ -0,0 +1,86 @@ +// Copyright 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Unit test for Google Test's break-on-failure mode. +// +// A user can ask Google Test to seg-fault when an assertion fails, using +// either the GTEST_BREAK_ON_FAILURE environment variable or the +// --gtest_break_on_failure flag. This file is used for testing such +// functionality. +// +// This program will be invoked from a Python unit test. It is +// expected to fail. Don't run it directly. + +#include "gtest/gtest.h" + +#if GTEST_OS_WINDOWS +# include <windows.h> +# include <stdlib.h> +#endif + +namespace { + +// A test that's expected to fail. +TEST(Foo, Bar) { + EXPECT_EQ(2, 3); +} + +#if GTEST_HAS_SEH && !GTEST_OS_WINDOWS_MOBILE +// On Windows Mobile global exception handlers are not supported. +LONG WINAPI ExitWithExceptionCode( + struct _EXCEPTION_POINTERS* exception_pointers) { + exit(exception_pointers->ExceptionRecord->ExceptionCode); +} +#endif + +} // namespace + +int main(int argc, char **argv) { +#if GTEST_OS_WINDOWS + // Suppresses display of the Windows error dialog upon encountering + // a general protection fault (segment violation). + SetErrorMode(SEM_NOGPFAULTERRORBOX | SEM_FAILCRITICALERRORS); + +# if GTEST_HAS_SEH && !GTEST_OS_WINDOWS_MOBILE + + // The default unhandled exception filter does not always exit + // with the exception code as exit code - for example it exits with + // 0 for EXCEPTION_ACCESS_VIOLATION and 1 for EXCEPTION_BREAKPOINT + // if the application is compiled in debug mode. Thus we use our own + // filter which always exits with the exception code for unhandled + // exceptions. + SetUnhandledExceptionFilter(ExitWithExceptionCode); + +# endif +#endif // GTEST_OS_WINDOWS + testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-catch-exceptions-test.py b/security/nss/gtests/google_test/gtest/test/googletest-catch-exceptions-test.py new file mode 100755 index 0000000000..94a5b33f23 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-catch-exceptions-test.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python +# +# Copyright 2010 Google Inc. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Tests Google Test's exception catching behavior. + +This script invokes googletest-catch-exceptions-test_ and +googletest-catch-exceptions-ex-test_ (programs written with +Google Test) and verifies their output. +""" + +import gtest_test_utils + +# Constants. +FLAG_PREFIX = '--gtest_' +LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests' +NO_CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions=0' +FILTER_FLAG = FLAG_PREFIX + 'filter' + +# Path to the googletest-catch-exceptions-ex-test_ binary, compiled with +# exceptions enabled. +EX_EXE_PATH = gtest_test_utils.GetTestExecutablePath( + 'googletest-catch-exceptions-ex-test_') + +# Path to the googletest-catch-exceptions-test_ binary, compiled with +# exceptions disabled. +EXE_PATH = gtest_test_utils.GetTestExecutablePath( + 'googletest-catch-exceptions-no-ex-test_') + +environ = gtest_test_utils.environ +SetEnvVar = gtest_test_utils.SetEnvVar + +# Tests in this file run a Google-Test-based test program and expect it +# to terminate prematurely. Therefore they are incompatible with +# the premature-exit-file protocol by design. Unset the +# premature-exit filepath to prevent Google Test from creating +# the file. +SetEnvVar(gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR, None) + +TEST_LIST = gtest_test_utils.Subprocess( + [EXE_PATH, LIST_TESTS_FLAG], env=environ).output + +SUPPORTS_SEH_EXCEPTIONS = 'ThrowsSehException' in TEST_LIST + +if SUPPORTS_SEH_EXCEPTIONS: + BINARY_OUTPUT = gtest_test_utils.Subprocess([EXE_PATH], env=environ).output + +EX_BINARY_OUTPUT = gtest_test_utils.Subprocess( + [EX_EXE_PATH], env=environ).output + + +# The tests. 
+if SUPPORTS_SEH_EXCEPTIONS: + # pylint:disable-msg=C6302 + class CatchSehExceptionsTest(gtest_test_utils.TestCase): + """Tests exception-catching behavior.""" + + + def TestSehExceptions(self, test_output): + self.assert_('SEH exception with code 0x2a thrown ' + 'in the test fixture\'s constructor' + in test_output) + self.assert_('SEH exception with code 0x2a thrown ' + 'in the test fixture\'s destructor' + in test_output) + self.assert_('SEH exception with code 0x2a thrown in SetUpTestSuite()' + in test_output) + self.assert_('SEH exception with code 0x2a thrown in TearDownTestSuite()' + in test_output) + self.assert_('SEH exception with code 0x2a thrown in SetUp()' + in test_output) + self.assert_('SEH exception with code 0x2a thrown in TearDown()' + in test_output) + self.assert_('SEH exception with code 0x2a thrown in the test body' + in test_output) + + def testCatchesSehExceptionsWithCxxExceptionsEnabled(self): + self.TestSehExceptions(EX_BINARY_OUTPUT) + + def testCatchesSehExceptionsWithCxxExceptionsDisabled(self): + self.TestSehExceptions(BINARY_OUTPUT) + + +class CatchCxxExceptionsTest(gtest_test_utils.TestCase): + """Tests C++ exception-catching behavior. + + Tests in this test case verify that: + * C++ exceptions are caught and logged as C++ (not SEH) exceptions + * Exception thrown affect the remainder of the test work flow in the + expected manner. + """ + + def testCatchesCxxExceptionsInFixtureConstructor(self): + self.assertTrue( + 'C++ exception with description ' + '"Standard C++ exception" thrown ' + 'in the test fixture\'s constructor' in EX_BINARY_OUTPUT, + EX_BINARY_OUTPUT) + self.assert_('unexpected' not in EX_BINARY_OUTPUT, + 'This failure belongs in this test only if ' + '"CxxExceptionInConstructorTest" (no quotes) ' + 'appears on the same line as words "called unexpectedly"') + + if ('CxxExceptionInDestructorTest.ThrowsExceptionInDestructor' in + EX_BINARY_OUTPUT): + + def testCatchesCxxExceptionsInFixtureDestructor(self): + self.assertTrue( + 'C++ exception with description ' + '"Standard C++ exception" thrown ' + 'in the test fixture\'s destructor' in EX_BINARY_OUTPUT, + EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInDestructorTest::TearDownTestSuite() ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + + def testCatchesCxxExceptionsInSetUpTestCase(self): + self.assertTrue( + 'C++ exception with description "Standard C++ exception"' + ' thrown in SetUpTestSuite()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInConstructorTest::TearDownTestSuite() ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInSetUpTestSuiteTest constructor ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInSetUpTestSuiteTest destructor ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInSetUpTestSuiteTest::SetUp() ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInSetUpTestSuiteTest::TearDown() ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInSetUpTestSuiteTest test body ' + 'called as expected.' 
in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + + def testCatchesCxxExceptionsInTearDownTestCase(self): + self.assertTrue( + 'C++ exception with description "Standard C++ exception"' + ' thrown in TearDownTestSuite()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + + def testCatchesCxxExceptionsInSetUp(self): + self.assertTrue( + 'C++ exception with description "Standard C++ exception"' + ' thrown in SetUp()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInSetUpTest::TearDownTestSuite() ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInSetUpTest destructor ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInSetUpTest::TearDown() ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assert_('unexpected' not in EX_BINARY_OUTPUT, + 'This failure belongs in this test only if ' + '"CxxExceptionInSetUpTest" (no quotes) ' + 'appears on the same line as words "called unexpectedly"') + + def testCatchesCxxExceptionsInTearDown(self): + self.assertTrue( + 'C++ exception with description "Standard C++ exception"' + ' thrown in TearDown()' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInTearDownTest::TearDownTestSuite() ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInTearDownTest destructor ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + + def testCatchesCxxExceptionsInTestBody(self): + self.assertTrue( + 'C++ exception with description "Standard C++ exception"' + ' thrown in the test body' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInTestBodyTest::TearDownTestSuite() ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInTestBodyTest destructor ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + self.assertTrue( + 'CxxExceptionInTestBodyTest::TearDown() ' + 'called as expected.' in EX_BINARY_OUTPUT, EX_BINARY_OUTPUT) + + def testCatchesNonStdCxxExceptions(self): + self.assertTrue( + 'Unknown C++ exception thrown in the test body' in EX_BINARY_OUTPUT, + EX_BINARY_OUTPUT) + + def testUnhandledCxxExceptionsAbortTheProgram(self): + # Filters out SEH exception tests on Windows. Unhandled SEH exceptions + # cause tests to show pop-up windows there. + FITLER_OUT_SEH_TESTS_FLAG = FILTER_FLAG + '=-*Seh*' + # By default, Google Test doesn't catch the exceptions. + uncaught_exceptions_ex_binary_output = gtest_test_utils.Subprocess( + [EX_EXE_PATH, + NO_CATCH_EXCEPTIONS_FLAG, + FITLER_OUT_SEH_TESTS_FLAG], + env=environ).output + + self.assert_('Unhandled C++ exception terminating the program' + in uncaught_exceptions_ex_binary_output) + self.assert_('unexpected' not in uncaught_exceptions_ex_binary_output) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-catch-exceptions-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-catch-exceptions-test_.cc new file mode 100644 index 0000000000..8c127d40b1 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-catch-exceptions-test_.cc @@ -0,0 +1,293 @@ +// Copyright 2010, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Tests for Google Test itself. Tests in this file throw C++ or SEH +// exceptions, and the output is verified by +// googletest-catch-exceptions-test.py. + +#include <stdio.h> // NOLINT +#include <stdlib.h> // For exit(). + +#include "gtest/gtest.h" + +#if GTEST_HAS_SEH +# include <windows.h> +#endif + +#if GTEST_HAS_EXCEPTIONS +# include <exception> // For set_terminate(). 
+# include <stdexcept> +#endif + +using testing::Test; + +#if GTEST_HAS_SEH + +class SehExceptionInConstructorTest : public Test { + public: + SehExceptionInConstructorTest() { RaiseException(42, 0, 0, NULL); } +}; + +TEST_F(SehExceptionInConstructorTest, ThrowsExceptionInConstructor) {} + +class SehExceptionInDestructorTest : public Test { + public: + ~SehExceptionInDestructorTest() { RaiseException(42, 0, 0, NULL); } +}; + +TEST_F(SehExceptionInDestructorTest, ThrowsExceptionInDestructor) {} + +class SehExceptionInSetUpTestSuiteTest : public Test { + public: + static void SetUpTestSuite() { RaiseException(42, 0, 0, NULL); } +}; + +TEST_F(SehExceptionInSetUpTestSuiteTest, ThrowsExceptionInSetUpTestSuite) {} + +class SehExceptionInTearDownTestSuiteTest : public Test { + public: + static void TearDownTestSuite() { RaiseException(42, 0, 0, NULL); } +}; + +TEST_F(SehExceptionInTearDownTestSuiteTest, + ThrowsExceptionInTearDownTestSuite) {} + +class SehExceptionInSetUpTest : public Test { + protected: + virtual void SetUp() { RaiseException(42, 0, 0, NULL); } +}; + +TEST_F(SehExceptionInSetUpTest, ThrowsExceptionInSetUp) {} + +class SehExceptionInTearDownTest : public Test { + protected: + virtual void TearDown() { RaiseException(42, 0, 0, NULL); } +}; + +TEST_F(SehExceptionInTearDownTest, ThrowsExceptionInTearDown) {} + +TEST(SehExceptionTest, ThrowsSehException) { + RaiseException(42, 0, 0, NULL); +} + +#endif // GTEST_HAS_SEH + +#if GTEST_HAS_EXCEPTIONS + +class CxxExceptionInConstructorTest : public Test { + public: + CxxExceptionInConstructorTest() { + // Without this macro VC++ complains about unreachable code at the end of + // the constructor. + GTEST_SUPPRESS_UNREACHABLE_CODE_WARNING_BELOW_( + throw std::runtime_error("Standard C++ exception")); + } + + static void TearDownTestSuite() { + printf("%s", + "CxxExceptionInConstructorTest::TearDownTestSuite() " + "called as expected.\n"); + } + + protected: + ~CxxExceptionInConstructorTest() override { + ADD_FAILURE() << "CxxExceptionInConstructorTest destructor " + << "called unexpectedly."; + } + + void SetUp() override { + ADD_FAILURE() << "CxxExceptionInConstructorTest::SetUp() " + << "called unexpectedly."; + } + + void TearDown() override { + ADD_FAILURE() << "CxxExceptionInConstructorTest::TearDown() " + << "called unexpectedly."; + } +}; + +TEST_F(CxxExceptionInConstructorTest, ThrowsExceptionInConstructor) { + ADD_FAILURE() << "CxxExceptionInConstructorTest test body " + << "called unexpectedly."; +} + +class CxxExceptionInSetUpTestSuiteTest : public Test { + public: + CxxExceptionInSetUpTestSuiteTest() { + printf("%s", + "CxxExceptionInSetUpTestSuiteTest constructor " + "called as expected.\n"); + } + + static void SetUpTestSuite() { + throw std::runtime_error("Standard C++ exception"); + } + + static void TearDownTestSuite() { + printf("%s", + "CxxExceptionInSetUpTestSuiteTest::TearDownTestSuite() " + "called as expected.\n"); + } + + protected: + ~CxxExceptionInSetUpTestSuiteTest() override { + printf("%s", + "CxxExceptionInSetUpTestSuiteTest destructor " + "called as expected.\n"); + } + + void SetUp() override { + printf("%s", + "CxxExceptionInSetUpTestSuiteTest::SetUp() " + "called as expected.\n"); + } + + void TearDown() override { + printf("%s", + "CxxExceptionInSetUpTestSuiteTest::TearDown() " + "called as expected.\n"); + } +}; + +TEST_F(CxxExceptionInSetUpTestSuiteTest, ThrowsExceptionInSetUpTestSuite) { + printf("%s", + "CxxExceptionInSetUpTestSuiteTest test body " + "called as expected.\n"); +} + +class 
CxxExceptionInTearDownTestSuiteTest : public Test { + public: + static void TearDownTestSuite() { + throw std::runtime_error("Standard C++ exception"); + } +}; + +TEST_F(CxxExceptionInTearDownTestSuiteTest, + ThrowsExceptionInTearDownTestSuite) {} + +class CxxExceptionInSetUpTest : public Test { + public: + static void TearDownTestSuite() { + printf("%s", + "CxxExceptionInSetUpTest::TearDownTestSuite() " + "called as expected.\n"); + } + + protected: + ~CxxExceptionInSetUpTest() override { + printf("%s", + "CxxExceptionInSetUpTest destructor " + "called as expected.\n"); + } + + void SetUp() override { throw std::runtime_error("Standard C++ exception"); } + + void TearDown() override { + printf("%s", + "CxxExceptionInSetUpTest::TearDown() " + "called as expected.\n"); + } +}; + +TEST_F(CxxExceptionInSetUpTest, ThrowsExceptionInSetUp) { + ADD_FAILURE() << "CxxExceptionInSetUpTest test body " + << "called unexpectedly."; +} + +class CxxExceptionInTearDownTest : public Test { + public: + static void TearDownTestSuite() { + printf("%s", + "CxxExceptionInTearDownTest::TearDownTestSuite() " + "called as expected.\n"); + } + + protected: + ~CxxExceptionInTearDownTest() override { + printf("%s", + "CxxExceptionInTearDownTest destructor " + "called as expected.\n"); + } + + void TearDown() override { + throw std::runtime_error("Standard C++ exception"); + } +}; + +TEST_F(CxxExceptionInTearDownTest, ThrowsExceptionInTearDown) {} + +class CxxExceptionInTestBodyTest : public Test { + public: + static void TearDownTestSuite() { + printf("%s", + "CxxExceptionInTestBodyTest::TearDownTestSuite() " + "called as expected.\n"); + } + + protected: + ~CxxExceptionInTestBodyTest() override { + printf("%s", + "CxxExceptionInTestBodyTest destructor " + "called as expected.\n"); + } + + void TearDown() override { + printf("%s", + "CxxExceptionInTestBodyTest::TearDown() " + "called as expected.\n"); + } +}; + +TEST_F(CxxExceptionInTestBodyTest, ThrowsStdCxxException) { + throw std::runtime_error("Standard C++ exception"); +} + +TEST(CxxExceptionTest, ThrowsNonStdCxxException) { + throw "C-string"; +} + +// This terminate handler aborts the program using exit() rather than abort(). +// This avoids showing pop-ups on Windows systems and core dumps on Unix-like +// ones. +void TerminateHandler() { + fprintf(stderr, "%s\n", "Unhandled C++ exception terminating the program."); + fflush(nullptr); + exit(3); +} + +#endif // GTEST_HAS_EXCEPTIONS + +int main(int argc, char** argv) { +#if GTEST_HAS_EXCEPTIONS + std::set_terminate(&TerminateHandler); +#endif + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-color-test.py b/security/nss/gtests/google_test/gtest/test/googletest-color-test.py new file mode 100755 index 0000000000..f3b7c9990b --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-color-test.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python +# +# Copyright 2008, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. 
+# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Verifies that Google Test correctly determines whether to use colors.""" + +import os +import gtest_test_utils + +IS_WINDOWS = os.name == 'nt' + +COLOR_ENV_VAR = 'GTEST_COLOR' +COLOR_FLAG = 'gtest_color' +COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-color-test_') + + +def SetEnvVar(env_var, value): + """Sets the env variable to 'value'; unsets it when 'value' is None.""" + + if value is not None: + os.environ[env_var] = value + elif env_var in os.environ: + del os.environ[env_var] + + +def UsesColor(term, color_env_var, color_flag): + """Runs googletest-color-test_ and returns its exit code.""" + + SetEnvVar('TERM', term) + SetEnvVar(COLOR_ENV_VAR, color_env_var) + + if color_flag is None: + args = [] + else: + args = ['--%s=%s' % (COLOR_FLAG, color_flag)] + p = gtest_test_utils.Subprocess([COMMAND] + args) + return not p.exited or p.exit_code + + +class GTestColorTest(gtest_test_utils.TestCase): + def testNoEnvVarNoFlag(self): + """Tests the case when there's neither GTEST_COLOR nor --gtest_color.""" + + if not IS_WINDOWS: + self.assert_(not UsesColor('dumb', None, None)) + self.assert_(not UsesColor('emacs', None, None)) + self.assert_(not UsesColor('xterm-mono', None, None)) + self.assert_(not UsesColor('unknown', None, None)) + self.assert_(not UsesColor(None, None, None)) + self.assert_(UsesColor('linux', None, None)) + self.assert_(UsesColor('cygwin', None, None)) + self.assert_(UsesColor('xterm', None, None)) + self.assert_(UsesColor('xterm-color', None, None)) + self.assert_(UsesColor('xterm-256color', None, None)) + + def testFlagOnly(self): + """Tests the case when there's --gtest_color but not GTEST_COLOR.""" + + self.assert_(not UsesColor('dumb', None, 'no')) + self.assert_(not UsesColor('xterm-color', None, 'no')) + if not IS_WINDOWS: + self.assert_(not UsesColor('emacs', None, 'auto')) + self.assert_(UsesColor('xterm', None, 'auto')) + self.assert_(UsesColor('dumb', None, 'yes')) + self.assert_(UsesColor('xterm', None, 'yes')) + + def testEnvVarOnly(self): + """Tests the case when there's GTEST_COLOR but not --gtest_color.""" + + self.assert_(not UsesColor('dumb', 'no', None)) + self.assert_(not UsesColor('xterm-color', 'no', None)) + if not IS_WINDOWS: + self.assert_(not UsesColor('dumb', 'auto', None)) + self.assert_(UsesColor('xterm-color', 'auto', None)) + self.assert_(UsesColor('dumb', 'yes', None)) + self.assert_(UsesColor('xterm-color', 'yes', None)) + + def testEnvVarAndFlag(self): + """Tests the case when there are both GTEST_COLOR and --gtest_color.""" + + self.assert_(not 
UsesColor('xterm-color', 'no', 'no')) + self.assert_(UsesColor('dumb', 'no', 'yes')) + self.assert_(UsesColor('xterm-color', 'no', 'auto')) + + def testAliasesOfYesAndNo(self): + """Tests using aliases in specifying --gtest_color.""" + + self.assert_(UsesColor('dumb', None, 'true')) + self.assert_(UsesColor('dumb', None, 'YES')) + self.assert_(UsesColor('dumb', None, 'T')) + self.assert_(UsesColor('dumb', None, '1')) + + self.assert_(not UsesColor('xterm', None, 'f')) + self.assert_(not UsesColor('xterm', None, 'false')) + self.assert_(not UsesColor('xterm', None, '0')) + self.assert_(not UsesColor('xterm', None, 'unknown')) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-color-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-color-test_.cc new file mode 100644 index 0000000000..220a3a0054 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-color-test_.cc @@ -0,0 +1,62 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// A helper program for testing how Google Test determines whether to use +// colors in the output. It prints "YES" and returns 1 if Google Test +// decides to use colors, and prints "NO" and returns 0 otherwise. + +#include <stdio.h> + +#include "gtest/gtest.h" +#include "src/gtest-internal-inl.h" + +using testing::internal::ShouldUseColor; + +// The purpose of this is to ensure that the UnitTest singleton is +// created before main() is entered, and thus that ShouldUseColor() +// works the same way as in a real Google-Test-based test. We don't actual +// run the TEST itself. +TEST(GTestColorTest, Dummy) { +} + +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + + if (ShouldUseColor(true)) { + // Google Test decides to use colors in the output (assuming it + // goes to a TTY). + printf("YES\n"); + return 1; + } else { + // Google Test decides not to use colors in the output. 
+ printf("NO\n"); + return 0; + } +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-death-test-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-death-test-test.cc new file mode 100644 index 0000000000..c0b3d1f21d --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-death-test-test.cc @@ -0,0 +1,1542 @@ +// Copyright 2005, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Tests for death tests. + +#include "gtest/gtest-death-test.h" + +#include "gtest/gtest.h" +#include "gtest/internal/gtest-filepath.h" + +using testing::internal::AlwaysFalse; +using testing::internal::AlwaysTrue; + +#if GTEST_HAS_DEATH_TEST + +# if GTEST_OS_WINDOWS +# include <fcntl.h> // For O_BINARY +# include <direct.h> // For chdir(). +# include <io.h> +# else +# include <unistd.h> +# include <sys/wait.h> // For waitpid. +# endif // GTEST_OS_WINDOWS + +# include <limits.h> +# include <signal.h> +# include <stdio.h> + +# if GTEST_OS_LINUX +# include <sys/time.h> +# endif // GTEST_OS_LINUX + +# include "gtest/gtest-spi.h" +# include "src/gtest-internal-inl.h" + +namespace posix = ::testing::internal::posix; + +using testing::ContainsRegex; +using testing::Matcher; +using testing::Message; +using testing::internal::DeathTest; +using testing::internal::DeathTestFactory; +using testing::internal::FilePath; +using testing::internal::GetLastErrnoDescription; +using testing::internal::GetUnitTestImpl; +using testing::internal::InDeathTestChild; +using testing::internal::ParseNaturalNumber; + +namespace testing { +namespace internal { + +// A helper class whose objects replace the death test factory for a +// single UnitTest object during their lifetimes. 
+class ReplaceDeathTestFactory { + public: + explicit ReplaceDeathTestFactory(DeathTestFactory* new_factory) + : unit_test_impl_(GetUnitTestImpl()) { + old_factory_ = unit_test_impl_->death_test_factory_.release(); + unit_test_impl_->death_test_factory_.reset(new_factory); + } + + ~ReplaceDeathTestFactory() { + unit_test_impl_->death_test_factory_.release(); + unit_test_impl_->death_test_factory_.reset(old_factory_); + } + private: + // Prevents copying ReplaceDeathTestFactory objects. + ReplaceDeathTestFactory(const ReplaceDeathTestFactory&); + void operator=(const ReplaceDeathTestFactory&); + + UnitTestImpl* unit_test_impl_; + DeathTestFactory* old_factory_; +}; + +} // namespace internal +} // namespace testing + +namespace { + +void DieWithMessage(const ::std::string& message) { + fprintf(stderr, "%s", message.c_str()); + fflush(stderr); // Make sure the text is printed before the process exits. + + // We call _exit() instead of exit(), as the former is a direct + // system call and thus safer in the presence of threads. exit() + // will invoke user-defined exit-hooks, which may do dangerous + // things that conflict with death tests. + // + // Some compilers can recognize that _exit() never returns and issue the + // 'unreachable code' warning for code following this function, unless + // fooled by a fake condition. + if (AlwaysTrue()) + _exit(1); +} + +void DieInside(const ::std::string& function) { + DieWithMessage("death inside " + function + "()."); +} + +// Tests that death tests work. + +class TestForDeathTest : public testing::Test { + protected: + TestForDeathTest() : original_dir_(FilePath::GetCurrentDir()) {} + + ~TestForDeathTest() override { posix::ChDir(original_dir_.c_str()); } + + // A static member function that's expected to die. + static void StaticMemberFunction() { DieInside("StaticMemberFunction"); } + + // A method of the test fixture that may die. + void MemberFunction() { + if (should_die_) + DieInside("MemberFunction"); + } + + // True if and only if MemberFunction() should die. + bool should_die_; + const FilePath original_dir_; +}; + +// A class with a member function that may die. +class MayDie { + public: + explicit MayDie(bool should_die) : should_die_(should_die) {} + + // A member function that may die. + void MemberFunction() const { + if (should_die_) + DieInside("MayDie::MemberFunction"); + } + + private: + // True if and only if MemberFunction() should die. + bool should_die_; +}; + +// A global function that's expected to die. +void GlobalFunction() { DieInside("GlobalFunction"); } + +// A non-void function that's expected to die. +int NonVoidFunction() { + DieInside("NonVoidFunction"); + return 1; +} + +// A unary function that may die. +void DieIf(bool should_die) { + if (should_die) + DieInside("DieIf"); +} + +// A binary function that may die. +bool DieIfLessThan(int x, int y) { + if (x < y) { + DieInside("DieIfLessThan"); + } + return true; +} + +// Tests that ASSERT_DEATH can be used outside a TEST, TEST_F, or test fixture. +void DeathTestSubroutine() { + EXPECT_DEATH(GlobalFunction(), "death.*GlobalFunction"); + ASSERT_DEATH(GlobalFunction(), "death.*GlobalFunction"); +} + +// Death in dbg, not opt. +int DieInDebugElse12(int* sideeffect) { + if (sideeffect) *sideeffect = 12; + +# ifndef NDEBUG + + DieInside("DieInDebugElse12"); + +# endif // NDEBUG + + return 12; +} + +# if GTEST_OS_WINDOWS + +// Death in dbg due to Windows CRT assertion failure, not opt. 
+int DieInCRTDebugElse12(int* sideeffect) { + if (sideeffect) *sideeffect = 12; + + // Create an invalid fd by closing a valid one + int fdpipe[2]; + EXPECT_EQ(_pipe(fdpipe, 256, O_BINARY), 0); + EXPECT_EQ(_close(fdpipe[0]), 0); + EXPECT_EQ(_close(fdpipe[1]), 0); + + // _dup() should crash in debug mode + EXPECT_EQ(_dup(fdpipe[0]), -1); + + return 12; +} + +#endif // GTEST_OS_WINDOWS + +# if GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA + +// Tests the ExitedWithCode predicate. +TEST(ExitStatusPredicateTest, ExitedWithCode) { + // On Windows, the process's exit code is the same as its exit status, + // so the predicate just compares the its input with its parameter. + EXPECT_TRUE(testing::ExitedWithCode(0)(0)); + EXPECT_TRUE(testing::ExitedWithCode(1)(1)); + EXPECT_TRUE(testing::ExitedWithCode(42)(42)); + EXPECT_FALSE(testing::ExitedWithCode(0)(1)); + EXPECT_FALSE(testing::ExitedWithCode(1)(0)); +} + +# else + +// Returns the exit status of a process that calls _exit(2) with a +// given exit code. This is a helper function for the +// ExitStatusPredicateTest test suite. +static int NormalExitStatus(int exit_code) { + pid_t child_pid = fork(); + if (child_pid == 0) { + _exit(exit_code); + } + int status; + waitpid(child_pid, &status, 0); + return status; +} + +// Returns the exit status of a process that raises a given signal. +// If the signal does not cause the process to die, then it returns +// instead the exit status of a process that exits normally with exit +// code 1. This is a helper function for the ExitStatusPredicateTest +// test suite. +static int KilledExitStatus(int signum) { + pid_t child_pid = fork(); + if (child_pid == 0) { + raise(signum); + _exit(1); + } + int status; + waitpid(child_pid, &status, 0); + return status; +} + +// Tests the ExitedWithCode predicate. +TEST(ExitStatusPredicateTest, ExitedWithCode) { + const int status0 = NormalExitStatus(0); + const int status1 = NormalExitStatus(1); + const int status42 = NormalExitStatus(42); + const testing::ExitedWithCode pred0(0); + const testing::ExitedWithCode pred1(1); + const testing::ExitedWithCode pred42(42); + EXPECT_PRED1(pred0, status0); + EXPECT_PRED1(pred1, status1); + EXPECT_PRED1(pred42, status42); + EXPECT_FALSE(pred0(status1)); + EXPECT_FALSE(pred42(status0)); + EXPECT_FALSE(pred1(status42)); +} + +// Tests the KilledBySignal predicate. +TEST(ExitStatusPredicateTest, KilledBySignal) { + const int status_segv = KilledExitStatus(SIGSEGV); + const int status_kill = KilledExitStatus(SIGKILL); + const testing::KilledBySignal pred_segv(SIGSEGV); + const testing::KilledBySignal pred_kill(SIGKILL); + EXPECT_PRED1(pred_segv, status_segv); + EXPECT_PRED1(pred_kill, status_kill); + EXPECT_FALSE(pred_segv(status_kill)); + EXPECT_FALSE(pred_kill(status_segv)); +} + +# endif // GTEST_OS_WINDOWS || GTEST_OS_FUCHSIA + +// The following code intentionally tests a suboptimal syntax. +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdangling-else" +#pragma GCC diagnostic ignored "-Wempty-body" +#pragma GCC diagnostic ignored "-Wpragmas" +#endif +// Tests that the death test macros expand to code which may or may not +// be followed by operator<<, and that in either case the complete text +// comprises only a single C++ statement. 
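A minimal sketch of the "single C++ statement" property described in the comment above, assuming death tests are available on the platform; DieNow() and the test name below are hypothetical, and the TEST_F that follows remains the authoritative compile-time check.

    #include <cstdlib>
    #include "gtest/gtest.h"

    namespace {

    // Hypothetical helper that terminates the child process with a
    // non-zero status, which any EXPECT_DEATH pattern will accept.
    [[noreturn]] void DieNow() { std::_Exit(1); }

    TEST(SingleStatementSketchDeathTest, MacroBindsAsOneStatement) {
      // Because the macro expands to a single statement, it can be the
      // unbraced body of the `if`, and the streamed message is still part
      // of that same statement, so no extra braces are required.
      if (::testing::internal::AlwaysTrue())
        EXPECT_DEATH(DieNow(), "") << "streamed note";
    }

    }  // namespace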
+TEST_F(TestForDeathTest, SingleStatement) { + if (AlwaysFalse()) + // This would fail if executed; this is a compilation test only + ASSERT_DEATH(return, ""); + + if (AlwaysTrue()) + EXPECT_DEATH(_exit(1), ""); + else + // This empty "else" branch is meant to ensure that EXPECT_DEATH + // doesn't expand into an "if" statement without an "else" + ; + + if (AlwaysFalse()) + ASSERT_DEATH(return, "") << "did not die"; + + if (AlwaysFalse()) + ; + else + EXPECT_DEATH(_exit(1), "") << 1 << 2 << 3; +} +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + +# if GTEST_USES_PCRE + +void DieWithEmbeddedNul() { + fprintf(stderr, "Hello%cmy null world.\n", '\0'); + fflush(stderr); + _exit(1); +} + +// Tests that EXPECT_DEATH and ASSERT_DEATH work when the error +// message has a NUL character in it. +TEST_F(TestForDeathTest, EmbeddedNulInMessage) { + EXPECT_DEATH(DieWithEmbeddedNul(), "my null world"); + ASSERT_DEATH(DieWithEmbeddedNul(), "my null world"); +} + +# endif // GTEST_USES_PCRE + +// Tests that death test macros expand to code which interacts well with switch +// statements. +TEST_F(TestForDeathTest, SwitchStatement) { + // Microsoft compiler usually complains about switch statements without + // case labels. We suppress that warning for this test. + GTEST_DISABLE_MSC_WARNINGS_PUSH_(4065) + + switch (0) + default: + ASSERT_DEATH(_exit(1), "") << "exit in default switch handler"; + + switch (0) + case 0: + EXPECT_DEATH(_exit(1), "") << "exit in switch case"; + + GTEST_DISABLE_MSC_WARNINGS_POP_() +} + +// Tests that a static member function can be used in a "fast" style +// death test. +TEST_F(TestForDeathTest, StaticMemberFunctionFastStyle) { + testing::GTEST_FLAG(death_test_style) = "fast"; + ASSERT_DEATH(StaticMemberFunction(), "death.*StaticMember"); +} + +// Tests that a method of the test fixture can be used in a "fast" +// style death test. +TEST_F(TestForDeathTest, MemberFunctionFastStyle) { + testing::GTEST_FLAG(death_test_style) = "fast"; + should_die_ = true; + EXPECT_DEATH(MemberFunction(), "inside.*MemberFunction"); +} + +void ChangeToRootDir() { posix::ChDir(GTEST_PATH_SEP_); } + +// Tests that death tests work even if the current directory has been +// changed. +TEST_F(TestForDeathTest, FastDeathTestInChangedDir) { + testing::GTEST_FLAG(death_test_style) = "fast"; + + ChangeToRootDir(); + EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), ""); + + ChangeToRootDir(); + ASSERT_DEATH(_exit(1), ""); +} + +# if GTEST_OS_LINUX +void SigprofAction(int, siginfo_t*, void*) { /* no op */ } + +// Sets SIGPROF action and ITIMER_PROF timer (interval: 1ms). +void SetSigprofActionAndTimer() { + struct sigaction signal_action; + memset(&signal_action, 0, sizeof(signal_action)); + sigemptyset(&signal_action.sa_mask); + signal_action.sa_sigaction = SigprofAction; + signal_action.sa_flags = SA_RESTART | SA_SIGINFO; + ASSERT_EQ(0, sigaction(SIGPROF, &signal_action, nullptr)); + // timer comes second, to avoid SIGPROF premature delivery, as suggested at + // https://www.gnu.org/software/libc/manual/html_node/Setting-an-Alarm.html + struct itimerval timer; + timer.it_interval.tv_sec = 0; + timer.it_interval.tv_usec = 1; + timer.it_value = timer.it_interval; + ASSERT_EQ(0, setitimer(ITIMER_PROF, &timer, nullptr)); +} + +// Disables ITIMER_PROF timer and ignores SIGPROF signal. 
+void DisableSigprofActionAndTimer(struct sigaction* old_signal_action) { + struct itimerval timer; + timer.it_interval.tv_sec = 0; + timer.it_interval.tv_usec = 0; + timer.it_value = timer.it_interval; + ASSERT_EQ(0, setitimer(ITIMER_PROF, &timer, nullptr)); + struct sigaction signal_action; + memset(&signal_action, 0, sizeof(signal_action)); + sigemptyset(&signal_action.sa_mask); + signal_action.sa_handler = SIG_IGN; + ASSERT_EQ(0, sigaction(SIGPROF, &signal_action, old_signal_action)); +} + +// Tests that death tests work when SIGPROF handler and timer are set. +TEST_F(TestForDeathTest, FastSigprofActionSet) { + testing::GTEST_FLAG(death_test_style) = "fast"; + SetSigprofActionAndTimer(); + EXPECT_DEATH(_exit(1), ""); + struct sigaction old_signal_action; + DisableSigprofActionAndTimer(&old_signal_action); + EXPECT_TRUE(old_signal_action.sa_sigaction == SigprofAction); +} + +TEST_F(TestForDeathTest, ThreadSafeSigprofActionSet) { + testing::GTEST_FLAG(death_test_style) = "threadsafe"; + SetSigprofActionAndTimer(); + EXPECT_DEATH(_exit(1), ""); + struct sigaction old_signal_action; + DisableSigprofActionAndTimer(&old_signal_action); + EXPECT_TRUE(old_signal_action.sa_sigaction == SigprofAction); +} +# endif // GTEST_OS_LINUX + +// Repeats a representative sample of death tests in the "threadsafe" style: + +TEST_F(TestForDeathTest, StaticMemberFunctionThreadsafeStyle) { + testing::GTEST_FLAG(death_test_style) = "threadsafe"; + ASSERT_DEATH(StaticMemberFunction(), "death.*StaticMember"); +} + +TEST_F(TestForDeathTest, MemberFunctionThreadsafeStyle) { + testing::GTEST_FLAG(death_test_style) = "threadsafe"; + should_die_ = true; + EXPECT_DEATH(MemberFunction(), "inside.*MemberFunction"); +} + +TEST_F(TestForDeathTest, ThreadsafeDeathTestInLoop) { + testing::GTEST_FLAG(death_test_style) = "threadsafe"; + + for (int i = 0; i < 3; ++i) + EXPECT_EXIT(_exit(i), testing::ExitedWithCode(i), "") << ": i = " << i; +} + +TEST_F(TestForDeathTest, ThreadsafeDeathTestInChangedDir) { + testing::GTEST_FLAG(death_test_style) = "threadsafe"; + + ChangeToRootDir(); + EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), ""); + + ChangeToRootDir(); + ASSERT_DEATH(_exit(1), ""); +} + +TEST_F(TestForDeathTest, MixedStyles) { + testing::GTEST_FLAG(death_test_style) = "threadsafe"; + EXPECT_DEATH(_exit(1), ""); + testing::GTEST_FLAG(death_test_style) = "fast"; + EXPECT_DEATH(_exit(1), ""); +} + +# if GTEST_HAS_CLONE && GTEST_HAS_PTHREAD + +bool pthread_flag; + +void SetPthreadFlag() { + pthread_flag = true; +} + +TEST_F(TestForDeathTest, DoesNotExecuteAtforkHooks) { + if (!testing::GTEST_FLAG(death_test_use_fork)) { + testing::GTEST_FLAG(death_test_style) = "threadsafe"; + pthread_flag = false; + ASSERT_EQ(0, pthread_atfork(&SetPthreadFlag, nullptr, nullptr)); + ASSERT_DEATH(_exit(1), ""); + ASSERT_FALSE(pthread_flag); + } +} + +# endif // GTEST_HAS_CLONE && GTEST_HAS_PTHREAD + +// Tests that a method of another class can be used in a death test. +TEST_F(TestForDeathTest, MethodOfAnotherClass) { + const MayDie x(true); + ASSERT_DEATH(x.MemberFunction(), "MayDie\\:\\:MemberFunction"); +} + +// Tests that a global function can be used in a death test. +TEST_F(TestForDeathTest, GlobalFunction) { + EXPECT_DEATH(GlobalFunction(), "GlobalFunction"); +} + +// Tests that any value convertible to an RE works as a second +// argument to EXPECT_DEATH. 
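A minimal sketch of the convertibility described in the comment above, assuming death tests are available: the second argument may be a string literal, a std::string, or an explicitly constructed testing::internal::RE. DieLoudly() and the test name below are hypothetical; the TEST_F that follows is the upstream check of the same behavior.

    #include <cstdio>
    #include <cstdlib>
    #include <string>
    #include "gtest/gtest.h"

    namespace {

    // Hypothetical helper: writes a recognizable message to stderr and
    // exits with a non-zero status.
    void DieLoudly() {
      std::fprintf(stderr, "boom");
      std::fflush(stderr);
      std::_Exit(1);
    }

    TEST(ConvertibleMatcherSketchDeathTest, SeveralArgumentTypesWork) {
      EXPECT_DEATH(DieLoudly(), "boom");                           // const char*
      EXPECT_DEATH(DieLoudly(), std::string("boom"));              // std::string
      EXPECT_DEATH(DieLoudly(), ::testing::internal::RE("boom"));  // RE object
    }

    }  // namespace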
+TEST_F(TestForDeathTest, AcceptsAnythingConvertibleToRE) { + static const char regex_c_str[] = "GlobalFunction"; + EXPECT_DEATH(GlobalFunction(), regex_c_str); + + const testing::internal::RE regex(regex_c_str); + EXPECT_DEATH(GlobalFunction(), regex); + +# if !GTEST_USES_PCRE + + const ::std::string regex_std_str(regex_c_str); + EXPECT_DEATH(GlobalFunction(), regex_std_str); + + // This one is tricky; a temporary pointer into another temporary. Reference + // lifetime extension of the pointer is not sufficient. + EXPECT_DEATH(GlobalFunction(), ::std::string(regex_c_str).c_str()); + +# endif // !GTEST_USES_PCRE +} + +// Tests that a non-void function can be used in a death test. +TEST_F(TestForDeathTest, NonVoidFunction) { + ASSERT_DEATH(NonVoidFunction(), "NonVoidFunction"); +} + +// Tests that functions that take parameter(s) can be used in a death test. +TEST_F(TestForDeathTest, FunctionWithParameter) { + EXPECT_DEATH(DieIf(true), "DieIf\\(\\)"); + EXPECT_DEATH(DieIfLessThan(2, 3), "DieIfLessThan"); +} + +// Tests that ASSERT_DEATH can be used outside a TEST, TEST_F, or test fixture. +TEST_F(TestForDeathTest, OutsideFixture) { + DeathTestSubroutine(); +} + +// Tests that death tests can be done inside a loop. +TEST_F(TestForDeathTest, InsideLoop) { + for (int i = 0; i < 5; i++) { + EXPECT_DEATH(DieIfLessThan(-1, i), "DieIfLessThan") << "where i == " << i; + } +} + +// Tests that a compound statement can be used in a death test. +TEST_F(TestForDeathTest, CompoundStatement) { + EXPECT_DEATH({ // NOLINT + const int x = 2; + const int y = x + 1; + DieIfLessThan(x, y); + }, + "DieIfLessThan"); +} + +// Tests that code that doesn't die causes a death test to fail. +TEST_F(TestForDeathTest, DoesNotDie) { + EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(DieIf(false), "DieIf"), + "failed to die"); +} + +// Tests that a death test fails when the error message isn't expected. +TEST_F(TestForDeathTest, ErrorMessageMismatch) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_DEATH(DieIf(true), "DieIfLessThan") << "End of death test message."; + }, "died but not with expected error"); +} + +// On exit, *aborted will be true if and only if the EXPECT_DEATH() +// statement aborted the function. +void ExpectDeathTestHelper(bool* aborted) { + *aborted = true; + EXPECT_DEATH(DieIf(false), "DieIf"); // This assertion should fail. + *aborted = false; +} + +// Tests that EXPECT_DEATH doesn't abort the test on failure. +TEST_F(TestForDeathTest, EXPECT_DEATH) { + bool aborted = true; + EXPECT_NONFATAL_FAILURE(ExpectDeathTestHelper(&aborted), + "failed to die"); + EXPECT_FALSE(aborted); +} + +// Tests that ASSERT_DEATH does abort the test on failure. +TEST_F(TestForDeathTest, ASSERT_DEATH) { + static bool aborted; + EXPECT_FATAL_FAILURE({ // NOLINT + aborted = true; + ASSERT_DEATH(DieIf(false), "DieIf"); // This assertion should fail. + aborted = false; + }, "failed to die"); + EXPECT_TRUE(aborted); +} + +// Tests that EXPECT_DEATH evaluates the arguments exactly once. +TEST_F(TestForDeathTest, SingleEvaluation) { + int x = 3; + EXPECT_DEATH(DieIf((++x) == 4), "DieIf"); + + const char* regex = "DieIf"; + const char* regex_save = regex; + EXPECT_DEATH(DieIfLessThan(3, 4), regex++); + EXPECT_EQ(regex_save + 1, regex); +} + +// Tests that run-away death tests are reported as failures. +TEST_F(TestForDeathTest, RunawayIsFailure) { + EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(static_cast<void>(0), "Foo"), + "failed to die."); +} + +// Tests that death tests report executing 'return' in the statement as +// failure. 
+TEST_F(TestForDeathTest, ReturnIsFailure) { + EXPECT_FATAL_FAILURE(ASSERT_DEATH(return, "Bar"), + "illegal return in test statement."); +} + +// Tests that EXPECT_DEBUG_DEATH works as expected, that is, you can stream a +// message to it, and in debug mode it: +// 1. Asserts on death. +// 2. Has no side effect. +// +// And in opt mode, it: +// 1. Has side effects but does not assert. +TEST_F(TestForDeathTest, TestExpectDebugDeath) { + int sideeffect = 0; + + // Put the regex in a local variable to make sure we don't get an "unused" + // warning in opt mode. + const char* regex = "death.*DieInDebugElse12"; + + EXPECT_DEBUG_DEATH(DieInDebugElse12(&sideeffect), regex) + << "Must accept a streamed message"; + +# ifdef NDEBUG + + // Checks that the assignment occurs in opt mode (sideeffect). + EXPECT_EQ(12, sideeffect); + +# else + + // Checks that the assignment does not occur in dbg mode (no sideeffect). + EXPECT_EQ(0, sideeffect); + +# endif +} + +# if GTEST_OS_WINDOWS + +// Tests that EXPECT_DEBUG_DEATH works as expected when in debug mode +// the Windows CRT crashes the process with an assertion failure. +// 1. Asserts on death. +// 2. Has no side effect (doesn't pop up a window or wait for user input). +// +// And in opt mode, it: +// 1. Has side effects but does not assert. +TEST_F(TestForDeathTest, CRTDebugDeath) { + int sideeffect = 0; + + // Put the regex in a local variable to make sure we don't get an "unused" + // warning in opt mode. + const char* regex = "dup.* : Assertion failed"; + + EXPECT_DEBUG_DEATH(DieInCRTDebugElse12(&sideeffect), regex) + << "Must accept a streamed message"; + +# ifdef NDEBUG + + // Checks that the assignment occurs in opt mode (sideeffect). + EXPECT_EQ(12, sideeffect); + +# else + + // Checks that the assignment does not occur in dbg mode (no sideeffect). + EXPECT_EQ(0, sideeffect); + +# endif +} + +# endif // GTEST_OS_WINDOWS + +// Tests that ASSERT_DEBUG_DEATH works as expected, that is, you can stream a +// message to it, and in debug mode it: +// 1. Asserts on death. +// 2. Has no side effect. +// +// And in opt mode, it: +// 1. Has side effects but does not assert. +TEST_F(TestForDeathTest, TestAssertDebugDeath) { + int sideeffect = 0; + + ASSERT_DEBUG_DEATH(DieInDebugElse12(&sideeffect), "death.*DieInDebugElse12") + << "Must accept a streamed message"; + +# ifdef NDEBUG + + // Checks that the assignment occurs in opt mode (sideeffect). + EXPECT_EQ(12, sideeffect); + +# else + + // Checks that the assignment does not occur in dbg mode (no sideeffect). + EXPECT_EQ(0, sideeffect); + +# endif +} + +# ifndef NDEBUG + +void ExpectDebugDeathHelper(bool* aborted) { + *aborted = true; + EXPECT_DEBUG_DEATH(return, "") << "This is expected to fail."; + *aborted = false; +} + +# if GTEST_OS_WINDOWS +TEST(PopUpDeathTest, DoesNotShowPopUpOnAbort) { + printf("This test should be considered failing if it shows " + "any pop-up dialogs.\n"); + fflush(stdout); + + EXPECT_DEATH({ + testing::GTEST_FLAG(catch_exceptions) = false; + abort(); + }, ""); +} +# endif // GTEST_OS_WINDOWS + +// Tests that EXPECT_DEBUG_DEATH in debug mode does not abort +// the function. 
+TEST_F(TestForDeathTest, ExpectDebugDeathDoesNotAbort) { + bool aborted = true; + EXPECT_NONFATAL_FAILURE(ExpectDebugDeathHelper(&aborted), ""); + EXPECT_FALSE(aborted); +} + +void AssertDebugDeathHelper(bool* aborted) { + *aborted = true; + GTEST_LOG_(INFO) << "Before ASSERT_DEBUG_DEATH"; + ASSERT_DEBUG_DEATH(GTEST_LOG_(INFO) << "In ASSERT_DEBUG_DEATH"; return, "") + << "This is expected to fail."; + GTEST_LOG_(INFO) << "After ASSERT_DEBUG_DEATH"; + *aborted = false; +} + +// Tests that ASSERT_DEBUG_DEATH in debug mode aborts the function on +// failure. +TEST_F(TestForDeathTest, AssertDebugDeathAborts) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +TEST_F(TestForDeathTest, AssertDebugDeathAborts2) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +TEST_F(TestForDeathTest, AssertDebugDeathAborts3) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +TEST_F(TestForDeathTest, AssertDebugDeathAborts4) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +TEST_F(TestForDeathTest, AssertDebugDeathAborts5) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +TEST_F(TestForDeathTest, AssertDebugDeathAborts6) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +TEST_F(TestForDeathTest, AssertDebugDeathAborts7) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +TEST_F(TestForDeathTest, AssertDebugDeathAborts8) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +TEST_F(TestForDeathTest, AssertDebugDeathAborts9) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +TEST_F(TestForDeathTest, AssertDebugDeathAborts10) { + static bool aborted; + aborted = false; + EXPECT_FATAL_FAILURE(AssertDebugDeathHelper(&aborted), ""); + EXPECT_TRUE(aborted); +} + +# endif // _NDEBUG + +// Tests the *_EXIT family of macros, using a variety of predicates. +static void TestExitMacros() { + EXPECT_EXIT(_exit(1), testing::ExitedWithCode(1), ""); + ASSERT_EXIT(_exit(42), testing::ExitedWithCode(42), ""); + +# if GTEST_OS_WINDOWS + + // Of all signals effects on the process exit code, only those of SIGABRT + // are documented on Windows. + // See https://msdn.microsoft.com/en-us/query-bi/m/dwwzkt4c. + EXPECT_EXIT(raise(SIGABRT), testing::ExitedWithCode(3), "") << "b_ar"; + +# elif !GTEST_OS_FUCHSIA + + // Fuchsia has no unix signals. 
+ EXPECT_EXIT(raise(SIGKILL), testing::KilledBySignal(SIGKILL), "") << "foo"; + ASSERT_EXIT(raise(SIGUSR2), testing::KilledBySignal(SIGUSR2), "") << "bar"; + + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_EXIT(_exit(0), testing::KilledBySignal(SIGSEGV), "") + << "This failure is expected, too."; + }, "This failure is expected, too."); + +# endif // GTEST_OS_WINDOWS + + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_EXIT(raise(SIGSEGV), testing::ExitedWithCode(0), "") + << "This failure is expected."; + }, "This failure is expected."); +} + +TEST_F(TestForDeathTest, ExitMacros) { + TestExitMacros(); +} + +TEST_F(TestForDeathTest, ExitMacrosUsingFork) { + testing::GTEST_FLAG(death_test_use_fork) = true; + TestExitMacros(); +} + +TEST_F(TestForDeathTest, InvalidStyle) { + testing::GTEST_FLAG(death_test_style) = "rococo"; + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_DEATH(_exit(0), "") << "This failure is expected."; + }, "This failure is expected."); +} + +TEST_F(TestForDeathTest, DeathTestFailedOutput) { + testing::GTEST_FLAG(death_test_style) = "fast"; + EXPECT_NONFATAL_FAILURE( + EXPECT_DEATH(DieWithMessage("death\n"), + "expected message"), + "Actual msg:\n" + "[ DEATH ] death\n"); +} + +TEST_F(TestForDeathTest, DeathTestUnexpectedReturnOutput) { + testing::GTEST_FLAG(death_test_style) = "fast"; + EXPECT_NONFATAL_FAILURE( + EXPECT_DEATH({ + fprintf(stderr, "returning\n"); + fflush(stderr); + return; + }, ""), + " Result: illegal return in test statement.\n" + " Error msg:\n" + "[ DEATH ] returning\n"); +} + +TEST_F(TestForDeathTest, DeathTestBadExitCodeOutput) { + testing::GTEST_FLAG(death_test_style) = "fast"; + EXPECT_NONFATAL_FAILURE( + EXPECT_EXIT(DieWithMessage("exiting with rc 1\n"), + testing::ExitedWithCode(3), + "expected message"), + " Result: died but not with expected exit code:\n" + " Exited with exit status 1\n" + "Actual msg:\n" + "[ DEATH ] exiting with rc 1\n"); +} + +TEST_F(TestForDeathTest, DeathTestMultiLineMatchFail) { + testing::GTEST_FLAG(death_test_style) = "fast"; + EXPECT_NONFATAL_FAILURE( + EXPECT_DEATH(DieWithMessage("line 1\nline 2\nline 3\n"), + "line 1\nxyz\nline 3\n"), + "Actual msg:\n" + "[ DEATH ] line 1\n" + "[ DEATH ] line 2\n" + "[ DEATH ] line 3\n"); +} + +TEST_F(TestForDeathTest, DeathTestMultiLineMatchPass) { + testing::GTEST_FLAG(death_test_style) = "fast"; + EXPECT_DEATH(DieWithMessage("line 1\nline 2\nline 3\n"), + "line 1\nline 2\nline 3\n"); +} + +// A DeathTestFactory that returns MockDeathTests. +class MockDeathTestFactory : public DeathTestFactory { + public: + MockDeathTestFactory(); + bool Create(const char* statement, + testing::Matcher<const std::string&> matcher, const char* file, + int line, DeathTest** test) override; + + // Sets the parameters for subsequent calls to Create. + void SetParameters(bool create, DeathTest::TestRole role, + int status, bool passed); + + // Accessors. + int AssumeRoleCalls() const { return assume_role_calls_; } + int WaitCalls() const { return wait_calls_; } + size_t PassedCalls() const { return passed_args_.size(); } + bool PassedArgument(int n) const { + return passed_args_[static_cast<size_t>(n)]; + } + size_t AbortCalls() const { return abort_args_.size(); } + DeathTest::AbortReason AbortArgument(int n) const { + return abort_args_[static_cast<size_t>(n)]; + } + bool TestDeleted() const { return test_deleted_; } + + private: + friend class MockDeathTest; + // If true, Create will return a MockDeathTest; otherwise it returns + // NULL. 
+ bool create_; + // The value a MockDeathTest will return from its AssumeRole method. + DeathTest::TestRole role_; + // The value a MockDeathTest will return from its Wait method. + int status_; + // The value a MockDeathTest will return from its Passed method. + bool passed_; + + // Number of times AssumeRole was called. + int assume_role_calls_; + // Number of times Wait was called. + int wait_calls_; + // The arguments to the calls to Passed since the last call to + // SetParameters. + std::vector<bool> passed_args_; + // The arguments to the calls to Abort since the last call to + // SetParameters. + std::vector<DeathTest::AbortReason> abort_args_; + // True if the last MockDeathTest returned by Create has been + // deleted. + bool test_deleted_; +}; + + +// A DeathTest implementation useful in testing. It returns values set +// at its creation from its various inherited DeathTest methods, and +// reports calls to those methods to its parent MockDeathTestFactory +// object. +class MockDeathTest : public DeathTest { + public: + MockDeathTest(MockDeathTestFactory *parent, + TestRole role, int status, bool passed) : + parent_(parent), role_(role), status_(status), passed_(passed) { + } + ~MockDeathTest() override { parent_->test_deleted_ = true; } + TestRole AssumeRole() override { + ++parent_->assume_role_calls_; + return role_; + } + int Wait() override { + ++parent_->wait_calls_; + return status_; + } + bool Passed(bool exit_status_ok) override { + parent_->passed_args_.push_back(exit_status_ok); + return passed_; + } + void Abort(AbortReason reason) override { + parent_->abort_args_.push_back(reason); + } + + private: + MockDeathTestFactory* const parent_; + const TestRole role_; + const int status_; + const bool passed_; +}; + + +// MockDeathTestFactory constructor. +MockDeathTestFactory::MockDeathTestFactory() + : create_(true), + role_(DeathTest::OVERSEE_TEST), + status_(0), + passed_(true), + assume_role_calls_(0), + wait_calls_(0), + passed_args_(), + abort_args_() { +} + + +// Sets the parameters for subsequent calls to Create. +void MockDeathTestFactory::SetParameters(bool create, + DeathTest::TestRole role, + int status, bool passed) { + create_ = create; + role_ = role; + status_ = status; + passed_ = passed; + + assume_role_calls_ = 0; + wait_calls_ = 0; + passed_args_.clear(); + abort_args_.clear(); +} + + +// Sets test to NULL (if create_ is false) or to the address of a new +// MockDeathTest object with parameters taken from the last call +// to SetParameters (if create_ is true). Always returns true. +bool MockDeathTestFactory::Create( + const char* /*statement*/, testing::Matcher<const std::string&> /*matcher*/, + const char* /*file*/, int /*line*/, DeathTest** test) { + test_deleted_ = false; + if (create_) { + *test = new MockDeathTest(this, role_, status_, passed_); + } else { + *test = nullptr; + } + return true; +} + +// A test fixture for testing the logic of the GTEST_DEATH_TEST_ macro. +// It installs a MockDeathTestFactory that is used for the duration +// of the test case. 
+class MacroLogicDeathTest : public testing::Test { + protected: + static testing::internal::ReplaceDeathTestFactory* replacer_; + static MockDeathTestFactory* factory_; + + static void SetUpTestSuite() { + factory_ = new MockDeathTestFactory; + replacer_ = new testing::internal::ReplaceDeathTestFactory(factory_); + } + + static void TearDownTestSuite() { + delete replacer_; + replacer_ = nullptr; + delete factory_; + factory_ = nullptr; + } + + // Runs a death test that breaks the rules by returning. Such a death + // test cannot be run directly from a test routine that uses a + // MockDeathTest, or the remainder of the routine will not be executed. + static void RunReturningDeathTest(bool* flag) { + ASSERT_DEATH({ // NOLINT + *flag = true; + return; + }, ""); + } +}; + +testing::internal::ReplaceDeathTestFactory* MacroLogicDeathTest::replacer_ = + nullptr; +MockDeathTestFactory* MacroLogicDeathTest::factory_ = nullptr; + +// Test that nothing happens when the factory doesn't return a DeathTest: +TEST_F(MacroLogicDeathTest, NothingHappens) { + bool flag = false; + factory_->SetParameters(false, DeathTest::OVERSEE_TEST, 0, true); + EXPECT_DEATH(flag = true, ""); + EXPECT_FALSE(flag); + EXPECT_EQ(0, factory_->AssumeRoleCalls()); + EXPECT_EQ(0, factory_->WaitCalls()); + EXPECT_EQ(0U, factory_->PassedCalls()); + EXPECT_EQ(0U, factory_->AbortCalls()); + EXPECT_FALSE(factory_->TestDeleted()); +} + +// Test that the parent process doesn't run the death test code, +// and that the Passed method returns false when the (simulated) +// child process exits with status 0: +TEST_F(MacroLogicDeathTest, ChildExitsSuccessfully) { + bool flag = false; + factory_->SetParameters(true, DeathTest::OVERSEE_TEST, 0, true); + EXPECT_DEATH(flag = true, ""); + EXPECT_FALSE(flag); + EXPECT_EQ(1, factory_->AssumeRoleCalls()); + EXPECT_EQ(1, factory_->WaitCalls()); + ASSERT_EQ(1U, factory_->PassedCalls()); + EXPECT_FALSE(factory_->PassedArgument(0)); + EXPECT_EQ(0U, factory_->AbortCalls()); + EXPECT_TRUE(factory_->TestDeleted()); +} + +// Tests that the Passed method was given the argument "true" when +// the (simulated) child process exits with status 1: +TEST_F(MacroLogicDeathTest, ChildExitsUnsuccessfully) { + bool flag = false; + factory_->SetParameters(true, DeathTest::OVERSEE_TEST, 1, true); + EXPECT_DEATH(flag = true, ""); + EXPECT_FALSE(flag); + EXPECT_EQ(1, factory_->AssumeRoleCalls()); + EXPECT_EQ(1, factory_->WaitCalls()); + ASSERT_EQ(1U, factory_->PassedCalls()); + EXPECT_TRUE(factory_->PassedArgument(0)); + EXPECT_EQ(0U, factory_->AbortCalls()); + EXPECT_TRUE(factory_->TestDeleted()); +} + +// Tests that the (simulated) child process executes the death test +// code, and is aborted with the correct AbortReason if it +// executes a return statement. +TEST_F(MacroLogicDeathTest, ChildPerformsReturn) { + bool flag = false; + factory_->SetParameters(true, DeathTest::EXECUTE_TEST, 0, true); + RunReturningDeathTest(&flag); + EXPECT_TRUE(flag); + EXPECT_EQ(1, factory_->AssumeRoleCalls()); + EXPECT_EQ(0, factory_->WaitCalls()); + EXPECT_EQ(0U, factory_->PassedCalls()); + EXPECT_EQ(1U, factory_->AbortCalls()); + EXPECT_EQ(DeathTest::TEST_ENCOUNTERED_RETURN_STATEMENT, + factory_->AbortArgument(0)); + EXPECT_TRUE(factory_->TestDeleted()); +} + +// Tests that the (simulated) child process is aborted with the +// correct AbortReason if it does not die. 
+TEST_F(MacroLogicDeathTest, ChildDoesNotDie) { + bool flag = false; + factory_->SetParameters(true, DeathTest::EXECUTE_TEST, 0, true); + EXPECT_DEATH(flag = true, ""); + EXPECT_TRUE(flag); + EXPECT_EQ(1, factory_->AssumeRoleCalls()); + EXPECT_EQ(0, factory_->WaitCalls()); + EXPECT_EQ(0U, factory_->PassedCalls()); + // This time there are two calls to Abort: one since the test didn't + // die, and another from the ReturnSentinel when it's destroyed. The + // sentinel normally isn't destroyed if a test doesn't die, since + // _exit(2) is called in that case by ForkingDeathTest, but not by + // our MockDeathTest. + ASSERT_EQ(2U, factory_->AbortCalls()); + EXPECT_EQ(DeathTest::TEST_DID_NOT_DIE, + factory_->AbortArgument(0)); + EXPECT_EQ(DeathTest::TEST_ENCOUNTERED_RETURN_STATEMENT, + factory_->AbortArgument(1)); + EXPECT_TRUE(factory_->TestDeleted()); +} + +// Tests that a successful death test does not register a successful +// test part. +TEST(SuccessRegistrationDeathTest, NoSuccessPart) { + EXPECT_DEATH(_exit(1), ""); + EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count()); +} + +TEST(StreamingAssertionsDeathTest, DeathTest) { + EXPECT_DEATH(_exit(1), "") << "unexpected failure"; + ASSERT_DEATH(_exit(1), "") << "unexpected failure"; + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_DEATH(_exit(0), "") << "expected failure"; + }, "expected failure"); + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_DEATH(_exit(0), "") << "expected failure"; + }, "expected failure"); +} + +// Tests that GetLastErrnoDescription returns an empty string when the +// last error is 0 and non-empty string when it is non-zero. +TEST(GetLastErrnoDescription, GetLastErrnoDescriptionWorks) { + errno = ENOENT; + EXPECT_STRNE("", GetLastErrnoDescription().c_str()); + errno = 0; + EXPECT_STREQ("", GetLastErrnoDescription().c_str()); +} + +# if GTEST_OS_WINDOWS +TEST(AutoHandleTest, AutoHandleWorks) { + HANDLE handle = ::CreateEvent(NULL, FALSE, FALSE, NULL); + ASSERT_NE(INVALID_HANDLE_VALUE, handle); + + // Tests that the AutoHandle is correctly initialized with a handle. + testing::internal::AutoHandle auto_handle(handle); + EXPECT_EQ(handle, auto_handle.Get()); + + // Tests that Reset assigns INVALID_HANDLE_VALUE. + // Note that this cannot verify whether the original handle is closed. + auto_handle.Reset(); + EXPECT_EQ(INVALID_HANDLE_VALUE, auto_handle.Get()); + + // Tests that Reset assigns the new handle. + // Note that this cannot verify whether the original handle is closed. + handle = ::CreateEvent(NULL, FALSE, FALSE, NULL); + ASSERT_NE(INVALID_HANDLE_VALUE, handle); + auto_handle.Reset(handle); + EXPECT_EQ(handle, auto_handle.Get()); + + // Tests that AutoHandle contains INVALID_HANDLE_VALUE by default. + testing::internal::AutoHandle auto_handle2; + EXPECT_EQ(INVALID_HANDLE_VALUE, auto_handle2.Get()); +} +# endif // GTEST_OS_WINDOWS + +# if GTEST_OS_WINDOWS +typedef unsigned __int64 BiggestParsable; +typedef signed __int64 BiggestSignedParsable; +# else +typedef unsigned long long BiggestParsable; +typedef signed long long BiggestSignedParsable; +# endif // GTEST_OS_WINDOWS + +// We cannot use std::numeric_limits<T>::max() as it clashes with the +// max() macro defined by <windows.h>. +const BiggestParsable kBiggestParsableMax = ULLONG_MAX; +const BiggestSignedParsable kBiggestSignedParsableMax = LLONG_MAX; + +TEST(ParseNaturalNumberTest, RejectsInvalidFormat) { + BiggestParsable result = 0; + + // Rejects non-numbers. 
+ EXPECT_FALSE(ParseNaturalNumber("non-number string", &result)); + + // Rejects numbers with whitespace prefix. + EXPECT_FALSE(ParseNaturalNumber(" 123", &result)); + + // Rejects negative numbers. + EXPECT_FALSE(ParseNaturalNumber("-123", &result)); + + // Rejects numbers starting with a plus sign. + EXPECT_FALSE(ParseNaturalNumber("+123", &result)); + errno = 0; +} + +TEST(ParseNaturalNumberTest, RejectsOverflownNumbers) { + BiggestParsable result = 0; + + EXPECT_FALSE(ParseNaturalNumber("99999999999999999999999", &result)); + + signed char char_result = 0; + EXPECT_FALSE(ParseNaturalNumber("200", &char_result)); + errno = 0; +} + +TEST(ParseNaturalNumberTest, AcceptsValidNumbers) { + BiggestParsable result = 0; + + result = 0; + ASSERT_TRUE(ParseNaturalNumber("123", &result)); + EXPECT_EQ(123U, result); + + // Check 0 as an edge case. + result = 1; + ASSERT_TRUE(ParseNaturalNumber("0", &result)); + EXPECT_EQ(0U, result); + + result = 1; + ASSERT_TRUE(ParseNaturalNumber("00000", &result)); + EXPECT_EQ(0U, result); +} + +TEST(ParseNaturalNumberTest, AcceptsTypeLimits) { + Message msg; + msg << kBiggestParsableMax; + + BiggestParsable result = 0; + EXPECT_TRUE(ParseNaturalNumber(msg.GetString(), &result)); + EXPECT_EQ(kBiggestParsableMax, result); + + Message msg2; + msg2 << kBiggestSignedParsableMax; + + BiggestSignedParsable signed_result = 0; + EXPECT_TRUE(ParseNaturalNumber(msg2.GetString(), &signed_result)); + EXPECT_EQ(kBiggestSignedParsableMax, signed_result); + + Message msg3; + msg3 << INT_MAX; + + int int_result = 0; + EXPECT_TRUE(ParseNaturalNumber(msg3.GetString(), &int_result)); + EXPECT_EQ(INT_MAX, int_result); + + Message msg4; + msg4 << UINT_MAX; + + unsigned int uint_result = 0; + EXPECT_TRUE(ParseNaturalNumber(msg4.GetString(), &uint_result)); + EXPECT_EQ(UINT_MAX, uint_result); +} + +TEST(ParseNaturalNumberTest, WorksForShorterIntegers) { + short short_result = 0; + ASSERT_TRUE(ParseNaturalNumber("123", &short_result)); + EXPECT_EQ(123, short_result); + + signed char char_result = 0; + ASSERT_TRUE(ParseNaturalNumber("123", &char_result)); + EXPECT_EQ(123, char_result); +} + +# if GTEST_OS_WINDOWS +TEST(EnvironmentTest, HandleFitsIntoSizeT) { + ASSERT_TRUE(sizeof(HANDLE) <= sizeof(size_t)); +} +# endif // GTEST_OS_WINDOWS + +// Tests that EXPECT_DEATH_IF_SUPPORTED/ASSERT_DEATH_IF_SUPPORTED trigger +// failures when death tests are available on the system. +TEST(ConditionalDeathMacrosDeathTest, ExpectsDeathWhenDeathTestsAvailable) { + EXPECT_DEATH_IF_SUPPORTED(DieInside("CondDeathTestExpectMacro"), + "death inside CondDeathTestExpectMacro"); + ASSERT_DEATH_IF_SUPPORTED(DieInside("CondDeathTestAssertMacro"), + "death inside CondDeathTestAssertMacro"); + + // Empty statement will not crash, which must trigger a failure. + EXPECT_NONFATAL_FAILURE(EXPECT_DEATH_IF_SUPPORTED(;, ""), ""); + EXPECT_FATAL_FAILURE(ASSERT_DEATH_IF_SUPPORTED(;, ""), ""); +} + +TEST(InDeathTestChildDeathTest, ReportsDeathTestCorrectlyInFastStyle) { + testing::GTEST_FLAG(death_test_style) = "fast"; + EXPECT_FALSE(InDeathTestChild()); + EXPECT_DEATH({ + fprintf(stderr, InDeathTestChild() ? "Inside" : "Outside"); + fflush(stderr); + _exit(1); + }, "Inside"); +} + +TEST(InDeathTestChildDeathTest, ReportsDeathTestCorrectlyInThreadSafeStyle) { + testing::GTEST_FLAG(death_test_style) = "threadsafe"; + EXPECT_FALSE(InDeathTestChild()); + EXPECT_DEATH({ + fprintf(stderr, InDeathTestChild() ? 
"Inside" : "Outside"); + fflush(stderr); + _exit(1); + }, "Inside"); +} + +void DieWithMessage(const char* message) { + fputs(message, stderr); + fflush(stderr); // Make sure the text is printed before the process exits. + _exit(1); +} + +TEST(MatcherDeathTest, DoesNotBreakBareRegexMatching) { + // googletest tests this, of course; here we ensure that including googlemock + // has not broken it. +#if GTEST_USES_POSIX_RE + EXPECT_DEATH(DieWithMessage("O, I die, Horatio."), "I d[aeiou]e"); +#else + EXPECT_DEATH(DieWithMessage("O, I die, Horatio."), "I di?e"); +#endif +} + +TEST(MatcherDeathTest, MonomorphicMatcherMatches) { + EXPECT_DEATH(DieWithMessage("Behind O, I am slain!"), + Matcher<const std::string&>(ContainsRegex("I am slain"))); +} + +TEST(MatcherDeathTest, MonomorphicMatcherDoesNotMatch) { + EXPECT_NONFATAL_FAILURE( + EXPECT_DEATH( + DieWithMessage("Behind O, I am slain!"), + Matcher<const std::string&>(ContainsRegex("Ow, I am slain"))), + "Expected: contains regular expression \"Ow, I am slain\""); +} + +TEST(MatcherDeathTest, PolymorphicMatcherMatches) { + EXPECT_DEATH(DieWithMessage("The rest is silence."), + ContainsRegex("rest is silence")); +} + +TEST(MatcherDeathTest, PolymorphicMatcherDoesNotMatch) { + EXPECT_NONFATAL_FAILURE( + EXPECT_DEATH(DieWithMessage("The rest is silence."), + ContainsRegex("rest is science")), + "Expected: contains regular expression \"rest is science\""); +} + +} // namespace + +#else // !GTEST_HAS_DEATH_TEST follows + +namespace { + +using testing::internal::CaptureStderr; +using testing::internal::GetCapturedStderr; + +// Tests that EXPECT_DEATH_IF_SUPPORTED/ASSERT_DEATH_IF_SUPPORTED are still +// defined but do not trigger failures when death tests are not available on +// the system. +TEST(ConditionalDeathMacrosTest, WarnsWhenDeathTestsNotAvailable) { + // Empty statement will not crash, but that should not trigger a failure + // when death tests are not supported. + CaptureStderr(); + EXPECT_DEATH_IF_SUPPORTED(;, ""); + std::string output = GetCapturedStderr(); + ASSERT_TRUE(NULL != strstr(output.c_str(), + "Death tests are not supported on this platform")); + ASSERT_TRUE(NULL != strstr(output.c_str(), ";")); + + // The streamed message should not be printed as there is no test failure. + CaptureStderr(); + EXPECT_DEATH_IF_SUPPORTED(;, "") << "streamed message"; + output = GetCapturedStderr(); + ASSERT_TRUE(NULL == strstr(output.c_str(), "streamed message")); + + CaptureStderr(); + ASSERT_DEATH_IF_SUPPORTED(;, ""); // NOLINT + output = GetCapturedStderr(); + ASSERT_TRUE(NULL != strstr(output.c_str(), + "Death tests are not supported on this platform")); + ASSERT_TRUE(NULL != strstr(output.c_str(), ";")); + + CaptureStderr(); + ASSERT_DEATH_IF_SUPPORTED(;, "") << "streamed message"; // NOLINT + output = GetCapturedStderr(); + ASSERT_TRUE(NULL == strstr(output.c_str(), "streamed message")); +} + +void FuncWithAssert(int* n) { + ASSERT_DEATH_IF_SUPPORTED(return;, ""); + (*n)++; +} + +// Tests that ASSERT_DEATH_IF_SUPPORTED does not return from the current +// function (as ASSERT_DEATH does) if death tests are not supported. +TEST(ConditionalDeathMacrosTest, AssertDeatDoesNotReturnhIfUnsupported) { + int n = 0; + FuncWithAssert(&n); + EXPECT_EQ(1, n); +} + +} // namespace + +#endif // !GTEST_HAS_DEATH_TEST + +namespace { + +// The following code intentionally tests a suboptimal syntax. 
+#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdangling-else" +#pragma GCC diagnostic ignored "-Wempty-body" +#pragma GCC diagnostic ignored "-Wpragmas" +#endif +// Tests that the death test macros expand to code which may or may not +// be followed by operator<<, and that in either case the complete text +// comprises only a single C++ statement. +// +// The syntax should work whether death tests are available or not. +TEST(ConditionalDeathMacrosSyntaxDeathTest, SingleStatement) { + if (AlwaysFalse()) + // This would fail if executed; this is a compilation test only + ASSERT_DEATH_IF_SUPPORTED(return, ""); + + if (AlwaysTrue()) + EXPECT_DEATH_IF_SUPPORTED(_exit(1), ""); + else + // This empty "else" branch is meant to ensure that EXPECT_DEATH + // doesn't expand into an "if" statement without an "else" + ; // NOLINT + + if (AlwaysFalse()) + ASSERT_DEATH_IF_SUPPORTED(return, "") << "did not die"; + + if (AlwaysFalse()) + ; // NOLINT + else + EXPECT_DEATH_IF_SUPPORTED(_exit(1), "") << 1 << 2 << 3; +} +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + +// Tests that conditional death test macros expand to code which interacts +// well with switch statements. +TEST(ConditionalDeathMacrosSyntaxDeathTest, SwitchStatement) { + // Microsoft compiler usually complains about switch statements without + // case labels. We suppress that warning for this test. + GTEST_DISABLE_MSC_WARNINGS_PUSH_(4065) + + switch (0) + default: + ASSERT_DEATH_IF_SUPPORTED(_exit(1), "") + << "exit in default switch handler"; + + switch (0) + case 0: + EXPECT_DEATH_IF_SUPPORTED(_exit(1), "") << "exit in switch case"; + + GTEST_DISABLE_MSC_WARNINGS_POP_() +} + +// Tests that a test case whose name ends with "DeathTest" works fine +// on Windows. +TEST(NotADeathTest, Test) { + SUCCEED(); +} + +} // namespace diff --git a/security/nss/gtests/google_test/gtest/test/googletest-death-test_ex_test.cc b/security/nss/gtests/google_test/gtest/test/googletest-death-test_ex_test.cc new file mode 100644 index 0000000000..7219680d07 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-death-test_ex_test.cc @@ -0,0 +1,92 @@ +// Copyright 2010, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Tests that verify interaction of exceptions and death tests. + +#include "gtest/gtest-death-test.h" +#include "gtest/gtest.h" + +#if GTEST_HAS_DEATH_TEST + +# if GTEST_HAS_SEH +# include <windows.h> // For RaiseException(). +# endif + +# include "gtest/gtest-spi.h" + +# if GTEST_HAS_EXCEPTIONS + +# include <exception> // For std::exception. + +// Tests that death tests report thrown exceptions as failures and that the +// exceptions do not escape death test macros. +TEST(CxxExceptionDeathTest, ExceptionIsFailure) { + try { + EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw 1, ""), "threw an exception"); + } catch (...) { // NOLINT + FAIL() << "An exception escaped a death test macro invocation " + << "with catch_exceptions " + << (testing::GTEST_FLAG(catch_exceptions) ? "enabled" : "disabled"); + } +} + +class TestException : public std::exception { + public: + const char* what() const noexcept override { return "exceptional message"; } +}; + +TEST(CxxExceptionDeathTest, PrintsMessageForStdExceptions) { + // Verifies that the exception message is quoted in the failure text. + EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), ""), + "exceptional message"); + // Verifies that the location is mentioned in the failure text. + EXPECT_NONFATAL_FAILURE(EXPECT_DEATH(throw TestException(), ""), + __FILE__); +} +# endif // GTEST_HAS_EXCEPTIONS + +# if GTEST_HAS_SEH +// Tests that enabling interception of SEH exceptions with the +// catch_exceptions flag does not interfere with SEH exceptions being +// treated as death by death tests. +TEST(SehExceptionDeasTest, CatchExceptionsDoesNotInterfere) { + EXPECT_DEATH(RaiseException(42, 0x0, 0, NULL), "") + << "with catch_exceptions " + << (testing::GTEST_FLAG(catch_exceptions) ? "enabled" : "disabled"); +} +# endif + +#endif // GTEST_HAS_DEATH_TEST + +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + testing::GTEST_FLAG(catch_exceptions) = GTEST_ENABLE_CATCH_EXCEPTIONS_ != 0; + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-env-var-test.py b/security/nss/gtests/google_test/gtest/test/googletest-env-var-test.py new file mode 100755 index 0000000000..02c3655c39 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-env-var-test.py @@ -0,0 +1,120 @@ +#!/usr/bin/env python +# +# Copyright 2008, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Verifies that Google Test correctly parses environment variables.""" + +import os +import gtest_test_utils + + +IS_WINDOWS = os.name == 'nt' +IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux' + +COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-env-var-test_') + +environ = os.environ.copy() + + +def AssertEq(expected, actual): + if expected != actual: + print('Expected: %s' % (expected,)) + print(' Actual: %s' % (actual,)) + raise AssertionError + + +def SetEnvVar(env_var, value): + """Sets the env variable to 'value'; unsets it when 'value' is None.""" + + if value is not None: + environ[env_var] = value + elif env_var in environ: + del environ[env_var] + + +def GetFlag(flag): + """Runs googletest-env-var-test_ and returns its output.""" + + args = [COMMAND] + if flag is not None: + args += [flag] + return gtest_test_utils.Subprocess(args, env=environ).output + + +def TestFlag(flag, test_val, default_val): + """Verifies that the given flag is affected by the corresponding env var.""" + + env_var = 'GTEST_' + flag.upper() + SetEnvVar(env_var, test_val) + AssertEq(test_val, GetFlag(flag)) + SetEnvVar(env_var, None) + AssertEq(default_val, GetFlag(flag)) + + +class GTestEnvVarTest(gtest_test_utils.TestCase): + + def testEnvVarAffectsFlag(self): + """Tests that environment variable should affect the corresponding flag.""" + + TestFlag('break_on_failure', '1', '0') + TestFlag('color', 'yes', 'auto') + SetEnvVar('TESTBRIDGE_TEST_RUNNER_FAIL_FAST', None) # For 'fail_fast' test + TestFlag('fail_fast', '1', '0') + TestFlag('filter', 'FooTest.Bar', '*') + SetEnvVar('XML_OUTPUT_FILE', None) # For 'output' test + TestFlag('output', 'xml:tmp/foo.xml', '') + TestFlag('brief', '1', '0') + TestFlag('print_time', '0', '1') + TestFlag('repeat', '999', '1') + TestFlag('throw_on_failure', '1', '0') + TestFlag('death_test_style', 'threadsafe', 'fast') + TestFlag('catch_exceptions', '0', '1') + + if IS_LINUX: + TestFlag('death_test_use_fork', '1', '0') + TestFlag('stack_trace_depth', '0', '100') + + + def testXmlOutputFile(self): + """Tests that $XML_OUTPUT_FILE affects the output flag.""" + + SetEnvVar('GTEST_OUTPUT', None) + SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml') + AssertEq('xml:tmp/bar.xml', GetFlag('output')) + + def testXmlOutputFileOverride(self): + """Tests that $XML_OUTPUT_FILE is overridden by $GTEST_OUTPUT.""" + + SetEnvVar('GTEST_OUTPUT', 'xml:tmp/foo.xml') + SetEnvVar('XML_OUTPUT_FILE', 'tmp/bar.xml') + AssertEq('xml:tmp/foo.xml', GetFlag('output')) + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git 
a/security/nss/gtests/google_test/gtest/test/googletest-env-var-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-env-var-test_.cc new file mode 100644 index 0000000000..52f95864e1 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-env-var-test_.cc @@ -0,0 +1,132 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// A helper program for testing that Google Test parses the environment +// variables correctly. + +#include <iostream> + +#include "gtest/gtest.h" +#include "src/gtest-internal-inl.h" + +using ::std::cout; + +namespace testing { + +// The purpose of this is to make the test more realistic by ensuring +// that the UnitTest singleton is created before main() is entered. +// We don't actual run the TEST itself. 
+TEST(GTestEnvVarTest, Dummy) { +} + +void PrintFlag(const char* flag) { + if (strcmp(flag, "break_on_failure") == 0) { + cout << GTEST_FLAG(break_on_failure); + return; + } + + if (strcmp(flag, "catch_exceptions") == 0) { + cout << GTEST_FLAG(catch_exceptions); + return; + } + + if (strcmp(flag, "color") == 0) { + cout << GTEST_FLAG(color); + return; + } + + if (strcmp(flag, "death_test_style") == 0) { + cout << GTEST_FLAG(death_test_style); + return; + } + + if (strcmp(flag, "death_test_use_fork") == 0) { + cout << GTEST_FLAG(death_test_use_fork); + return; + } + + if (strcmp(flag, "fail_fast") == 0) { + cout << GTEST_FLAG(fail_fast); + return; + } + + if (strcmp(flag, "filter") == 0) { + cout << GTEST_FLAG(filter); + return; + } + + if (strcmp(flag, "output") == 0) { + cout << GTEST_FLAG(output); + return; + } + + if (strcmp(flag, "brief") == 0) { + cout << GTEST_FLAG(brief); + return; + } + + if (strcmp(flag, "print_time") == 0) { + cout << GTEST_FLAG(print_time); + return; + } + + if (strcmp(flag, "repeat") == 0) { + cout << GTEST_FLAG(repeat); + return; + } + + if (strcmp(flag, "stack_trace_depth") == 0) { + cout << GTEST_FLAG(stack_trace_depth); + return; + } + + if (strcmp(flag, "throw_on_failure") == 0) { + cout << GTEST_FLAG(throw_on_failure); + return; + } + + cout << "Invalid flag name " << flag + << ". Valid names are break_on_failure, color, filter, etc.\n"; + exit(1); +} + +} // namespace testing + +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + + if (argc != 2) { + cout << "Usage: googletest-env-var-test_ NAME_OF_FLAG\n"; + return 1; + } + + testing::PrintFlag(argv[1]); + return 0; +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-failfast-unittest.py b/security/nss/gtests/google_test/gtest/test/googletest-failfast-unittest.py new file mode 100755 index 0000000000..3aeb2dffea --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-failfast-unittest.py @@ -0,0 +1,410 @@ +#!/usr/bin/env python +# +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Unit test for Google Test fail_fast. + +A user can specify if a Google Test program should continue test execution +after a test failure via the GTEST_FAIL_FAST environment variable or the +--gtest_fail_fast flag. The default value of the flag can also be changed +by Bazel fail fast environment variable TESTBRIDGE_TEST_RUNNER_FAIL_FAST. + +This script tests such functionality by invoking googletest-failfast-unittest_ +(a program written with Google Test) with different environments and command +line flags. +""" + +import os +import gtest_test_utils + +# Constants. + +# Bazel testbridge environment variable for fail fast +BAZEL_FAIL_FAST_ENV_VAR = 'TESTBRIDGE_TEST_RUNNER_FAIL_FAST' + +# The environment variable for specifying fail fast. +FAIL_FAST_ENV_VAR = 'GTEST_FAIL_FAST' + +# The command line flag for specifying fail fast. +FAIL_FAST_FLAG = 'gtest_fail_fast' + +# The command line flag to run disabled tests. +RUN_DISABLED_FLAG = 'gtest_also_run_disabled_tests' + +# The command line flag for specifying a filter. +FILTER_FLAG = 'gtest_filter' + +# Command to run the googletest-failfast-unittest_ program. +COMMAND = gtest_test_utils.GetTestExecutablePath( + 'googletest-failfast-unittest_') + +# The command line flag to tell Google Test to output the list of tests it +# will run. +LIST_TESTS_FLAG = '--gtest_list_tests' + +# Indicates whether Google Test supports death tests. +SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess( + [COMMAND, LIST_TESTS_FLAG]).output + +# Utilities. + +environ = os.environ.copy() + + +def SetEnvVar(env_var, value): + """Sets the env variable to 'value'; unsets it when 'value' is None.""" + + if value is not None: + environ[env_var] = value + elif env_var in environ: + del environ[env_var] + + +def RunAndReturnOutput(test_suite=None, fail_fast=None, run_disabled=False): + """Runs the test program and returns its output.""" + + args = [] + xml_path = os.path.join(gtest_test_utils.GetTempDir(), + '.GTestFailFastUnitTest.xml') + args += ['--gtest_output=xml:' + xml_path] + if fail_fast is not None: + if isinstance(fail_fast, str): + args += ['--%s=%s' % (FAIL_FAST_FLAG, fail_fast)] + elif fail_fast: + args += ['--%s' % FAIL_FAST_FLAG] + else: + args += ['--no%s' % FAIL_FAST_FLAG] + if test_suite: + args += ['--%s=%s.*' % (FILTER_FLAG, test_suite)] + if run_disabled: + args += ['--%s' % RUN_DISABLED_FLAG] + txt_out = gtest_test_utils.Subprocess([COMMAND] + args, env=environ).output + with open(xml_path) as xml_file: + return txt_out, xml_file.read() + + +# The unit test. 
+class GTestFailFastUnitTest(gtest_test_utils.TestCase): + """Tests the env variable or the command line flag for fail_fast.""" + + def testDefaultBehavior(self): + """Tests the behavior of not specifying the fail_fast.""" + + txt, _ = RunAndReturnOutput() + self.assertIn('22 FAILED TEST', txt) + + def testGoogletestFlag(self): + txt, _ = RunAndReturnOutput(test_suite='HasSimpleTest', fail_fast=True) + self.assertIn('1 FAILED TEST', txt) + self.assertIn('[ SKIPPED ] 3 tests', txt) + + txt, _ = RunAndReturnOutput(test_suite='HasSimpleTest', fail_fast=False) + self.assertIn('4 FAILED TEST', txt) + self.assertNotIn('[ SKIPPED ]', txt) + + def testGoogletestEnvVar(self): + """Tests the behavior of specifying fail_fast via Googletest env var.""" + + try: + SetEnvVar(FAIL_FAST_ENV_VAR, '1') + txt, _ = RunAndReturnOutput('HasSimpleTest') + self.assertIn('1 FAILED TEST', txt) + self.assertIn('[ SKIPPED ] 3 tests', txt) + + SetEnvVar(FAIL_FAST_ENV_VAR, '0') + txt, _ = RunAndReturnOutput('HasSimpleTest') + self.assertIn('4 FAILED TEST', txt) + self.assertNotIn('[ SKIPPED ]', txt) + finally: + SetEnvVar(FAIL_FAST_ENV_VAR, None) + + def testBazelEnvVar(self): + """Tests the behavior of specifying fail_fast via Bazel testbridge.""" + + try: + SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, '1') + txt, _ = RunAndReturnOutput('HasSimpleTest') + self.assertIn('1 FAILED TEST', txt) + self.assertIn('[ SKIPPED ] 3 tests', txt) + + SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, '0') + txt, _ = RunAndReturnOutput('HasSimpleTest') + self.assertIn('4 FAILED TEST', txt) + self.assertNotIn('[ SKIPPED ]', txt) + finally: + SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, None) + + def testFlagOverridesEnvVar(self): + """Tests precedence of flag over env var.""" + + try: + SetEnvVar(FAIL_FAST_ENV_VAR, '0') + txt, _ = RunAndReturnOutput('HasSimpleTest', True) + self.assertIn('1 FAILED TEST', txt) + self.assertIn('[ SKIPPED ] 3 tests', txt) + finally: + SetEnvVar(FAIL_FAST_ENV_VAR, None) + + def testGoogletestEnvVarOverridesBazelEnvVar(self): + """Tests that the Googletest native env var over Bazel testbridge.""" + + try: + SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, '0') + SetEnvVar(FAIL_FAST_ENV_VAR, '1') + txt, _ = RunAndReturnOutput('HasSimpleTest') + self.assertIn('1 FAILED TEST', txt) + self.assertIn('[ SKIPPED ] 3 tests', txt) + finally: + SetEnvVar(FAIL_FAST_ENV_VAR, None) + SetEnvVar(BAZEL_FAIL_FAST_ENV_VAR, None) + + def testEventListener(self): + txt, _ = RunAndReturnOutput(test_suite='HasSkipTest', fail_fast=True) + self.assertIn('1 FAILED TEST', txt) + self.assertIn('[ SKIPPED ] 3 tests', txt) + for expected_count, callback in [(1, 'OnTestSuiteStart'), + (5, 'OnTestStart'), + (5, 'OnTestEnd'), + (5, 'OnTestPartResult'), + (1, 'OnTestSuiteEnd')]: + self.assertEqual( + expected_count, txt.count(callback), + 'Expected %d calls to callback %s match count on output: %s ' % + (expected_count, callback, txt)) + + txt, _ = RunAndReturnOutput(test_suite='HasSkipTest', fail_fast=False) + self.assertIn('3 FAILED TEST', txt) + self.assertIn('[ SKIPPED ] 1 test', txt) + for expected_count, callback in [(1, 'OnTestSuiteStart'), + (5, 'OnTestStart'), + (5, 'OnTestEnd'), + (5, 'OnTestPartResult'), + (1, 'OnTestSuiteEnd')]: + self.assertEqual( + expected_count, txt.count(callback), + 'Expected %d calls to callback %s match count on output: %s ' % + (expected_count, callback, txt)) + + def assertXmlResultCount(self, result, count, xml): + self.assertEqual( + count, xml.count('result="%s"' % result), + 'Expected \'result="%s"\' match count of %s: %s ' % + (result, count, 
xml)) + + def assertXmlStatusCount(self, status, count, xml): + self.assertEqual( + count, xml.count('status="%s"' % status), + 'Expected \'status="%s"\' match count of %s: %s ' % + (status, count, xml)) + + def assertFailFastXmlAndTxtOutput(self, + fail_fast, + test_suite, + passed_count, + failure_count, + skipped_count, + suppressed_count, + run_disabled=False): + """Assert XML and text output of a test execution.""" + + txt, xml = RunAndReturnOutput(test_suite, fail_fast, run_disabled) + if failure_count > 0: + self.assertIn('%s FAILED TEST' % failure_count, txt) + if suppressed_count > 0: + self.assertIn('%s DISABLED TEST' % suppressed_count, txt) + if skipped_count > 0: + self.assertIn('[ SKIPPED ] %s tests' % skipped_count, txt) + self.assertXmlStatusCount('run', + passed_count + failure_count + skipped_count, xml) + self.assertXmlStatusCount('notrun', suppressed_count, xml) + self.assertXmlResultCount('completed', passed_count + failure_count, xml) + self.assertXmlResultCount('skipped', skipped_count, xml) + self.assertXmlResultCount('suppressed', suppressed_count, xml) + + def assertFailFastBehavior(self, + test_suite, + passed_count, + failure_count, + skipped_count, + suppressed_count, + run_disabled=False): + """Assert --fail_fast via flag.""" + + for fail_fast in ('true', '1', 't', True): + self.assertFailFastXmlAndTxtOutput(fail_fast, test_suite, passed_count, + failure_count, skipped_count, + suppressed_count, run_disabled) + + def assertNotFailFastBehavior(self, + test_suite, + passed_count, + failure_count, + skipped_count, + suppressed_count, + run_disabled=False): + """Assert --nofail_fast via flag.""" + + for fail_fast in ('false', '0', 'f', False): + self.assertFailFastXmlAndTxtOutput(fail_fast, test_suite, passed_count, + failure_count, skipped_count, + suppressed_count, run_disabled) + + def testFlag_HasFixtureTest(self): + """Tests the behavior of fail_fast and TEST_F.""" + self.assertFailFastBehavior( + test_suite='HasFixtureTest', + passed_count=1, + failure_count=1, + skipped_count=3, + suppressed_count=0) + self.assertNotFailFastBehavior( + test_suite='HasFixtureTest', + passed_count=1, + failure_count=4, + skipped_count=0, + suppressed_count=0) + + def testFlag_HasSimpleTest(self): + """Tests the behavior of fail_fast and TEST.""" + self.assertFailFastBehavior( + test_suite='HasSimpleTest', + passed_count=1, + failure_count=1, + skipped_count=3, + suppressed_count=0) + self.assertNotFailFastBehavior( + test_suite='HasSimpleTest', + passed_count=1, + failure_count=4, + skipped_count=0, + suppressed_count=0) + + def testFlag_HasParametersTest(self): + """Tests the behavior of fail_fast and TEST_P.""" + self.assertFailFastBehavior( + test_suite='HasParametersSuite/HasParametersTest', + passed_count=0, + failure_count=1, + skipped_count=3, + suppressed_count=0) + self.assertNotFailFastBehavior( + test_suite='HasParametersSuite/HasParametersTest', + passed_count=0, + failure_count=4, + skipped_count=0, + suppressed_count=0) + + def testFlag_HasDisabledTest(self): + """Tests the behavior of fail_fast and Disabled test cases.""" + self.assertFailFastBehavior( + test_suite='HasDisabledTest', + passed_count=1, + failure_count=1, + skipped_count=2, + suppressed_count=1, + run_disabled=False) + self.assertNotFailFastBehavior( + test_suite='HasDisabledTest', + passed_count=1, + failure_count=3, + skipped_count=0, + suppressed_count=1, + run_disabled=False) + + def testFlag_HasDisabledRunDisabledTest(self): + """Tests the behavior of fail_fast and Disabled test cases 
enabled.""" + self.assertFailFastBehavior( + test_suite='HasDisabledTest', + passed_count=1, + failure_count=1, + skipped_count=3, + suppressed_count=0, + run_disabled=True) + self.assertNotFailFastBehavior( + test_suite='HasDisabledTest', + passed_count=1, + failure_count=4, + skipped_count=0, + suppressed_count=0, + run_disabled=True) + + def testFlag_HasDisabledSuiteTest(self): + """Tests the behavior of fail_fast and Disabled test suites.""" + self.assertFailFastBehavior( + test_suite='DISABLED_HasDisabledSuite', + passed_count=0, + failure_count=0, + skipped_count=0, + suppressed_count=5, + run_disabled=False) + self.assertNotFailFastBehavior( + test_suite='DISABLED_HasDisabledSuite', + passed_count=0, + failure_count=0, + skipped_count=0, + suppressed_count=5, + run_disabled=False) + + def testFlag_HasDisabledSuiteRunDisabledTest(self): + """Tests the behavior of fail_fast and Disabled test suites enabled.""" + self.assertFailFastBehavior( + test_suite='DISABLED_HasDisabledSuite', + passed_count=1, + failure_count=1, + skipped_count=3, + suppressed_count=0, + run_disabled=True) + self.assertNotFailFastBehavior( + test_suite='DISABLED_HasDisabledSuite', + passed_count=1, + failure_count=4, + skipped_count=0, + suppressed_count=0, + run_disabled=True) + + if SUPPORTS_DEATH_TESTS: + + def testFlag_HasDeathTest(self): + """Tests the behavior of fail_fast and death tests.""" + self.assertFailFastBehavior( + test_suite='HasDeathTest', + passed_count=1, + failure_count=1, + skipped_count=3, + suppressed_count=0) + self.assertNotFailFastBehavior( + test_suite='HasDeathTest', + passed_count=1, + failure_count=4, + skipped_count=0, + suppressed_count=0) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-failfast-unittest_.cc b/security/nss/gtests/google_test/gtest/test/googletest-failfast-unittest_.cc new file mode 100644 index 0000000000..0b2c951bc0 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-failfast-unittest_.cc @@ -0,0 +1,167 @@ +// Copyright 2005, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Unit test for Google Test test filters. +// +// A user can specify which test(s) in a Google Test program to run via +// either the GTEST_FILTER environment variable or the --gtest_filter +// flag. This is used for testing such functionality. +// +// The program will be invoked from a Python unit test. Don't run it +// directly. + +#include "gtest/gtest.h" + +namespace { + +// Test HasFixtureTest. + +class HasFixtureTest : public testing::Test {}; + +TEST_F(HasFixtureTest, Test0) {} + +TEST_F(HasFixtureTest, Test1) { FAIL() << "Expected failure."; } + +TEST_F(HasFixtureTest, Test2) { FAIL() << "Expected failure."; } + +TEST_F(HasFixtureTest, Test3) { FAIL() << "Expected failure."; } + +TEST_F(HasFixtureTest, Test4) { FAIL() << "Expected failure."; } + +// Test HasSimpleTest. + +TEST(HasSimpleTest, Test0) {} + +TEST(HasSimpleTest, Test1) { FAIL() << "Expected failure."; } + +TEST(HasSimpleTest, Test2) { FAIL() << "Expected failure."; } + +TEST(HasSimpleTest, Test3) { FAIL() << "Expected failure."; } + +TEST(HasSimpleTest, Test4) { FAIL() << "Expected failure."; } + +// Test HasDisabledTest. + +TEST(HasDisabledTest, Test0) {} + +TEST(HasDisabledTest, DISABLED_Test1) { FAIL() << "Expected failure."; } + +TEST(HasDisabledTest, Test2) { FAIL() << "Expected failure."; } + +TEST(HasDisabledTest, Test3) { FAIL() << "Expected failure."; } + +TEST(HasDisabledTest, Test4) { FAIL() << "Expected failure."; } + +// Test HasDeathTest + +TEST(HasDeathTest, Test0) { EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*"); } + +TEST(HasDeathTest, Test1) { + EXPECT_DEATH_IF_SUPPORTED(FAIL() << "Expected failure.", ".*"); +} + +TEST(HasDeathTest, Test2) { + EXPECT_DEATH_IF_SUPPORTED(FAIL() << "Expected failure.", ".*"); +} + +TEST(HasDeathTest, Test3) { + EXPECT_DEATH_IF_SUPPORTED(FAIL() << "Expected failure.", ".*"); +} + +TEST(HasDeathTest, Test4) { + EXPECT_DEATH_IF_SUPPORTED(FAIL() << "Expected failure.", ".*"); +} + +// Test DISABLED_HasDisabledSuite + +TEST(DISABLED_HasDisabledSuite, Test0) {} + +TEST(DISABLED_HasDisabledSuite, Test1) { FAIL() << "Expected failure."; } + +TEST(DISABLED_HasDisabledSuite, Test2) { FAIL() << "Expected failure."; } + +TEST(DISABLED_HasDisabledSuite, Test3) { FAIL() << "Expected failure."; } + +TEST(DISABLED_HasDisabledSuite, Test4) { FAIL() << "Expected failure."; } + +// Test HasParametersTest + +class HasParametersTest : public testing::TestWithParam<int> {}; + +TEST_P(HasParametersTest, Test1) { FAIL() << "Expected failure."; } + +TEST_P(HasParametersTest, Test2) { FAIL() << "Expected failure."; } + +INSTANTIATE_TEST_SUITE_P(HasParametersSuite, HasParametersTest, + testing::Values(1, 2)); + +class MyTestListener : public ::testing::EmptyTestEventListener { + void OnTestSuiteStart(const ::testing::TestSuite& test_suite) override { + printf("We are in OnTestSuiteStart of %s.\n", test_suite.name()); + } + + void OnTestStart(const ::testing::TestInfo& test_info) override { + printf("We are in OnTestStart of %s.%s.\n", test_info.test_suite_name(), + 
test_info.name()); + } + + void OnTestPartResult( + const ::testing::TestPartResult& test_part_result) override { + printf("We are in OnTestPartResult %s:%d.\n", test_part_result.file_name(), + test_part_result.line_number()); + } + + void OnTestEnd(const ::testing::TestInfo& test_info) override { + printf("We are in OnTestEnd of %s.%s.\n", test_info.test_suite_name(), + test_info.name()); + } + + void OnTestSuiteEnd(const ::testing::TestSuite& test_suite) override { + printf("We are in OnTestSuiteEnd of %s.\n", test_suite.name()); + } +}; + +TEST(HasSkipTest, Test0) { SUCCEED() << "Expected success."; } + +TEST(HasSkipTest, Test1) { GTEST_SKIP() << "Expected skip."; } + +TEST(HasSkipTest, Test2) { FAIL() << "Expected failure."; } + +TEST(HasSkipTest, Test3) { FAIL() << "Expected failure."; } + +TEST(HasSkipTest, Test4) { FAIL() << "Expected failure."; } + +} // namespace + +int main(int argc, char **argv) { + ::testing::InitGoogleTest(&argc, argv); + ::testing::UnitTest::GetInstance()->listeners().Append(new MyTestListener()); + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-filepath-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-filepath-test.cc new file mode 100644 index 0000000000..aafad36f3f --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-filepath-test.cc @@ -0,0 +1,649 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Google Test filepath utilities +// +// This file tests classes and functions used internally by +// Google Test. They are subject to change without notice. +// +// This file is #included from gtest-internal.h. +// Do not #include this file anywhere else! 
+ +#include "gtest/internal/gtest-filepath.h" +#include "gtest/gtest.h" +#include "src/gtest-internal-inl.h" + +#if GTEST_OS_WINDOWS_MOBILE +# include <windows.h> // NOLINT +#elif GTEST_OS_WINDOWS +# include <direct.h> // NOLINT +#endif // GTEST_OS_WINDOWS_MOBILE + +namespace testing { +namespace internal { +namespace { + +#if GTEST_OS_WINDOWS_MOBILE + +// Windows CE doesn't have the remove C function. +int remove(const char* path) { + LPCWSTR wpath = String::AnsiToUtf16(path); + int ret = DeleteFile(wpath) ? 0 : -1; + delete [] wpath; + return ret; +} +// Windows CE doesn't have the _rmdir C function. +int _rmdir(const char* path) { + FilePath filepath(path); + LPCWSTR wpath = String::AnsiToUtf16( + filepath.RemoveTrailingPathSeparator().c_str()); + int ret = RemoveDirectory(wpath) ? 0 : -1; + delete [] wpath; + return ret; +} + +#else + +TEST(GetCurrentDirTest, ReturnsCurrentDir) { + const FilePath original_dir = FilePath::GetCurrentDir(); + EXPECT_FALSE(original_dir.IsEmpty()); + + posix::ChDir(GTEST_PATH_SEP_); + const FilePath cwd = FilePath::GetCurrentDir(); + posix::ChDir(original_dir.c_str()); + +# if GTEST_OS_WINDOWS || GTEST_OS_OS2 + + // Skips the ":". + const char* const cwd_without_drive = strchr(cwd.c_str(), ':'); + ASSERT_TRUE(cwd_without_drive != NULL); + EXPECT_STREQ(GTEST_PATH_SEP_, cwd_without_drive + 1); + +# else + + EXPECT_EQ(GTEST_PATH_SEP_, cwd.string()); + +# endif +} + +#endif // GTEST_OS_WINDOWS_MOBILE + +TEST(IsEmptyTest, ReturnsTrueForEmptyPath) { + EXPECT_TRUE(FilePath("").IsEmpty()); +} + +TEST(IsEmptyTest, ReturnsFalseForNonEmptyPath) { + EXPECT_FALSE(FilePath("a").IsEmpty()); + EXPECT_FALSE(FilePath(".").IsEmpty()); + EXPECT_FALSE(FilePath("a/b").IsEmpty()); + EXPECT_FALSE(FilePath("a\\b\\").IsEmpty()); +} + +// RemoveDirectoryName "" -> "" +TEST(RemoveDirectoryNameTest, WhenEmptyName) { + EXPECT_EQ("", FilePath("").RemoveDirectoryName().string()); +} + +// RemoveDirectoryName "afile" -> "afile" +TEST(RemoveDirectoryNameTest, ButNoDirectory) { + EXPECT_EQ("afile", + FilePath("afile").RemoveDirectoryName().string()); +} + +// RemoveDirectoryName "/afile" -> "afile" +TEST(RemoveDirectoryNameTest, RootFileShouldGiveFileName) { + EXPECT_EQ("afile", + FilePath(GTEST_PATH_SEP_ "afile").RemoveDirectoryName().string()); +} + +// RemoveDirectoryName "adir/" -> "" +TEST(RemoveDirectoryNameTest, WhereThereIsNoFileName) { + EXPECT_EQ("", + FilePath("adir" GTEST_PATH_SEP_).RemoveDirectoryName().string()); +} + +// RemoveDirectoryName "adir/afile" -> "afile" +TEST(RemoveDirectoryNameTest, ShouldGiveFileName) { + EXPECT_EQ("afile", + FilePath("adir" GTEST_PATH_SEP_ "afile").RemoveDirectoryName().string()); +} + +// RemoveDirectoryName "adir/subdir/afile" -> "afile" +TEST(RemoveDirectoryNameTest, ShouldAlsoGiveFileName) { + EXPECT_EQ("afile", + FilePath("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_ "afile") + .RemoveDirectoryName().string()); +} + +#if GTEST_HAS_ALT_PATH_SEP_ + +// Tests that RemoveDirectoryName() works with the alternate separator +// on Windows. 
+ +// RemoveDirectoryName("/afile") -> "afile" +TEST(RemoveDirectoryNameTest, RootFileShouldGiveFileNameForAlternateSeparator) { + EXPECT_EQ("afile", FilePath("/afile").RemoveDirectoryName().string()); +} + +// RemoveDirectoryName("adir/") -> "" +TEST(RemoveDirectoryNameTest, WhereThereIsNoFileNameForAlternateSeparator) { + EXPECT_EQ("", FilePath("adir/").RemoveDirectoryName().string()); +} + +// RemoveDirectoryName("adir/afile") -> "afile" +TEST(RemoveDirectoryNameTest, ShouldGiveFileNameForAlternateSeparator) { + EXPECT_EQ("afile", FilePath("adir/afile").RemoveDirectoryName().string()); +} + +// RemoveDirectoryName("adir/subdir/afile") -> "afile" +TEST(RemoveDirectoryNameTest, ShouldAlsoGiveFileNameForAlternateSeparator) { + EXPECT_EQ("afile", + FilePath("adir/subdir/afile").RemoveDirectoryName().string()); +} + +#endif + +// RemoveFileName "" -> "./" +TEST(RemoveFileNameTest, EmptyName) { +#if GTEST_OS_WINDOWS_MOBILE + // On Windows CE, we use the root as the current directory. + EXPECT_EQ(GTEST_PATH_SEP_, FilePath("").RemoveFileName().string()); +#else + EXPECT_EQ("." GTEST_PATH_SEP_, FilePath("").RemoveFileName().string()); +#endif +} + +// RemoveFileName "adir/" -> "adir/" +TEST(RemoveFileNameTest, ButNoFile) { + EXPECT_EQ("adir" GTEST_PATH_SEP_, + FilePath("adir" GTEST_PATH_SEP_).RemoveFileName().string()); +} + +// RemoveFileName "adir/afile" -> "adir/" +TEST(RemoveFileNameTest, GivesDirName) { + EXPECT_EQ("adir" GTEST_PATH_SEP_, + FilePath("adir" GTEST_PATH_SEP_ "afile").RemoveFileName().string()); +} + +// RemoveFileName "adir/subdir/afile" -> "adir/subdir/" +TEST(RemoveFileNameTest, GivesDirAndSubDirName) { + EXPECT_EQ("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_, + FilePath("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_ "afile") + .RemoveFileName().string()); +} + +// RemoveFileName "/afile" -> "/" +TEST(RemoveFileNameTest, GivesRootDir) { + EXPECT_EQ(GTEST_PATH_SEP_, + FilePath(GTEST_PATH_SEP_ "afile").RemoveFileName().string()); +} + +#if GTEST_HAS_ALT_PATH_SEP_ + +// Tests that RemoveFileName() works with the alternate separator on +// Windows. 
+ +// RemoveFileName("adir/") -> "adir/" +TEST(RemoveFileNameTest, ButNoFileForAlternateSeparator) { + EXPECT_EQ("adir" GTEST_PATH_SEP_, + FilePath("adir/").RemoveFileName().string()); +} + +// RemoveFileName("adir/afile") -> "adir/" +TEST(RemoveFileNameTest, GivesDirNameForAlternateSeparator) { + EXPECT_EQ("adir" GTEST_PATH_SEP_, + FilePath("adir/afile").RemoveFileName().string()); +} + +// RemoveFileName("adir/subdir/afile") -> "adir/subdir/" +TEST(RemoveFileNameTest, GivesDirAndSubDirNameForAlternateSeparator) { + EXPECT_EQ("adir" GTEST_PATH_SEP_ "subdir" GTEST_PATH_SEP_, + FilePath("adir/subdir/afile").RemoveFileName().string()); +} + +// RemoveFileName("/afile") -> "\" +TEST(RemoveFileNameTest, GivesRootDirForAlternateSeparator) { + EXPECT_EQ(GTEST_PATH_SEP_, FilePath("/afile").RemoveFileName().string()); +} + +#endif + +TEST(MakeFileNameTest, GenerateWhenNumberIsZero) { + FilePath actual = FilePath::MakeFileName(FilePath("foo"), FilePath("bar"), + 0, "xml"); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.string()); +} + +TEST(MakeFileNameTest, GenerateFileNameNumberGtZero) { + FilePath actual = FilePath::MakeFileName(FilePath("foo"), FilePath("bar"), + 12, "xml"); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar_12.xml", actual.string()); +} + +TEST(MakeFileNameTest, GenerateFileNameWithSlashNumberIsZero) { + FilePath actual = FilePath::MakeFileName(FilePath("foo" GTEST_PATH_SEP_), + FilePath("bar"), 0, "xml"); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.string()); +} + +TEST(MakeFileNameTest, GenerateFileNameWithSlashNumberGtZero) { + FilePath actual = FilePath::MakeFileName(FilePath("foo" GTEST_PATH_SEP_), + FilePath("bar"), 12, "xml"); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar_12.xml", actual.string()); +} + +TEST(MakeFileNameTest, GenerateWhenNumberIsZeroAndDirIsEmpty) { + FilePath actual = FilePath::MakeFileName(FilePath(""), FilePath("bar"), + 0, "xml"); + EXPECT_EQ("bar.xml", actual.string()); +} + +TEST(MakeFileNameTest, GenerateWhenNumberIsNotZeroAndDirIsEmpty) { + FilePath actual = FilePath::MakeFileName(FilePath(""), FilePath("bar"), + 14, "xml"); + EXPECT_EQ("bar_14.xml", actual.string()); +} + +TEST(ConcatPathsTest, WorksWhenDirDoesNotEndWithPathSep) { + FilePath actual = FilePath::ConcatPaths(FilePath("foo"), + FilePath("bar.xml")); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.string()); +} + +TEST(ConcatPathsTest, WorksWhenPath1EndsWithPathSep) { + FilePath actual = FilePath::ConcatPaths(FilePath("foo" GTEST_PATH_SEP_), + FilePath("bar.xml")); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar.xml", actual.string()); +} + +TEST(ConcatPathsTest, Path1BeingEmpty) { + FilePath actual = FilePath::ConcatPaths(FilePath(""), + FilePath("bar.xml")); + EXPECT_EQ("bar.xml", actual.string()); +} + +TEST(ConcatPathsTest, Path2BeingEmpty) { + FilePath actual = FilePath::ConcatPaths(FilePath("foo"), FilePath("")); + EXPECT_EQ("foo" GTEST_PATH_SEP_, actual.string()); +} + +TEST(ConcatPathsTest, BothPathBeingEmpty) { + FilePath actual = FilePath::ConcatPaths(FilePath(""), + FilePath("")); + EXPECT_EQ("", actual.string()); +} + +TEST(ConcatPathsTest, Path1ContainsPathSep) { + FilePath actual = FilePath::ConcatPaths(FilePath("foo" GTEST_PATH_SEP_ "bar"), + FilePath("foobar.xml")); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_ "foobar.xml", + actual.string()); +} + +TEST(ConcatPathsTest, Path2ContainsPathSep) { + FilePath actual = FilePath::ConcatPaths( + FilePath("foo" GTEST_PATH_SEP_), + FilePath("bar" GTEST_PATH_SEP_ "bar.xml")); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar" 
GTEST_PATH_SEP_ "bar.xml", + actual.string()); +} + +TEST(ConcatPathsTest, Path2EndsWithPathSep) { + FilePath actual = FilePath::ConcatPaths(FilePath("foo"), + FilePath("bar" GTEST_PATH_SEP_)); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_, actual.string()); +} + +// RemoveTrailingPathSeparator "" -> "" +TEST(RemoveTrailingPathSeparatorTest, EmptyString) { + EXPECT_EQ("", FilePath("").RemoveTrailingPathSeparator().string()); +} + +// RemoveTrailingPathSeparator "foo" -> "foo" +TEST(RemoveTrailingPathSeparatorTest, FileNoSlashString) { + EXPECT_EQ("foo", FilePath("foo").RemoveTrailingPathSeparator().string()); +} + +// RemoveTrailingPathSeparator "foo/" -> "foo" +TEST(RemoveTrailingPathSeparatorTest, ShouldRemoveTrailingSeparator) { + EXPECT_EQ("foo", + FilePath("foo" GTEST_PATH_SEP_).RemoveTrailingPathSeparator().string()); +#if GTEST_HAS_ALT_PATH_SEP_ + EXPECT_EQ("foo", FilePath("foo/").RemoveTrailingPathSeparator().string()); +#endif +} + +// RemoveTrailingPathSeparator "foo/bar/" -> "foo/bar/" +TEST(RemoveTrailingPathSeparatorTest, ShouldRemoveLastSeparator) { + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar", + FilePath("foo" GTEST_PATH_SEP_ "bar" GTEST_PATH_SEP_) + .RemoveTrailingPathSeparator().string()); +} + +// RemoveTrailingPathSeparator "foo/bar" -> "foo/bar" +TEST(RemoveTrailingPathSeparatorTest, ShouldReturnUnmodified) { + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar", + FilePath("foo" GTEST_PATH_SEP_ "bar") + .RemoveTrailingPathSeparator().string()); +} + +TEST(DirectoryTest, RootDirectoryExists) { +#if GTEST_OS_WINDOWS // We are on Windows. + char current_drive[_MAX_PATH]; // NOLINT + current_drive[0] = static_cast<char>(_getdrive() + 'A' - 1); + current_drive[1] = ':'; + current_drive[2] = '\\'; + current_drive[3] = '\0'; + EXPECT_TRUE(FilePath(current_drive).DirectoryExists()); +#else + EXPECT_TRUE(FilePath("/").DirectoryExists()); +#endif // GTEST_OS_WINDOWS +} + +#if GTEST_OS_WINDOWS +TEST(DirectoryTest, RootOfWrongDriveDoesNotExists) { + const int saved_drive_ = _getdrive(); + // Find a drive that doesn't exist. Start with 'Z' to avoid common ones. + for (char drive = 'Z'; drive >= 'A'; drive--) + if (_chdrive(drive - 'A' + 1) == -1) { + char non_drive[_MAX_PATH]; // NOLINT + non_drive[0] = drive; + non_drive[1] = ':'; + non_drive[2] = '\\'; + non_drive[3] = '\0'; + EXPECT_FALSE(FilePath(non_drive).DirectoryExists()); + break; + } + _chdrive(saved_drive_); +} +#endif // GTEST_OS_WINDOWS + +#if !GTEST_OS_WINDOWS_MOBILE +// Windows CE _does_ consider an empty directory to exist. +TEST(DirectoryTest, EmptyPathDirectoryDoesNotExist) { + EXPECT_FALSE(FilePath("").DirectoryExists()); +} +#endif // !GTEST_OS_WINDOWS_MOBILE + +TEST(DirectoryTest, CurrentDirectoryExists) { +#if GTEST_OS_WINDOWS // We are on Windows. +# ifndef _WIN32_CE // Windows CE doesn't have a current directory. 
+ + EXPECT_TRUE(FilePath(".").DirectoryExists()); + EXPECT_TRUE(FilePath(".\\").DirectoryExists()); + +# endif // _WIN32_CE +#else + EXPECT_TRUE(FilePath(".").DirectoryExists()); + EXPECT_TRUE(FilePath("./").DirectoryExists()); +#endif // GTEST_OS_WINDOWS +} + +// "foo/bar" == foo//bar" == "foo///bar" +TEST(NormalizeTest, MultipleConsecutiveSepaparatorsInMidstring) { + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar", + FilePath("foo" GTEST_PATH_SEP_ "bar").string()); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar", + FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").string()); + EXPECT_EQ("foo" GTEST_PATH_SEP_ "bar", + FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_ + GTEST_PATH_SEP_ "bar").string()); +} + +// "/bar" == //bar" == "///bar" +TEST(NormalizeTest, MultipleConsecutiveSepaparatorsAtStringStart) { + EXPECT_EQ(GTEST_PATH_SEP_ "bar", + FilePath(GTEST_PATH_SEP_ "bar").string()); + EXPECT_EQ(GTEST_PATH_SEP_ "bar", + FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").string()); + EXPECT_EQ(GTEST_PATH_SEP_ "bar", + FilePath(GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_ "bar").string()); +} + +// "foo/" == foo//" == "foo///" +TEST(NormalizeTest, MultipleConsecutiveSepaparatorsAtStringEnd) { + EXPECT_EQ("foo" GTEST_PATH_SEP_, + FilePath("foo" GTEST_PATH_SEP_).string()); + EXPECT_EQ("foo" GTEST_PATH_SEP_, + FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_).string()); + EXPECT_EQ("foo" GTEST_PATH_SEP_, + FilePath("foo" GTEST_PATH_SEP_ GTEST_PATH_SEP_ GTEST_PATH_SEP_).string()); +} + +#if GTEST_HAS_ALT_PATH_SEP_ + +// Tests that separators at the end of the string are normalized +// regardless of their combination (e.g. "foo\" =="foo/\" == +// "foo\\/"). +TEST(NormalizeTest, MixAlternateSeparatorAtStringEnd) { + EXPECT_EQ("foo" GTEST_PATH_SEP_, + FilePath("foo/").string()); + EXPECT_EQ("foo" GTEST_PATH_SEP_, + FilePath("foo" GTEST_PATH_SEP_ "/").string()); + EXPECT_EQ("foo" GTEST_PATH_SEP_, + FilePath("foo//" GTEST_PATH_SEP_).string()); +} + +#endif + +TEST(AssignmentOperatorTest, DefaultAssignedToNonDefault) { + FilePath default_path; + FilePath non_default_path("path"); + non_default_path = default_path; + EXPECT_EQ("", non_default_path.string()); + EXPECT_EQ("", default_path.string()); // RHS var is unchanged. +} + +TEST(AssignmentOperatorTest, NonDefaultAssignedToDefault) { + FilePath non_default_path("path"); + FilePath default_path; + default_path = non_default_path; + EXPECT_EQ("path", default_path.string()); + EXPECT_EQ("path", non_default_path.string()); // RHS var is unchanged. 
+} + +TEST(AssignmentOperatorTest, ConstAssignedToNonConst) { + const FilePath const_default_path("const_path"); + FilePath non_default_path("path"); + non_default_path = const_default_path; + EXPECT_EQ("const_path", non_default_path.string()); +} + +class DirectoryCreationTest : public Test { + protected: + void SetUp() override { + testdata_path_.Set(FilePath( + TempDir() + GetCurrentExecutableName().string() + + "_directory_creation" GTEST_PATH_SEP_ "test" GTEST_PATH_SEP_)); + testdata_file_.Set(testdata_path_.RemoveTrailingPathSeparator()); + + unique_file0_.Set(FilePath::MakeFileName(testdata_path_, FilePath("unique"), + 0, "txt")); + unique_file1_.Set(FilePath::MakeFileName(testdata_path_, FilePath("unique"), + 1, "txt")); + + remove(testdata_file_.c_str()); + remove(unique_file0_.c_str()); + remove(unique_file1_.c_str()); + posix::RmDir(testdata_path_.c_str()); + } + + void TearDown() override { + remove(testdata_file_.c_str()); + remove(unique_file0_.c_str()); + remove(unique_file1_.c_str()); + posix::RmDir(testdata_path_.c_str()); + } + + void CreateTextFile(const char* filename) { + FILE* f = posix::FOpen(filename, "w"); + fprintf(f, "text\n"); + fclose(f); + } + + // Strings representing a directory and a file, with identical paths + // except for the trailing separator character that distinquishes + // a directory named 'test' from a file named 'test'. Example names: + FilePath testdata_path_; // "/tmp/directory_creation/test/" + FilePath testdata_file_; // "/tmp/directory_creation/test" + FilePath unique_file0_; // "/tmp/directory_creation/test/unique.txt" + FilePath unique_file1_; // "/tmp/directory_creation/test/unique_1.txt" +}; + +TEST_F(DirectoryCreationTest, CreateDirectoriesRecursively) { + EXPECT_FALSE(testdata_path_.DirectoryExists()) << testdata_path_.string(); + EXPECT_TRUE(testdata_path_.CreateDirectoriesRecursively()); + EXPECT_TRUE(testdata_path_.DirectoryExists()); +} + +TEST_F(DirectoryCreationTest, CreateDirectoriesForAlreadyExistingPath) { + EXPECT_FALSE(testdata_path_.DirectoryExists()) << testdata_path_.string(); + EXPECT_TRUE(testdata_path_.CreateDirectoriesRecursively()); + // Call 'create' again... should still succeed. + EXPECT_TRUE(testdata_path_.CreateDirectoriesRecursively()); +} + +TEST_F(DirectoryCreationTest, CreateDirectoriesAndUniqueFilename) { + FilePath file_path(FilePath::GenerateUniqueFileName(testdata_path_, + FilePath("unique"), "txt")); + EXPECT_EQ(unique_file0_.string(), file_path.string()); + EXPECT_FALSE(file_path.FileOrDirectoryExists()); // file not there + + testdata_path_.CreateDirectoriesRecursively(); + EXPECT_FALSE(file_path.FileOrDirectoryExists()); // file still not there + CreateTextFile(file_path.c_str()); + EXPECT_TRUE(file_path.FileOrDirectoryExists()); + + FilePath file_path2(FilePath::GenerateUniqueFileName(testdata_path_, + FilePath("unique"), "txt")); + EXPECT_EQ(unique_file1_.string(), file_path2.string()); + EXPECT_FALSE(file_path2.FileOrDirectoryExists()); // file not there + CreateTextFile(file_path2.c_str()); + EXPECT_TRUE(file_path2.FileOrDirectoryExists()); +} + +TEST_F(DirectoryCreationTest, CreateDirectoriesFail) { + // force a failure by putting a file where we will try to create a directory. 
+ CreateTextFile(testdata_file_.c_str()); + EXPECT_TRUE(testdata_file_.FileOrDirectoryExists()); + EXPECT_FALSE(testdata_file_.DirectoryExists()); + EXPECT_FALSE(testdata_file_.CreateDirectoriesRecursively()); +} + +TEST(NoDirectoryCreationTest, CreateNoDirectoriesForDefaultXmlFile) { + const FilePath test_detail_xml("test_detail.xml"); + EXPECT_FALSE(test_detail_xml.CreateDirectoriesRecursively()); +} + +TEST(FilePathTest, DefaultConstructor) { + FilePath fp; + EXPECT_EQ("", fp.string()); +} + +TEST(FilePathTest, CharAndCopyConstructors) { + const FilePath fp("spicy"); + EXPECT_EQ("spicy", fp.string()); + + const FilePath fp_copy(fp); + EXPECT_EQ("spicy", fp_copy.string()); +} + +TEST(FilePathTest, StringConstructor) { + const FilePath fp(std::string("cider")); + EXPECT_EQ("cider", fp.string()); +} + +TEST(FilePathTest, Set) { + const FilePath apple("apple"); + FilePath mac("mac"); + mac.Set(apple); // Implement Set() since overloading operator= is forbidden. + EXPECT_EQ("apple", mac.string()); + EXPECT_EQ("apple", apple.string()); +} + +TEST(FilePathTest, ToString) { + const FilePath file("drink"); + EXPECT_EQ("drink", file.string()); +} + +TEST(FilePathTest, RemoveExtension) { + EXPECT_EQ("app", FilePath("app.cc").RemoveExtension("cc").string()); + EXPECT_EQ("app", FilePath("app.exe").RemoveExtension("exe").string()); + EXPECT_EQ("APP", FilePath("APP.EXE").RemoveExtension("exe").string()); +} + +TEST(FilePathTest, RemoveExtensionWhenThereIsNoExtension) { + EXPECT_EQ("app", FilePath("app").RemoveExtension("exe").string()); +} + +TEST(FilePathTest, IsDirectory) { + EXPECT_FALSE(FilePath("cola").IsDirectory()); + EXPECT_TRUE(FilePath("koala" GTEST_PATH_SEP_).IsDirectory()); +#if GTEST_HAS_ALT_PATH_SEP_ + EXPECT_TRUE(FilePath("koala/").IsDirectory()); +#endif +} + +TEST(FilePathTest, IsAbsolutePath) { + EXPECT_FALSE(FilePath("is" GTEST_PATH_SEP_ "relative").IsAbsolutePath()); + EXPECT_FALSE(FilePath("").IsAbsolutePath()); +#if GTEST_OS_WINDOWS + EXPECT_TRUE(FilePath("c:\\" GTEST_PATH_SEP_ "is_not" + GTEST_PATH_SEP_ "relative").IsAbsolutePath()); + EXPECT_FALSE(FilePath("c:foo" GTEST_PATH_SEP_ "bar").IsAbsolutePath()); + EXPECT_TRUE(FilePath("c:/" GTEST_PATH_SEP_ "is_not" + GTEST_PATH_SEP_ "relative").IsAbsolutePath()); +#else + EXPECT_TRUE(FilePath(GTEST_PATH_SEP_ "is_not" GTEST_PATH_SEP_ "relative") + .IsAbsolutePath()); +#endif // GTEST_OS_WINDOWS +} + +TEST(FilePathTest, IsRootDirectory) { +#if GTEST_OS_WINDOWS + EXPECT_TRUE(FilePath("a:\\").IsRootDirectory()); + EXPECT_TRUE(FilePath("Z:/").IsRootDirectory()); + EXPECT_TRUE(FilePath("e://").IsRootDirectory()); + EXPECT_FALSE(FilePath("").IsRootDirectory()); + EXPECT_FALSE(FilePath("b:").IsRootDirectory()); + EXPECT_FALSE(FilePath("b:a").IsRootDirectory()); + EXPECT_FALSE(FilePath("8:/").IsRootDirectory()); + EXPECT_FALSE(FilePath("c|/").IsRootDirectory()); +#else + EXPECT_TRUE(FilePath("/").IsRootDirectory()); + EXPECT_TRUE(FilePath("//").IsRootDirectory()); + EXPECT_FALSE(FilePath("").IsRootDirectory()); + EXPECT_FALSE(FilePath("\\").IsRootDirectory()); + EXPECT_FALSE(FilePath("/x").IsRootDirectory()); +#endif +} + +} // namespace +} // namespace internal +} // namespace testing diff --git a/security/nss/gtests/google_test/gtest/test/googletest-filter-unittest.py b/security/nss/gtests/google_test/gtest/test/googletest-filter-unittest.py new file mode 100755 index 0000000000..6b32f2d219 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-filter-unittest.py @@ -0,0 +1,639 @@ +#!/usr/bin/env python +# +# Copyright 
2005 Google Inc. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unit test for Google Test test filters. + +A user can specify which test(s) in a Google Test program to run via either +the GTEST_FILTER environment variable or the --gtest_filter flag. +This script tests such functionality by invoking +googletest-filter-unittest_ (a program written with Google Test) with different +environments and command line flags. + +Note that test sharding may also influence which tests are filtered. Therefore, +we test that here also. +""" + +import os +import re +try: + from sets import Set as set # For Python 2.3 compatibility +except ImportError: + pass +import sys +import gtest_test_utils + +# Constants. + +# Checks if this platform can pass empty environment variables to child +# processes. We set an env variable to an empty string and invoke a python +# script in a subprocess to print whether the variable is STILL in +# os.environ. We then use 'eval' to parse the child's output so that an +# exception is thrown if the input is anything other than 'True' nor 'False'. +CAN_PASS_EMPTY_ENV = False +if sys.executable: + os.environ['EMPTY_VAR'] = '' + child = gtest_test_utils.Subprocess( + [sys.executable, '-c', 'import os; print(\'EMPTY_VAR\' in os.environ)']) + CAN_PASS_EMPTY_ENV = eval(child.output) + + +# Check if this platform can unset environment variables in child processes. +# We set an env variable to a non-empty string, unset it, and invoke +# a python script in a subprocess to print whether the variable +# is NO LONGER in os.environ. +# We use 'eval' to parse the child's output so that an exception +# is thrown if the input is neither 'True' nor 'False'. +CAN_UNSET_ENV = False +if sys.executable: + os.environ['UNSET_VAR'] = 'X' + del os.environ['UNSET_VAR'] + child = gtest_test_utils.Subprocess( + [sys.executable, '-c', 'import os; print(\'UNSET_VAR\' not in os.environ)' + ]) + CAN_UNSET_ENV = eval(child.output) + + +# Checks if we should test with an empty filter. 
This doesn't +# make sense on platforms that cannot pass empty env variables (Win32) +# and on platforms that cannot unset variables (since we cannot tell +# the difference between "" and NULL -- Borland and Solaris < 5.10) +CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV) + + +# The environment variable for specifying the test filters. +FILTER_ENV_VAR = 'GTEST_FILTER' + +# The environment variables for test sharding. +TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS' +SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX' +SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE' + +# The command line flag for specifying the test filters. +FILTER_FLAG = 'gtest_filter' + +# The command line flag for including disabled tests. +ALSO_RUN_DISABLED_TESTS_FLAG = 'gtest_also_run_disabled_tests' + +# Command to run the googletest-filter-unittest_ program. +COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-filter-unittest_') + +# Regex for determining whether parameterized tests are enabled in the binary. +PARAM_TEST_REGEX = re.compile(r'/ParamTest') + +# Regex for parsing test case names from Google Test's output. +TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)') + +# Regex for parsing test names from Google Test's output. +TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)') + +# The command line flag to tell Google Test to output the list of tests it +# will run. +LIST_TESTS_FLAG = '--gtest_list_tests' + +# Indicates whether Google Test supports death tests. +SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess( + [COMMAND, LIST_TESTS_FLAG]).output + +# Full names of all tests in googletest-filter-unittests_. +PARAM_TESTS = [ + 'SeqP/ParamTest.TestX/0', + 'SeqP/ParamTest.TestX/1', + 'SeqP/ParamTest.TestY/0', + 'SeqP/ParamTest.TestY/1', + 'SeqQ/ParamTest.TestX/0', + 'SeqQ/ParamTest.TestX/1', + 'SeqQ/ParamTest.TestY/0', + 'SeqQ/ParamTest.TestY/1', + ] + +DISABLED_TESTS = [ + 'BarTest.DISABLED_TestFour', + 'BarTest.DISABLED_TestFive', + 'BazTest.DISABLED_TestC', + 'DISABLED_FoobarTest.Test1', + 'DISABLED_FoobarTest.DISABLED_Test2', + 'DISABLED_FoobarbazTest.TestA', + ] + +if SUPPORTS_DEATH_TESTS: + DEATH_TESTS = [ + 'HasDeathTest.Test1', + 'HasDeathTest.Test2', + ] +else: + DEATH_TESTS = [] + +# All the non-disabled tests. +ACTIVE_TESTS = [ + 'FooTest.Abc', + 'FooTest.Xyz', + + 'BarTest.TestOne', + 'BarTest.TestTwo', + 'BarTest.TestThree', + + 'BazTest.TestOne', + 'BazTest.TestA', + 'BazTest.TestB', + ] + DEATH_TESTS + PARAM_TESTS + +param_tests_present = None + +# Utilities. + +environ = os.environ.copy() + + +def SetEnvVar(env_var, value): + """Sets the env variable to 'value'; unsets it when 'value' is None.""" + + if value is not None: + environ[env_var] = value + elif env_var in environ: + del environ[env_var] + + +def RunAndReturnOutput(args = None): + """Runs the test program and returns its output.""" + + return gtest_test_utils.Subprocess([COMMAND] + (args or []), + env=environ).output + + +def RunAndExtractTestList(args = None): + """Runs the test program and returns its exit code and a list of tests run.""" + + p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ) + tests_run = [] + test_case = '' + test = '' + for line in p.output.split('\n'): + match = TEST_CASE_REGEX.match(line) + if match is not None: + test_case = match.group(1) + else: + match = TEST_REGEX.match(line) + if match is not None: + test = match.group(1) + tests_run.append(test_case + '.' 
+ test) + return (tests_run, p.exit_code) + + +def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs): + """Runs the given function and arguments in a modified environment.""" + try: + original_env = environ.copy() + environ.update(extra_env) + return function(*args, **kwargs) + finally: + environ.clear() + environ.update(original_env) + + +def RunWithSharding(total_shards, shard_index, command): + """Runs a test program shard and returns exit code and a list of tests run.""" + + extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index), + TOTAL_SHARDS_ENV_VAR: str(total_shards)} + return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command) + +# The unit test. + + +class GTestFilterUnitTest(gtest_test_utils.TestCase): + """Tests the env variable or the command line flag to filter tests.""" + + # Utilities. + + def AssertSetEqual(self, lhs, rhs): + """Asserts that two sets are equal.""" + + for elem in lhs: + self.assert_(elem in rhs, '%s in %s' % (elem, rhs)) + + for elem in rhs: + self.assert_(elem in lhs, '%s in %s' % (elem, lhs)) + + def AssertPartitionIsValid(self, set_var, list_of_sets): + """Asserts that list_of_sets is a valid partition of set_var.""" + + full_partition = [] + for slice_var in list_of_sets: + full_partition.extend(slice_var) + self.assertEqual(len(set_var), len(full_partition)) + self.assertEqual(set(set_var), set(full_partition)) + + def AdjustForParameterizedTests(self, tests_to_run): + """Adjust tests_to_run in case value parameterized tests are disabled.""" + + global param_tests_present + if not param_tests_present: + return list(set(tests_to_run) - set(PARAM_TESTS)) + else: + return tests_to_run + + def RunAndVerify(self, gtest_filter, tests_to_run): + """Checks that the binary runs correct set of tests for a given filter.""" + + tests_to_run = self.AdjustForParameterizedTests(tests_to_run) + + # First, tests using the environment variable. + + # Windows removes empty variables from the environment when passing it + # to a new process. This means it is impossible to pass an empty filter + # into a process using the environment variable. However, we can still + # test the case when the variable is not supplied (i.e., gtest_filter is + # None). + # pylint: disable-msg=C6403 + if CAN_TEST_EMPTY_FILTER or gtest_filter != '': + SetEnvVar(FILTER_ENV_VAR, gtest_filter) + tests_run = RunAndExtractTestList()[0] + SetEnvVar(FILTER_ENV_VAR, None) + self.AssertSetEqual(tests_run, tests_to_run) + # pylint: enable-msg=C6403 + + # Next, tests using the command line flag. + + if gtest_filter is None: + args = [] + else: + args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)] + + tests_run = RunAndExtractTestList(args)[0] + self.AssertSetEqual(tests_run, tests_to_run) + + def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run, + args=None, check_exit_0=False): + """Checks that binary runs correct tests for the given filter and shard. + + Runs all shards of googletest-filter-unittest_ with the given filter, and + verifies that the right set of tests were run. The union of tests run + on each shard should be identical to tests_to_run, without duplicates. + If check_exit_0, . + + Args: + gtest_filter: A filter to apply to the tests. + total_shards: A total number of shards to split test run into. + tests_to_run: A set of tests expected to run. + args : Arguments to pass to the to the test binary. + check_exit_0: When set to a true value, make sure that all shards + return 0. 
+ """ + + tests_to_run = self.AdjustForParameterizedTests(tests_to_run) + + # Windows removes empty variables from the environment when passing it + # to a new process. This means it is impossible to pass an empty filter + # into a process using the environment variable. However, we can still + # test the case when the variable is not supplied (i.e., gtest_filter is + # None). + # pylint: disable-msg=C6403 + if CAN_TEST_EMPTY_FILTER or gtest_filter != '': + SetEnvVar(FILTER_ENV_VAR, gtest_filter) + partition = [] + for i in range(0, total_shards): + (tests_run, exit_code) = RunWithSharding(total_shards, i, args) + if check_exit_0: + self.assertEqual(0, exit_code) + partition.append(tests_run) + + self.AssertPartitionIsValid(tests_to_run, partition) + SetEnvVar(FILTER_ENV_VAR, None) + # pylint: enable-msg=C6403 + + def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run): + """Checks that the binary runs correct set of tests for the given filter. + + Runs googletest-filter-unittest_ with the given filter, and enables + disabled tests. Verifies that the right set of tests were run. + + Args: + gtest_filter: A filter to apply to the tests. + tests_to_run: A set of tests expected to run. + """ + + tests_to_run = self.AdjustForParameterizedTests(tests_to_run) + + # Construct the command line. + args = ['--%s' % ALSO_RUN_DISABLED_TESTS_FLAG] + if gtest_filter is not None: + args.append('--%s=%s' % (FILTER_FLAG, gtest_filter)) + + tests_run = RunAndExtractTestList(args)[0] + self.AssertSetEqual(tests_run, tests_to_run) + + def setUp(self): + """Sets up test case. + + Determines whether value-parameterized tests are enabled in the binary and + sets the flags accordingly. + """ + + global param_tests_present + if param_tests_present is None: + param_tests_present = PARAM_TEST_REGEX.search( + RunAndReturnOutput()) is not None + + def testDefaultBehavior(self): + """Tests the behavior of not specifying the filter.""" + + self.RunAndVerify(None, ACTIVE_TESTS) + + def testDefaultBehaviorWithShards(self): + """Tests the behavior without the filter, with sharding enabled.""" + + self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS) + self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS) + self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS) + self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS) + self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS) + + def testEmptyFilter(self): + """Tests an empty filter.""" + + self.RunAndVerify('', []) + self.RunAndVerifyWithSharding('', 1, []) + self.RunAndVerifyWithSharding('', 2, []) + + def testBadFilter(self): + """Tests a filter that matches nothing.""" + + self.RunAndVerify('BadFilter', []) + self.RunAndVerifyAllowingDisabled('BadFilter', []) + + def testFullName(self): + """Tests filtering by full name.""" + + self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz']) + self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz']) + self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz']) + + def testUniversalFilters(self): + """Tests filters that match everything.""" + + self.RunAndVerify('*', ACTIVE_TESTS) + self.RunAndVerify('*.*', ACTIVE_TESTS) + self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS) + self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS) + self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS) + + def testFilterByTestCase(self): + """Tests filtering by test case name.""" + + self.RunAndVerify('FooTest.*', ['FooTest.Abc', 
'FooTest.Xyz']) + + BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB'] + self.RunAndVerify('BazTest.*', BAZ_TESTS) + self.RunAndVerifyAllowingDisabled('BazTest.*', + BAZ_TESTS + ['BazTest.DISABLED_TestC']) + + def testFilterByTest(self): + """Tests filtering by test name.""" + + self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne']) + + def testFilterDisabledTests(self): + """Select only the disabled tests to run.""" + + self.RunAndVerify('DISABLED_FoobarTest.Test1', []) + self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1', + ['DISABLED_FoobarTest.Test1']) + + self.RunAndVerify('*DISABLED_*', []) + self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS) + + self.RunAndVerify('*.DISABLED_*', []) + self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [ + 'BarTest.DISABLED_TestFour', + 'BarTest.DISABLED_TestFive', + 'BazTest.DISABLED_TestC', + 'DISABLED_FoobarTest.DISABLED_Test2', + ]) + + self.RunAndVerify('DISABLED_*', []) + self.RunAndVerifyAllowingDisabled('DISABLED_*', [ + 'DISABLED_FoobarTest.Test1', + 'DISABLED_FoobarTest.DISABLED_Test2', + 'DISABLED_FoobarbazTest.TestA', + ]) + + def testWildcardInTestCaseName(self): + """Tests using wildcard in the test case name.""" + + self.RunAndVerify('*a*.*', [ + 'BarTest.TestOne', + 'BarTest.TestTwo', + 'BarTest.TestThree', + + 'BazTest.TestOne', + 'BazTest.TestA', + 'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS) + + def testWildcardInTestName(self): + """Tests using wildcard in the test name.""" + + self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA']) + + def testFilterWithoutDot(self): + """Tests a filter that has no '.' in it.""" + + self.RunAndVerify('*z*', [ + 'FooTest.Xyz', + + 'BazTest.TestOne', + 'BazTest.TestA', + 'BazTest.TestB', + ]) + + def testTwoPatterns(self): + """Tests filters that consist of two patterns.""" + + self.RunAndVerify('Foo*.*:*A*', [ + 'FooTest.Abc', + 'FooTest.Xyz', + + 'BazTest.TestA', + ]) + + # An empty pattern + a non-empty one + self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA']) + + def testThreePatterns(self): + """Tests filters that consist of three patterns.""" + + self.RunAndVerify('*oo*:*A*:*One', [ + 'FooTest.Abc', + 'FooTest.Xyz', + + 'BarTest.TestOne', + + 'BazTest.TestOne', + 'BazTest.TestA', + ]) + + # The 2nd pattern is empty. + self.RunAndVerify('*oo*::*One', [ + 'FooTest.Abc', + 'FooTest.Xyz', + + 'BarTest.TestOne', + + 'BazTest.TestOne', + ]) + + # The last 2 patterns are empty. + self.RunAndVerify('*oo*::', [ + 'FooTest.Abc', + 'FooTest.Xyz', + ]) + + def testNegativeFilters(self): + self.RunAndVerify('*-BazTest.TestOne', [ + 'FooTest.Abc', + 'FooTest.Xyz', + + 'BarTest.TestOne', + 'BarTest.TestTwo', + 'BarTest.TestThree', + + 'BazTest.TestA', + 'BazTest.TestB', + ] + DEATH_TESTS + PARAM_TESTS) + + self.RunAndVerify('*-FooTest.Abc:BazTest.*', [ + 'FooTest.Xyz', + + 'BarTest.TestOne', + 'BarTest.TestTwo', + 'BarTest.TestThree', + ] + DEATH_TESTS + PARAM_TESTS) + + self.RunAndVerify('BarTest.*-BarTest.TestOne', [ + 'BarTest.TestTwo', + 'BarTest.TestThree', + ]) + + # Tests without leading '*'. + self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [ + 'BarTest.TestOne', + 'BarTest.TestTwo', + 'BarTest.TestThree', + ] + DEATH_TESTS + PARAM_TESTS) + + # Value parameterized tests. + self.RunAndVerify('*/*', PARAM_TESTS) + + # Value parameterized tests filtering by the sequence name. 
+ self.RunAndVerify('SeqP/*', [ + 'SeqP/ParamTest.TestX/0', + 'SeqP/ParamTest.TestX/1', + 'SeqP/ParamTest.TestY/0', + 'SeqP/ParamTest.TestY/1', + ]) + + # Value parameterized tests filtering by the test name. + self.RunAndVerify('*/0', [ + 'SeqP/ParamTest.TestX/0', + 'SeqP/ParamTest.TestY/0', + 'SeqQ/ParamTest.TestX/0', + 'SeqQ/ParamTest.TestY/0', + ]) + + def testFlagOverridesEnvVar(self): + """Tests that the filter flag overrides the filtering env. variable.""" + + SetEnvVar(FILTER_ENV_VAR, 'Foo*') + args = ['--%s=%s' % (FILTER_FLAG, '*One')] + tests_run = RunAndExtractTestList(args)[0] + SetEnvVar(FILTER_ENV_VAR, None) + + self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne']) + + def testShardStatusFileIsCreated(self): + """Tests that the shard file is created if specified in the environment.""" + + shard_status_file = os.path.join(gtest_test_utils.GetTempDir(), + 'shard_status_file') + self.assert_(not os.path.exists(shard_status_file)) + + extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file} + try: + InvokeWithModifiedEnv(extra_env, RunAndReturnOutput) + finally: + self.assert_(os.path.exists(shard_status_file)) + os.remove(shard_status_file) + + def testShardStatusFileIsCreatedWithListTests(self): + """Tests that the shard file is created with the "list_tests" flag.""" + + shard_status_file = os.path.join(gtest_test_utils.GetTempDir(), + 'shard_status_file2') + self.assert_(not os.path.exists(shard_status_file)) + + extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file} + try: + output = InvokeWithModifiedEnv(extra_env, + RunAndReturnOutput, + [LIST_TESTS_FLAG]) + finally: + # This assertion ensures that Google Test enumerated the tests as + # opposed to running them. + self.assert_('[==========]' not in output, + 'Unexpected output during test enumeration.\n' + 'Please ensure that LIST_TESTS_FLAG is assigned the\n' + 'correct flag value for listing Google Test tests.') + + self.assert_(os.path.exists(shard_status_file)) + os.remove(shard_status_file) + + if SUPPORTS_DEATH_TESTS: + def testShardingWorksWithDeathTests(self): + """Tests integration with death tests and sharding.""" + + gtest_filter = 'HasDeathTest.*:SeqP/*' + expected_tests = [ + 'HasDeathTest.Test1', + 'HasDeathTest.Test2', + + 'SeqP/ParamTest.TestX/0', + 'SeqP/ParamTest.TestX/1', + 'SeqP/ParamTest.TestY/0', + 'SeqP/ParamTest.TestY/1', + ] + + for flag in ['--gtest_death_test_style=threadsafe', + '--gtest_death_test_style=fast']: + self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests, + check_exit_0=True, args=[flag]) + self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests, + check_exit_0=True, args=[flag]) + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-filter-unittest_.cc b/security/nss/gtests/google_test/gtest/test/googletest-filter-unittest_.cc new file mode 100644 index 0000000000..d30ec9c78b --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-filter-unittest_.cc @@ -0,0 +1,137 @@ +// Copyright 2005, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Unit test for Google Test test filters. +// +// A user can specify which test(s) in a Google Test program to run via +// either the GTEST_FILTER environment variable or the --gtest_filter +// flag. This is used for testing such functionality. +// +// The program will be invoked from a Python unit test. Don't run it +// directly. + +#include "gtest/gtest.h" + +namespace { + +// Test case FooTest. + +class FooTest : public testing::Test { +}; + +TEST_F(FooTest, Abc) { +} + +TEST_F(FooTest, Xyz) { + FAIL() << "Expected failure."; +} + +// Test case BarTest. + +TEST(BarTest, TestOne) { +} + +TEST(BarTest, TestTwo) { +} + +TEST(BarTest, TestThree) { +} + +TEST(BarTest, DISABLED_TestFour) { + FAIL() << "Expected failure."; +} + +TEST(BarTest, DISABLED_TestFive) { + FAIL() << "Expected failure."; +} + +// Test case BazTest. + +TEST(BazTest, TestOne) { + FAIL() << "Expected failure."; +} + +TEST(BazTest, TestA) { +} + +TEST(BazTest, TestB) { +} + +TEST(BazTest, DISABLED_TestC) { + FAIL() << "Expected failure."; +} + +// Test case HasDeathTest + +TEST(HasDeathTest, Test1) { + EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*"); +} + +// We need at least two death tests to make sure that the all death tests +// aren't on the first shard. 
+TEST(HasDeathTest, Test2) { + EXPECT_DEATH_IF_SUPPORTED(exit(1), ".*"); +} + +// Test case FoobarTest + +TEST(DISABLED_FoobarTest, Test1) { + FAIL() << "Expected failure."; +} + +TEST(DISABLED_FoobarTest, DISABLED_Test2) { + FAIL() << "Expected failure."; +} + +// Test case FoobarbazTest + +TEST(DISABLED_FoobarbazTest, TestA) { + FAIL() << "Expected failure."; +} + +class ParamTest : public testing::TestWithParam<int> { +}; + +TEST_P(ParamTest, TestX) { +} + +TEST_P(ParamTest, TestY) { +} + +INSTANTIATE_TEST_SUITE_P(SeqP, ParamTest, testing::Values(1, 2)); +INSTANTIATE_TEST_SUITE_P(SeqQ, ParamTest, testing::Values(5, 6)); + +} // namespace + +int main(int argc, char **argv) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-global-environment-unittest.py b/security/nss/gtests/google_test/gtest/test/googletest-global-environment-unittest.py new file mode 100644 index 0000000000..32ba628535 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-global-environment-unittest.py @@ -0,0 +1,72 @@ +# Copyright 2021 Google Inc. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +"""Unit test for Google Test's global test environment behavior. + +A user can specify a global test environment via +testing::AddGlobalTestEnvironment. Failures in the global environment should +result in all unit tests being skipped. + +This script tests such functionality by invoking +googletest-global-environment-unittest_ (a program written with Google Test). +""" + +import gtest_test_utils + + +def RunAndReturnOutput(): + """Runs the test program and returns its output.""" + + return gtest_test_utils.Subprocess([ + gtest_test_utils.GetTestExecutablePath( + 'googletest-global-environment-unittest_') + ]).output + + +class GTestGlobalEnvironmentUnitTest(gtest_test_utils.TestCase): + """Tests global test environment failures.""" + + def testEnvironmentSetUpFails(self): + """Tests the behavior of not specifying the fail_fast.""" + + # Run the test. 
+ txt = RunAndReturnOutput() + + # We should see the text of the global environment setup error. + self.assertIn('Canned environment setup error', txt) + + # Our test should have been skipped due to the error, and not treated as a + # pass. + self.assertIn('[ SKIPPED ] 1 test', txt) + self.assertIn('[ PASSED ] 0 tests', txt) + + # The test case shouldn't have been run. + self.assertNotIn('Unexpected call', txt) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-global-environment-unittest_.cc b/security/nss/gtests/google_test/gtest/test/googletest-global-environment-unittest_.cc new file mode 100644 index 0000000000..f401b2fac2 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-global-environment-unittest_.cc @@ -0,0 +1,58 @@ +// Copyright 2005, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Unit test for Google Test global test environments. +// +// The program will be invoked from a Python unit test. Don't run it +// directly. + +#include "gtest/gtest.h" + +namespace { + +// An environment that always fails in its SetUp method. +class FailingEnvironment final : public ::testing::Environment { + public: + void SetUp() override { FAIL() << "Canned environment setup error"; } +}; + +// Register the environment. +auto* const g_environment_ = + ::testing::AddGlobalTestEnvironment(new FailingEnvironment); + +// A test that doesn't actually run. +TEST(SomeTest, DoesFoo) { FAIL() << "Unexpected call"; } + +} // namespace + +int main(int argc, char** argv) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-json-outfiles-test.py b/security/nss/gtests/google_test/gtest/test/googletest-json-outfiles-test.py new file mode 100644 index 0000000000..8ef47b8f97 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-json-outfiles-test.py @@ -0,0 +1,191 @@ +#!/usr/bin/env python +# Copyright 2018, Google Inc. 
+# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unit test for the gtest_json_output module.""" + +import json +import os +import gtest_json_test_utils +import gtest_test_utils + +GTEST_OUTPUT_SUBDIR = 'json_outfiles' +GTEST_OUTPUT_1_TEST = 'gtest_xml_outfile1_test_' +GTEST_OUTPUT_2_TEST = 'gtest_xml_outfile2_test_' + +EXPECTED_1 = { + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'name': + u'AllTests', + u'testsuites': [{ + u'name': + u'PropertyOne', + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'TestSomeProperties', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'PropertyOne', + u'SetUpProp': u'1', + u'TestSomeProperty': u'1', + u'TearDownProp': u'1', + }], + }], +} + +EXPECTED_2 = { + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'name': + u'AllTests', + u'testsuites': [{ + u'name': + u'PropertyTwo', + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'TestSomeProperties', + u'status': u'RUN', + u'result': u'COMPLETED', + u'timestamp': u'*', + u'time': u'*', + u'classname': u'PropertyTwo', + u'SetUpProp': u'2', + u'TestSomeProperty': u'2', + u'TearDownProp': u'2', + }], + }], +} + + +class GTestJsonOutFilesTest(gtest_test_utils.TestCase): + """Unit test for Google Test's JSON output functionality.""" + + def setUp(self): + # We want the trailing '/' that the last "" provides in os.path.join, for + # telling Google Test to create an output directory instead of a single file + # for xml output. 
+ self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(), + GTEST_OUTPUT_SUBDIR, '') + self.DeleteFilesAndDir() + + def tearDown(self): + self.DeleteFilesAndDir() + + def DeleteFilesAndDir(self): + try: + os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + '.json')) + except os.error: + pass + try: + os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + '.json')) + except os.error: + pass + try: + os.rmdir(self.output_dir_) + except os.error: + pass + + def testOutfile1(self): + self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_1) + + def testOutfile2(self): + self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_2) + + def _TestOutFile(self, test_name, expected): + gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name) + command = [gtest_prog_path, '--gtest_output=json:%s' % self.output_dir_] + p = gtest_test_utils.Subprocess(command, + working_dir=gtest_test_utils.GetTempDir()) + self.assert_(p.exited) + self.assertEquals(0, p.exit_code) + + output_file_name1 = test_name + '.json' + output_file1 = os.path.join(self.output_dir_, output_file_name1) + output_file_name2 = 'lt-' + output_file_name1 + output_file2 = os.path.join(self.output_dir_, output_file_name2) + self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2), + output_file1) + + if os.path.isfile(output_file1): + with open(output_file1) as f: + actual = json.load(f) + else: + with open(output_file2) as f: + actual = json.load(f) + self.assertEqual(expected, gtest_json_test_utils.normalize(actual)) + + +if __name__ == '__main__': + os.environ['GTEST_STACK_TRACE_DEPTH'] = '0' + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-json-output-unittest.py b/security/nss/gtests/google_test/gtest/test/googletest-json-output-unittest.py new file mode 100644 index 0000000000..41c8565144 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-json-output-unittest.py @@ -0,0 +1,848 @@ +#!/usr/bin/env python +# Copyright 2018, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Unit test for the gtest_json_output module.""" + +import datetime +import errno +import json +import os +import re +import sys + +import gtest_json_test_utils +import gtest_test_utils + +GTEST_FILTER_FLAG = '--gtest_filter' +GTEST_LIST_TESTS_FLAG = '--gtest_list_tests' +GTEST_OUTPUT_FLAG = '--gtest_output' +GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.json' +GTEST_PROGRAM_NAME = 'gtest_xml_output_unittest_' + +# The flag indicating stacktraces are not supported +NO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support' + +SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv + +if SUPPORTS_STACK_TRACES: + STACK_TRACE_TEMPLATE = '\nStack trace:\n*' +else: + STACK_TRACE_TEMPLATE = '' + +EXPECTED_NON_EMPTY = { + u'tests': + 26, + u'failures': + 5, + u'disabled': + 2, + u'errors': + 0, + u'timestamp': + u'*', + u'time': + u'*', + u'ad_hoc_property': + u'42', + u'name': + u'AllTests', + u'testsuites': [{ + u'name': + u'SuccessfulTest', + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'Succeeds', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'SuccessfulTest' + }] + }, { + u'name': + u'FailedTest', + u'tests': + 1, + u'failures': + 1, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': + u'Fails', + u'status': + u'RUN', + u'result': + u'COMPLETED', + u'time': + u'*', + u'timestamp': + u'*', + u'classname': + u'FailedTest', + u'failures': [{ + u'failure': u'gtest_xml_output_unittest_.cc:*\n' + u'Expected equality of these values:\n' + u' 1\n 2' + STACK_TRACE_TEMPLATE, + u'type': u'' + }] + }] + }, { + u'name': + u'DisabledTest', + u'tests': + 1, + u'failures': + 0, + u'disabled': + 1, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'DISABLED_test_not_run', + u'status': u'NOTRUN', + u'result': u'SUPPRESSED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'DisabledTest' + }] + }, { + u'name': + u'SkippedTest', + u'tests': + 3, + u'failures': + 1, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'Skipped', + u'status': u'RUN', + u'result': u'SKIPPED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'SkippedTest' + }, { + u'name': u'SkippedWithMessage', + u'status': u'RUN', + u'result': u'SKIPPED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'SkippedTest' + }, { + u'name': + u'SkippedAfterFailure', + u'status': + u'RUN', + u'result': + u'COMPLETED', + u'time': + u'*', + u'timestamp': + u'*', + u'classname': + u'SkippedTest', + u'failures': [{ + u'failure': u'gtest_xml_output_unittest_.cc:*\n' + u'Expected equality of these values:\n' + u' 1\n 2' + STACK_TRACE_TEMPLATE, + u'type': u'' + }] + }] + }, { + u'name': + u'MixedResultTest', + u'tests': + 3, + u'failures': + 1, + u'disabled': + 1, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'Succeeds', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'MixedResultTest' + }, { + u'name': + u'Fails', + u'status': + u'RUN', + u'result': + u'COMPLETED', + u'time': + u'*', + u'timestamp': + u'*', + u'classname': + u'MixedResultTest', + u'failures': [{ + u'failure': u'gtest_xml_output_unittest_.cc:*\n' + u'Expected equality of these values:\n' + u' 1\n 2' + STACK_TRACE_TEMPLATE, + u'type': u'' + }, { + 
u'failure': u'gtest_xml_output_unittest_.cc:*\n' + u'Expected equality of these values:\n' + u' 2\n 3' + STACK_TRACE_TEMPLATE, + u'type': u'' + }] + }, { + u'name': u'DISABLED_test', + u'status': u'NOTRUN', + u'result': u'SUPPRESSED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'MixedResultTest' + }] + }, { + u'name': + u'XmlQuotingTest', + u'tests': + 1, + u'failures': + 1, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': + u'OutputsCData', + u'status': + u'RUN', + u'result': + u'COMPLETED', + u'time': + u'*', + u'timestamp': + u'*', + u'classname': + u'XmlQuotingTest', + u'failures': [{ + u'failure': u'gtest_xml_output_unittest_.cc:*\n' + u'Failed\nXML output: <?xml encoding="utf-8">' + u'<top><![CDATA[cdata text]]></top>' + + STACK_TRACE_TEMPLATE, + u'type': u'' + }] + }] + }, { + u'name': + u'InvalidCharactersTest', + u'tests': + 1, + u'failures': + 1, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': + u'InvalidCharactersInMessage', + u'status': + u'RUN', + u'result': + u'COMPLETED', + u'time': + u'*', + u'timestamp': + u'*', + u'classname': + u'InvalidCharactersTest', + u'failures': [{ + u'failure': u'gtest_xml_output_unittest_.cc:*\n' + u'Failed\nInvalid characters in brackets' + u' [\x01\x02]' + STACK_TRACE_TEMPLATE, + u'type': u'' + }] + }] + }, { + u'name': + u'PropertyRecordingTest', + u'tests': + 4, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'SetUpTestSuite': + u'yes', + u'TearDownTestSuite': + u'aye', + u'testsuite': [{ + u'name': u'OneProperty', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'PropertyRecordingTest', + u'key_1': u'1' + }, { + u'name': u'IntValuedProperty', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'PropertyRecordingTest', + u'key_int': u'1' + }, { + u'name': u'ThreeProperties', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'PropertyRecordingTest', + u'key_1': u'1', + u'key_2': u'2', + u'key_3': u'3' + }, { + u'name': u'TwoValuesForOneKeyUsesLastValue', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'PropertyRecordingTest', + u'key_1': u'2' + }] + }, { + u'name': + u'NoFixtureTest', + u'tests': + 3, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'RecordProperty', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'NoFixtureTest', + u'key': u'1' + }, { + u'name': u'ExternalUtilityThatCallsRecordIntValuedProperty', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'NoFixtureTest', + u'key_for_utility_int': u'1' + }, { + u'name': u'ExternalUtilityThatCallsRecordStringValuedProperty', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'NoFixtureTest', + u'key_for_utility_string': u'1' + }] + }, { + u'name': + u'TypedTest/0', + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'HasTypeParamAttribute', + u'type_param': u'int', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': 
u'*', + u'classname': u'TypedTest/0' + }] + }, { + u'name': + u'TypedTest/1', + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'HasTypeParamAttribute', + u'type_param': u'long', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'TypedTest/1' + }] + }, { + u'name': + u'Single/TypeParameterizedTestSuite/0', + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'HasTypeParamAttribute', + u'type_param': u'int', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'Single/TypeParameterizedTestSuite/0' + }] + }, { + u'name': + u'Single/TypeParameterizedTestSuite/1', + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'HasTypeParamAttribute', + u'type_param': u'long', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'Single/TypeParameterizedTestSuite/1' + }] + }, { + u'name': + u'Single/ValueParamTest', + u'tests': + 4, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'HasValueParamAttribute/0', + u'value_param': u'33', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'Single/ValueParamTest' + }, { + u'name': u'HasValueParamAttribute/1', + u'value_param': u'42', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'Single/ValueParamTest' + }, { + u'name': u'AnotherTestThatHasValueParamAttribute/0', + u'value_param': u'33', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'Single/ValueParamTest' + }, { + u'name': u'AnotherTestThatHasValueParamAttribute/1', + u'value_param': u'42', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'Single/ValueParamTest' + }] + }] +} + +EXPECTED_FILTERED = { + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'name': + u'AllTests', + u'ad_hoc_property': + u'42', + u'testsuites': [{ + u'name': + u'SuccessfulTest', + u'tests': + 1, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': u'Succeeds', + u'status': u'RUN', + u'result': u'COMPLETED', + u'time': u'*', + u'timestamp': u'*', + u'classname': u'SuccessfulTest', + }] + }], +} + +EXPECTED_NO_TEST = { + u'tests': + 0, + u'failures': + 0, + u'disabled': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'name': + u'AllTests', + u'testsuites': [{ + u'name': + u'NonTestSuiteFailure', + u'tests': + 1, + u'failures': + 1, + u'disabled': + 0, + u'skipped': + 0, + u'errors': + 0, + u'time': + u'*', + u'timestamp': + u'*', + u'testsuite': [{ + u'name': + u'', + u'status': + u'RUN', + u'result': + u'COMPLETED', + u'time': + u'*', + u'timestamp': + u'*', + u'classname': + u'', + u'failures': [{ + u'failure': u'gtest_no_test_unittest.cc:*\n' + u'Expected equality of these values:\n' + u' 1\n 2' + STACK_TRACE_TEMPLATE, + u'type': u'', + }] + }] + }], +} + +GTEST_PROGRAM_PATH = 
gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME) + +SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess( + [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output + + +class GTestJsonOutputUnitTest(gtest_test_utils.TestCase): + """Unit test for Google Test's JSON output functionality. + """ + + # This test currently breaks on platforms that do not support typed and + # type-parameterized tests, so we don't run it under them. + if SUPPORTS_TYPED_TESTS: + + def testNonEmptyJsonOutput(self): + """Verifies JSON output for a Google Test binary with non-empty output. + + Runs a test program that generates a non-empty JSON output, and + tests that the JSON output is expected. + """ + self._TestJsonOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY, 1) + + def testNoTestJsonOutput(self): + """Verifies JSON output for a Google Test binary without actual tests. + + Runs a test program that generates an JSON output for a binary with no + tests, and tests that the JSON output is expected. + """ + + self._TestJsonOutput('gtest_no_test_unittest', EXPECTED_NO_TEST, 0) + + def testTimestampValue(self): + """Checks whether the timestamp attribute in the JSON output is valid. + + Runs a test program that generates an empty JSON output, and checks if + the timestamp attribute in the testsuites tag is valid. + """ + actual = self._GetJsonOutput('gtest_no_test_unittest', [], 0) + date_time_str = actual['timestamp'] + # datetime.strptime() is only available in Python 2.5+ so we have to + # parse the expected datetime manually. + match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str) + self.assertTrue( + re.match, + 'JSON datettime string %s has incorrect format' % date_time_str) + date_time_from_json = datetime.datetime( + year=int(match.group(1)), month=int(match.group(2)), + day=int(match.group(3)), hour=int(match.group(4)), + minute=int(match.group(5)), second=int(match.group(6))) + + time_delta = abs(datetime.datetime.now() - date_time_from_json) + # timestamp value should be near the current local time + self.assertTrue(time_delta < datetime.timedelta(seconds=600), + 'time_delta is %s' % time_delta) + + def testDefaultOutputFile(self): + """Verifies the default output file name. + + Confirms that Google Test produces an JSON output file with the expected + default name if no name is explicitly specified. + """ + output_file = os.path.join(gtest_test_utils.GetTempDir(), + GTEST_DEFAULT_OUTPUT_FILE) + gtest_prog_path = gtest_test_utils.GetTestExecutablePath( + 'gtest_no_test_unittest') + try: + os.remove(output_file) + except OSError: + e = sys.exc_info()[1] + if e.errno != errno.ENOENT: + raise + + p = gtest_test_utils.Subprocess( + [gtest_prog_path, '%s=json' % GTEST_OUTPUT_FLAG], + working_dir=gtest_test_utils.GetTempDir()) + self.assert_(p.exited) + self.assertEquals(0, p.exit_code) + self.assert_(os.path.isfile(output_file)) + + def testSuppressedJsonOutput(self): + """Verifies that no JSON output is generated. + + Tests that no JSON file is generated if the default JSON listener is + shut down before RUN_ALL_TESTS is invoked. + """ + + json_path = os.path.join(gtest_test_utils.GetTempDir(), + GTEST_PROGRAM_NAME + 'out.json') + if os.path.isfile(json_path): + os.remove(json_path) + + command = [GTEST_PROGRAM_PATH, + '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path), + '--shut_down_xml'] + p = gtest_test_utils.Subprocess(command) + if p.terminated_by_signal: + # p.signal is available only if p.terminated_by_signal is True. 
+ self.assertFalse( + p.terminated_by_signal, + '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal)) + else: + self.assert_(p.exited) + self.assertEquals(1, p.exit_code, + "'%s' exited with code %s, which doesn't match " + 'the expected exit code %s.' + % (command, p.exit_code, 1)) + + self.assert_(not os.path.isfile(json_path)) + + def testFilteredTestJsonOutput(self): + """Verifies JSON output when a filter is applied. + + Runs a test program that executes only some tests and verifies that + non-selected tests do not show up in the JSON output. + """ + + self._TestJsonOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED, 0, + extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG]) + + def _GetJsonOutput(self, gtest_prog_name, extra_args, expected_exit_code): + """Returns the JSON output generated by running the program gtest_prog_name. + + Furthermore, the program's exit code must be expected_exit_code. + + Args: + gtest_prog_name: Google Test binary name. + extra_args: extra arguments to binary invocation. + expected_exit_code: program's exit code. + """ + json_path = os.path.join(gtest_test_utils.GetTempDir(), + gtest_prog_name + 'out.json') + gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name) + + command = ( + [gtest_prog_path, '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path)] + + extra_args + ) + p = gtest_test_utils.Subprocess(command) + if p.terminated_by_signal: + self.assert_(False, + '%s was killed by signal %d' % (gtest_prog_name, p.signal)) + else: + self.assert_(p.exited) + self.assertEquals(expected_exit_code, p.exit_code, + "'%s' exited with code %s, which doesn't match " + 'the expected exit code %s.' + % (command, p.exit_code, expected_exit_code)) + with open(json_path) as f: + actual = json.load(f) + return actual + + def _TestJsonOutput(self, gtest_prog_name, expected, + expected_exit_code, extra_args=None): + """Checks the JSON output generated by the Google Test binary. + + Asserts that the JSON document generated by running the program + gtest_prog_name matches expected_json, a string containing another + JSON document. Furthermore, the program's exit code must be + expected_exit_code. + + Args: + gtest_prog_name: Google Test binary name. + expected: expected output. + expected_exit_code: program's exit code. + extra_args: extra arguments to binary invocation. + """ + + actual = self._GetJsonOutput(gtest_prog_name, extra_args or [], + expected_exit_code) + self.assertEqual(expected, gtest_json_test_utils.normalize(actual)) + + +if __name__ == '__main__': + if NO_STACKTRACE_SUPPORT_FLAG in sys.argv: + # unittest.main() can't handle unknown flags + sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG) + + os.environ['GTEST_STACK_TRACE_DEPTH'] = '1' + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-list-tests-unittest.py b/security/nss/gtests/google_test/gtest/test/googletest-list-tests-unittest.py new file mode 100755 index 0000000000..81423a339e --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-list-tests-unittest.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python +# +# Copyright 2006, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unit test for Google Test's --gtest_list_tests flag. + +A user can ask Google Test to list all tests by specifying the +--gtest_list_tests flag. This script tests such functionality +by invoking googletest-list-tests-unittest_ (a program written with +Google Test) the command line flags. +""" + +import re +import gtest_test_utils + +# Constants. + +# The command line flag for enabling/disabling listing all tests. +LIST_TESTS_FLAG = 'gtest_list_tests' + +# Path to the googletest-list-tests-unittest_ program. +EXE_PATH = gtest_test_utils.GetTestExecutablePath('googletest-list-tests-unittest_') + +# The expected output when running googletest-list-tests-unittest_ with +# --gtest_list_tests +EXPECTED_OUTPUT_NO_FILTER_RE = re.compile(r"""FooDeathTest\. + Test1 +Foo\. + Bar1 + Bar2 + DISABLED_Bar3 +Abc\. + Xyz + Def +FooBar\. + Baz +FooTest\. + Test1 + DISABLED_Test2 + Test3 +TypedTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\. + TestA + TestB +TypedTest/1\. # TypeParam = int\s*\*( __ptr64)? + TestA + TestB +TypedTest/2\. # TypeParam = .*MyArray<bool,\s*42> + TestA + TestB +My/TypeParamTest/0\. # TypeParam = (VeryLo{245}|class VeryLo{239})\.\.\. + TestA + TestB +My/TypeParamTest/1\. # TypeParam = int\s*\*( __ptr64)? + TestA + TestB +My/TypeParamTest/2\. # TypeParam = .*MyArray<bool,\s*42> + TestA + TestB +MyInstantiation/ValueParamTest\. + TestA/0 # GetParam\(\) = one line + TestA/1 # GetParam\(\) = two\\nlines + TestA/2 # GetParam\(\) = a very\\nlo{241}\.\.\. + TestB/0 # GetParam\(\) = one line + TestB/1 # GetParam\(\) = two\\nlines + TestB/2 # GetParam\(\) = a very\\nlo{241}\.\.\. +""") + +# The expected output when running googletest-list-tests-unittest_ with +# --gtest_list_tests and --gtest_filter=Foo*. +EXPECTED_OUTPUT_FILTER_FOO_RE = re.compile(r"""FooDeathTest\. + Test1 +Foo\. + Bar1 + Bar2 + DISABLED_Bar3 +FooBar\. + Baz +FooTest\. + Test1 + DISABLED_Test2 + Test3 +""") + +# Utilities. + + +def Run(args): + """Runs googletest-list-tests-unittest_ and returns the list of tests printed.""" + + return gtest_test_utils.Subprocess([EXE_PATH] + args, + capture_stderr=False).output + + +# The unit test. 
+ + +class GTestListTestsUnitTest(gtest_test_utils.TestCase): + """Tests using the --gtest_list_tests flag to list all tests.""" + + def RunAndVerify(self, flag_value, expected_output_re, other_flag): + """Runs googletest-list-tests-unittest_ and verifies that it prints + the correct tests. + + Args: + flag_value: value of the --gtest_list_tests flag; + None if the flag should not be present. + expected_output_re: regular expression that matches the expected + output after running command; + other_flag: a different flag to be passed to command + along with gtest_list_tests; + None if the flag should not be present. + """ + + if flag_value is None: + flag = '' + flag_expression = 'not set' + elif flag_value == '0': + flag = '--%s=0' % LIST_TESTS_FLAG + flag_expression = '0' + else: + flag = '--%s' % LIST_TESTS_FLAG + flag_expression = '1' + + args = [flag] + + if other_flag is not None: + args += [other_flag] + + output = Run(args) + + if expected_output_re: + self.assert_( + expected_output_re.match(output), + ('when %s is %s, the output of "%s" is "%s",\n' + 'which does not match regex "%s"' % + (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output, + expected_output_re.pattern))) + else: + self.assert_( + not EXPECTED_OUTPUT_NO_FILTER_RE.match(output), + ('when %s is %s, the output of "%s" is "%s"'% + (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output))) + + def testDefaultBehavior(self): + """Tests the behavior of the default mode.""" + + self.RunAndVerify(flag_value=None, + expected_output_re=None, + other_flag=None) + + def testFlag(self): + """Tests using the --gtest_list_tests flag.""" + + self.RunAndVerify(flag_value='0', + expected_output_re=None, + other_flag=None) + self.RunAndVerify(flag_value='1', + expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE, + other_flag=None) + + def testOverrideNonFilterFlags(self): + """Tests that --gtest_list_tests overrides the non-filter flags.""" + + self.RunAndVerify(flag_value='1', + expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE, + other_flag='--gtest_break_on_failure') + + def testWithFilterFlags(self): + """Tests that --gtest_list_tests takes into account the + --gtest_filter flag.""" + + self.RunAndVerify(flag_value='1', + expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE, + other_flag='--gtest_filter=Foo*') + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-list-tests-unittest_.cc b/security/nss/gtests/google_test/gtest/test/googletest-list-tests-unittest_.cc new file mode 100644 index 0000000000..493c6f0046 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-list-tests-unittest_.cc @@ -0,0 +1,156 @@ +// Copyright 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Unit test for Google Test's --gtest_list_tests flag. +// +// A user can ask Google Test to list all tests that will run +// so that when using a filter, a user will know what +// tests to look for. The tests will not be run after listing. +// +// This program will be invoked from a Python unit test. +// Don't run it directly. + +#include "gtest/gtest.h" + +// Several different test cases and tests that will be listed. +TEST(Foo, Bar1) { +} + +TEST(Foo, Bar2) { +} + +TEST(Foo, DISABLED_Bar3) { +} + +TEST(Abc, Xyz) { +} + +TEST(Abc, Def) { +} + +TEST(FooBar, Baz) { +} + +class FooTest : public testing::Test { +}; + +TEST_F(FooTest, Test1) { +} + +TEST_F(FooTest, DISABLED_Test2) { +} + +TEST_F(FooTest, Test3) { +} + +TEST(FooDeathTest, Test1) { +} + +// A group of value-parameterized tests. + +class MyType { + public: + explicit MyType(const std::string& a_value) : value_(a_value) {} + + const std::string& value() const { return value_; } + + private: + std::string value_; +}; + +// Teaches Google Test how to print a MyType. +void PrintTo(const MyType& x, std::ostream* os) { + *os << x.value(); +} + +class ValueParamTest : public testing::TestWithParam<MyType> { +}; + +TEST_P(ValueParamTest, TestA) { +} + +TEST_P(ValueParamTest, TestB) { +} + +INSTANTIATE_TEST_SUITE_P( + MyInstantiation, ValueParamTest, + testing::Values(MyType("one line"), + MyType("two\nlines"), + MyType("a very\nloooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong line"))); // NOLINT + +// A group of typed tests. + +// A deliberately long type name for testing the line-truncating +// behavior when printing a type parameter. +class VeryLoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooogName { // NOLINT +}; + +template <typename T> +class TypedTest : public testing::Test { +}; + +template <typename T, int kSize> +class MyArray { +}; + +typedef testing::Types<VeryLoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooogName, // NOLINT + int*, MyArray<bool, 42> > MyTypes; + +TYPED_TEST_SUITE(TypedTest, MyTypes); + +TYPED_TEST(TypedTest, TestA) { +} + +TYPED_TEST(TypedTest, TestB) { +} + +// A group of type-parameterized tests. 
+ +template <typename T> +class TypeParamTest : public testing::Test { +}; + +TYPED_TEST_SUITE_P(TypeParamTest); + +TYPED_TEST_P(TypeParamTest, TestA) { +} + +TYPED_TEST_P(TypeParamTest, TestB) { +} + +REGISTER_TYPED_TEST_SUITE_P(TypeParamTest, TestA, TestB); + +INSTANTIATE_TYPED_TEST_SUITE_P(My, TypeParamTest, MyTypes); + +int main(int argc, char **argv) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-listener-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-listener-test.cc new file mode 100644 index 0000000000..10457afe39 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-listener-test.cc @@ -0,0 +1,518 @@ +// Copyright 2009 Google Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// The Google C++ Testing and Mocking Framework (Google Test) +// +// This file verifies Google Test event listeners receive events at the +// right times. + +#include <vector> + +#include "gtest/gtest.h" +#include "gtest/internal/custom/gtest.h" + +using ::testing::AddGlobalTestEnvironment; +using ::testing::Environment; +using ::testing::InitGoogleTest; +using ::testing::Test; +using ::testing::TestSuite; +using ::testing::TestEventListener; +using ::testing::TestInfo; +using ::testing::TestPartResult; +using ::testing::UnitTest; + +// Used by tests to register their events. 
+std::vector<std::string>* g_events = nullptr; + +namespace testing { +namespace internal { + +class EventRecordingListener : public TestEventListener { + public: + explicit EventRecordingListener(const char* name) : name_(name) {} + + protected: + void OnTestProgramStart(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnTestProgramStart")); + } + + void OnTestIterationStart(const UnitTest& /*unit_test*/, + int iteration) override { + Message message; + message << GetFullMethodName("OnTestIterationStart") + << "(" << iteration << ")"; + g_events->push_back(message.GetString()); + } + + void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpStart")); + } + + void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpEnd")); + } +#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ + void OnTestCaseStart(const TestCase& /*test_case*/) override { + g_events->push_back(GetFullMethodName("OnTestCaseStart")); + } +#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ + + void OnTestStart(const TestInfo& /*test_info*/) override { + g_events->push_back(GetFullMethodName("OnTestStart")); + } + + void OnTestPartResult(const TestPartResult& /*test_part_result*/) override { + g_events->push_back(GetFullMethodName("OnTestPartResult")); + } + + void OnTestEnd(const TestInfo& /*test_info*/) override { + g_events->push_back(GetFullMethodName("OnTestEnd")); + } + +#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ + void OnTestCaseEnd(const TestCase& /*test_case*/) override { + g_events->push_back(GetFullMethodName("OnTestCaseEnd")); + } +#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ + + void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownStart")); + } + + void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownEnd")); + } + + void OnTestIterationEnd(const UnitTest& /*unit_test*/, + int iteration) override { + Message message; + message << GetFullMethodName("OnTestIterationEnd") + << "(" << iteration << ")"; + g_events->push_back(message.GetString()); + } + + void OnTestProgramEnd(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnTestProgramEnd")); + } + + private: + std::string GetFullMethodName(const char* name) { + return name_ + "." 
+ name; + } + + std::string name_; +}; + +// This listener is using OnTestSuiteStart, OnTestSuiteEnd API +class EventRecordingListener2 : public TestEventListener { + public: + explicit EventRecordingListener2(const char* name) : name_(name) {} + + protected: + void OnTestProgramStart(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnTestProgramStart")); + } + + void OnTestIterationStart(const UnitTest& /*unit_test*/, + int iteration) override { + Message message; + message << GetFullMethodName("OnTestIterationStart") << "(" << iteration + << ")"; + g_events->push_back(message.GetString()); + } + + void OnEnvironmentsSetUpStart(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpStart")); + } + + void OnEnvironmentsSetUpEnd(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnEnvironmentsSetUpEnd")); + } + + void OnTestSuiteStart(const TestSuite& /*test_suite*/) override { + g_events->push_back(GetFullMethodName("OnTestSuiteStart")); + } + + void OnTestStart(const TestInfo& /*test_info*/) override { + g_events->push_back(GetFullMethodName("OnTestStart")); + } + + void OnTestPartResult(const TestPartResult& /*test_part_result*/) override { + g_events->push_back(GetFullMethodName("OnTestPartResult")); + } + + void OnTestEnd(const TestInfo& /*test_info*/) override { + g_events->push_back(GetFullMethodName("OnTestEnd")); + } + + void OnTestSuiteEnd(const TestSuite& /*test_suite*/) override { + g_events->push_back(GetFullMethodName("OnTestSuiteEnd")); + } + + void OnEnvironmentsTearDownStart(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownStart")); + } + + void OnEnvironmentsTearDownEnd(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnEnvironmentsTearDownEnd")); + } + + void OnTestIterationEnd(const UnitTest& /*unit_test*/, + int iteration) override { + Message message; + message << GetFullMethodName("OnTestIterationEnd") << "(" << iteration + << ")"; + g_events->push_back(message.GetString()); + } + + void OnTestProgramEnd(const UnitTest& /*unit_test*/) override { + g_events->push_back(GetFullMethodName("OnTestProgramEnd")); + } + + private: + std::string GetFullMethodName(const char* name) { return name_ + "." + name; } + + std::string name_; +}; + +class EnvironmentInvocationCatcher : public Environment { + protected: + void SetUp() override { g_events->push_back("Environment::SetUp"); } + + void TearDown() override { g_events->push_back("Environment::TearDown"); } +}; + +class ListenerTest : public Test { + protected: + static void SetUpTestSuite() { + g_events->push_back("ListenerTest::SetUpTestSuite"); + } + + static void TearDownTestSuite() { + g_events->push_back("ListenerTest::TearDownTestSuite"); + } + + void SetUp() override { g_events->push_back("ListenerTest::SetUp"); } + + void TearDown() override { g_events->push_back("ListenerTest::TearDown"); } +}; + +TEST_F(ListenerTest, DoesFoo) { + // Test execution order within a test case is not guaranteed so we are not + // recording the test name. + g_events->push_back("ListenerTest::* Test Body"); + SUCCEED(); // Triggers OnTestPartResult. +} + +TEST_F(ListenerTest, DoesBar) { + g_events->push_back("ListenerTest::* Test Body"); + SUCCEED(); // Triggers OnTestPartResult. 
+} + +} // namespace internal + +} // namespace testing + +using ::testing::internal::EnvironmentInvocationCatcher; +using ::testing::internal::EventRecordingListener; +using ::testing::internal::EventRecordingListener2; + +void VerifyResults(const std::vector<std::string>& data, + const char* const* expected_data, + size_t expected_data_size) { + const size_t actual_size = data.size(); + // If the following assertion fails, a new entry will be appended to + // data. Hence we save data.size() first. + EXPECT_EQ(expected_data_size, actual_size); + + // Compares the common prefix. + const size_t shorter_size = expected_data_size <= actual_size ? + expected_data_size : actual_size; + size_t i = 0; + for (; i < shorter_size; ++i) { + ASSERT_STREQ(expected_data[i], data[i].c_str()) + << "at position " << i; + } + + // Prints extra elements in the actual data. + for (; i < actual_size; ++i) { + printf(" Actual event #%lu: %s\n", + static_cast<unsigned long>(i), data[i].c_str()); + } +} + +int main(int argc, char **argv) { + std::vector<std::string> events; + g_events = &events; + InitGoogleTest(&argc, argv); + + UnitTest::GetInstance()->listeners().Append( + new EventRecordingListener("1st")); + UnitTest::GetInstance()->listeners().Append( + new EventRecordingListener("2nd")); + UnitTest::GetInstance()->listeners().Append( + new EventRecordingListener2("3rd")); + + AddGlobalTestEnvironment(new EnvironmentInvocationCatcher); + + GTEST_CHECK_(events.size() == 0) + << "AddGlobalTestEnvironment should not generate any events itself."; + + ::testing::GTEST_FLAG(repeat) = 2; + int ret_val = RUN_ALL_TESTS(); + +#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ + + // The deprecated OnTestSuiteStart/OnTestCaseStart events are included + const char* const expected_events[] = {"1st.OnTestProgramStart", + "2nd.OnTestProgramStart", + "3rd.OnTestProgramStart", + "1st.OnTestIterationStart(0)", + "2nd.OnTestIterationStart(0)", + "3rd.OnTestIterationStart(0)", + "1st.OnEnvironmentsSetUpStart", + "2nd.OnEnvironmentsSetUpStart", + "3rd.OnEnvironmentsSetUpStart", + "Environment::SetUp", + "3rd.OnEnvironmentsSetUpEnd", + "2nd.OnEnvironmentsSetUpEnd", + "1st.OnEnvironmentsSetUpEnd", + "3rd.OnTestSuiteStart", + "1st.OnTestCaseStart", + "2nd.OnTestCaseStart", + "ListenerTest::SetUpTestSuite", + "1st.OnTestStart", + "2nd.OnTestStart", + "3rd.OnTestStart", + "ListenerTest::SetUp", + "ListenerTest::* Test Body", + "1st.OnTestPartResult", + "2nd.OnTestPartResult", + "3rd.OnTestPartResult", + "ListenerTest::TearDown", + "3rd.OnTestEnd", + "2nd.OnTestEnd", + "1st.OnTestEnd", + "1st.OnTestStart", + "2nd.OnTestStart", + "3rd.OnTestStart", + "ListenerTest::SetUp", + "ListenerTest::* Test Body", + "1st.OnTestPartResult", + "2nd.OnTestPartResult", + "3rd.OnTestPartResult", + "ListenerTest::TearDown", + "3rd.OnTestEnd", + "2nd.OnTestEnd", + "1st.OnTestEnd", + "ListenerTest::TearDownTestSuite", + "3rd.OnTestSuiteEnd", + "2nd.OnTestCaseEnd", + "1st.OnTestCaseEnd", + "1st.OnEnvironmentsTearDownStart", + "2nd.OnEnvironmentsTearDownStart", + "3rd.OnEnvironmentsTearDownStart", + "Environment::TearDown", + "3rd.OnEnvironmentsTearDownEnd", + "2nd.OnEnvironmentsTearDownEnd", + "1st.OnEnvironmentsTearDownEnd", + "3rd.OnTestIterationEnd(0)", + "2nd.OnTestIterationEnd(0)", + "1st.OnTestIterationEnd(0)", + "1st.OnTestIterationStart(1)", + "2nd.OnTestIterationStart(1)", + "3rd.OnTestIterationStart(1)", + "1st.OnEnvironmentsSetUpStart", + "2nd.OnEnvironmentsSetUpStart", + "3rd.OnEnvironmentsSetUpStart", + "Environment::SetUp", + 
"3rd.OnEnvironmentsSetUpEnd", + "2nd.OnEnvironmentsSetUpEnd", + "1st.OnEnvironmentsSetUpEnd", + "3rd.OnTestSuiteStart", + "1st.OnTestCaseStart", + "2nd.OnTestCaseStart", + "ListenerTest::SetUpTestSuite", + "1st.OnTestStart", + "2nd.OnTestStart", + "3rd.OnTestStart", + "ListenerTest::SetUp", + "ListenerTest::* Test Body", + "1st.OnTestPartResult", + "2nd.OnTestPartResult", + "3rd.OnTestPartResult", + "ListenerTest::TearDown", + "3rd.OnTestEnd", + "2nd.OnTestEnd", + "1st.OnTestEnd", + "1st.OnTestStart", + "2nd.OnTestStart", + "3rd.OnTestStart", + "ListenerTest::SetUp", + "ListenerTest::* Test Body", + "1st.OnTestPartResult", + "2nd.OnTestPartResult", + "3rd.OnTestPartResult", + "ListenerTest::TearDown", + "3rd.OnTestEnd", + "2nd.OnTestEnd", + "1st.OnTestEnd", + "ListenerTest::TearDownTestSuite", + "3rd.OnTestSuiteEnd", + "2nd.OnTestCaseEnd", + "1st.OnTestCaseEnd", + "1st.OnEnvironmentsTearDownStart", + "2nd.OnEnvironmentsTearDownStart", + "3rd.OnEnvironmentsTearDownStart", + "Environment::TearDown", + "3rd.OnEnvironmentsTearDownEnd", + "2nd.OnEnvironmentsTearDownEnd", + "1st.OnEnvironmentsTearDownEnd", + "3rd.OnTestIterationEnd(1)", + "2nd.OnTestIterationEnd(1)", + "1st.OnTestIterationEnd(1)", + "3rd.OnTestProgramEnd", + "2nd.OnTestProgramEnd", + "1st.OnTestProgramEnd"}; +#else + const char* const expected_events[] = {"1st.OnTestProgramStart", + "2nd.OnTestProgramStart", + "3rd.OnTestProgramStart", + "1st.OnTestIterationStart(0)", + "2nd.OnTestIterationStart(0)", + "3rd.OnTestIterationStart(0)", + "1st.OnEnvironmentsSetUpStart", + "2nd.OnEnvironmentsSetUpStart", + "3rd.OnEnvironmentsSetUpStart", + "Environment::SetUp", + "3rd.OnEnvironmentsSetUpEnd", + "2nd.OnEnvironmentsSetUpEnd", + "1st.OnEnvironmentsSetUpEnd", + "3rd.OnTestSuiteStart", + "ListenerTest::SetUpTestSuite", + "1st.OnTestStart", + "2nd.OnTestStart", + "3rd.OnTestStart", + "ListenerTest::SetUp", + "ListenerTest::* Test Body", + "1st.OnTestPartResult", + "2nd.OnTestPartResult", + "3rd.OnTestPartResult", + "ListenerTest::TearDown", + "3rd.OnTestEnd", + "2nd.OnTestEnd", + "1st.OnTestEnd", + "1st.OnTestStart", + "2nd.OnTestStart", + "3rd.OnTestStart", + "ListenerTest::SetUp", + "ListenerTest::* Test Body", + "1st.OnTestPartResult", + "2nd.OnTestPartResult", + "3rd.OnTestPartResult", + "ListenerTest::TearDown", + "3rd.OnTestEnd", + "2nd.OnTestEnd", + "1st.OnTestEnd", + "ListenerTest::TearDownTestSuite", + "3rd.OnTestSuiteEnd", + "1st.OnEnvironmentsTearDownStart", + "2nd.OnEnvironmentsTearDownStart", + "3rd.OnEnvironmentsTearDownStart", + "Environment::TearDown", + "3rd.OnEnvironmentsTearDownEnd", + "2nd.OnEnvironmentsTearDownEnd", + "1st.OnEnvironmentsTearDownEnd", + "3rd.OnTestIterationEnd(0)", + "2nd.OnTestIterationEnd(0)", + "1st.OnTestIterationEnd(0)", + "1st.OnTestIterationStart(1)", + "2nd.OnTestIterationStart(1)", + "3rd.OnTestIterationStart(1)", + "1st.OnEnvironmentsSetUpStart", + "2nd.OnEnvironmentsSetUpStart", + "3rd.OnEnvironmentsSetUpStart", + "Environment::SetUp", + "3rd.OnEnvironmentsSetUpEnd", + "2nd.OnEnvironmentsSetUpEnd", + "1st.OnEnvironmentsSetUpEnd", + "3rd.OnTestSuiteStart", + "ListenerTest::SetUpTestSuite", + "1st.OnTestStart", + "2nd.OnTestStart", + "3rd.OnTestStart", + "ListenerTest::SetUp", + "ListenerTest::* Test Body", + "1st.OnTestPartResult", + "2nd.OnTestPartResult", + "3rd.OnTestPartResult", + "ListenerTest::TearDown", + "3rd.OnTestEnd", + "2nd.OnTestEnd", + "1st.OnTestEnd", + "1st.OnTestStart", + "2nd.OnTestStart", + "3rd.OnTestStart", + "ListenerTest::SetUp", + "ListenerTest::* Test Body", + 
"1st.OnTestPartResult", + "2nd.OnTestPartResult", + "3rd.OnTestPartResult", + "ListenerTest::TearDown", + "3rd.OnTestEnd", + "2nd.OnTestEnd", + "1st.OnTestEnd", + "ListenerTest::TearDownTestSuite", + "3rd.OnTestSuiteEnd", + "1st.OnEnvironmentsTearDownStart", + "2nd.OnEnvironmentsTearDownStart", + "3rd.OnEnvironmentsTearDownStart", + "Environment::TearDown", + "3rd.OnEnvironmentsTearDownEnd", + "2nd.OnEnvironmentsTearDownEnd", + "1st.OnEnvironmentsTearDownEnd", + "3rd.OnTestIterationEnd(1)", + "2nd.OnTestIterationEnd(1)", + "1st.OnTestIterationEnd(1)", + "3rd.OnTestProgramEnd", + "2nd.OnTestProgramEnd", + "1st.OnTestProgramEnd"}; +#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ + + VerifyResults(events, + expected_events, + sizeof(expected_events)/sizeof(expected_events[0])); + + // We need to check manually for ad hoc test failures that happen after + // RUN_ALL_TESTS finishes. + if (UnitTest::GetInstance()->Failed()) + ret_val = 1; + + return ret_val; +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-message-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-message-test.cc new file mode 100644 index 0000000000..962d519114 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-message-test.cc @@ -0,0 +1,158 @@ +// Copyright 2005, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Tests for the Message class. + +#include "gtest/gtest-message.h" + +#include "gtest/gtest.h" + +namespace { + +using ::testing::Message; + +// Tests the testing::Message class + +// Tests the default constructor. +TEST(MessageTest, DefaultConstructor) { + const Message msg; + EXPECT_EQ("", msg.GetString()); +} + +// Tests the copy constructor. +TEST(MessageTest, CopyConstructor) { + const Message msg1("Hello"); + const Message msg2(msg1); + EXPECT_EQ("Hello", msg2.GetString()); +} + +// Tests constructing a Message from a C-string. +TEST(MessageTest, ConstructsFromCString) { + Message msg("Hello"); + EXPECT_EQ("Hello", msg.GetString()); +} + +// Tests streaming a float. 
+TEST(MessageTest, StreamsFloat) { + const std::string s = (Message() << 1.23456F << " " << 2.34567F).GetString(); + // Both numbers should be printed with enough precision. + EXPECT_PRED_FORMAT2(testing::IsSubstring, "1.234560", s.c_str()); + EXPECT_PRED_FORMAT2(testing::IsSubstring, " 2.345669", s.c_str()); +} + +// Tests streaming a double. +TEST(MessageTest, StreamsDouble) { + const std::string s = (Message() << 1260570880.4555497 << " " + << 1260572265.1954534).GetString(); + // Both numbers should be printed with enough precision. + EXPECT_PRED_FORMAT2(testing::IsSubstring, "1260570880.45", s.c_str()); + EXPECT_PRED_FORMAT2(testing::IsSubstring, " 1260572265.19", s.c_str()); +} + +// Tests streaming a non-char pointer. +TEST(MessageTest, StreamsPointer) { + int n = 0; + int* p = &n; + EXPECT_NE("(null)", (Message() << p).GetString()); +} + +// Tests streaming a NULL non-char pointer. +TEST(MessageTest, StreamsNullPointer) { + int* p = nullptr; + EXPECT_EQ("(null)", (Message() << p).GetString()); +} + +// Tests streaming a C string. +TEST(MessageTest, StreamsCString) { + EXPECT_EQ("Foo", (Message() << "Foo").GetString()); +} + +// Tests streaming a NULL C string. +TEST(MessageTest, StreamsNullCString) { + char* p = nullptr; + EXPECT_EQ("(null)", (Message() << p).GetString()); +} + +// Tests streaming std::string. +TEST(MessageTest, StreamsString) { + const ::std::string str("Hello"); + EXPECT_EQ("Hello", (Message() << str).GetString()); +} + +// Tests that we can output strings containing embedded NULs. +TEST(MessageTest, StreamsStringWithEmbeddedNUL) { + const char char_array_with_nul[] = + "Here's a NUL\0 and some more string"; + const ::std::string string_with_nul(char_array_with_nul, + sizeof(char_array_with_nul) - 1); + EXPECT_EQ("Here's a NUL\\0 and some more string", + (Message() << string_with_nul).GetString()); +} + +// Tests streaming a NUL char. +TEST(MessageTest, StreamsNULChar) { + EXPECT_EQ("\\0", (Message() << '\0').GetString()); +} + +// Tests streaming int. +TEST(MessageTest, StreamsInt) { + EXPECT_EQ("123", (Message() << 123).GetString()); +} + +// Tests that basic IO manipulators (endl, ends, and flush) can be +// streamed to Message. +TEST(MessageTest, StreamsBasicIoManip) { + EXPECT_EQ("Line 1.\nA NUL char \\0 in line 2.", + (Message() << "Line 1." << std::endl + << "A NUL char " << std::ends << std::flush + << " in line 2.").GetString()); +} + +// Tests Message::GetString() +TEST(MessageTest, GetString) { + Message msg; + msg << 1 << " lamb"; + EXPECT_EQ("1 lamb", msg.GetString()); +} + +// Tests streaming a Message object to an ostream. +TEST(MessageTest, StreamsToOStream) { + Message msg("Hello"); + ::std::stringstream ss; + ss << msg; + EXPECT_EQ("Hello", testing::internal::StringStreamToString(&ss)); +} + +// Tests that a Message object doesn't take up too much stack space. +TEST(MessageTest, DoesNotTakeUpMuchStackSpace) { + EXPECT_LE(sizeof(Message), 16U); +} + +} // namespace diff --git a/security/nss/gtests/google_test/gtest/test/googletest-options-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-options-test.cc new file mode 100644 index 0000000000..11fb1f2246 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-options-test.cc @@ -0,0 +1,219 @@ +// Copyright 2008, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Google Test UnitTestOptions tests +// +// This file tests classes and functions used internally by +// Google Test. They are subject to change without notice. +// +// This file is #included from gtest.cc, to avoid changing build or +// make-files on Windows and other platforms. Do not #include this file +// anywhere else! + +#include "gtest/gtest.h" + +#if GTEST_OS_WINDOWS_MOBILE +# include <windows.h> +#elif GTEST_OS_WINDOWS +# include <direct.h> +#elif GTEST_OS_OS2 +// For strcasecmp on OS/2 +#include <strings.h> +#endif // GTEST_OS_WINDOWS_MOBILE + +#include "src/gtest-internal-inl.h" + +namespace testing { +namespace internal { +namespace { + +// Turns the given relative path into an absolute path. +FilePath GetAbsolutePathOf(const FilePath& relative_path) { + return FilePath::ConcatPaths(FilePath::GetCurrentDir(), relative_path); +} + +// Testing UnitTestOptions::GetOutputFormat/GetOutputFile. 
+ +TEST(XmlOutputTest, GetOutputFormatDefault) { + GTEST_FLAG(output) = ""; + EXPECT_STREQ("", UnitTestOptions::GetOutputFormat().c_str()); +} + +TEST(XmlOutputTest, GetOutputFormat) { + GTEST_FLAG(output) = "xml:filename"; + EXPECT_STREQ("xml", UnitTestOptions::GetOutputFormat().c_str()); +} + +TEST(XmlOutputTest, GetOutputFileDefault) { + GTEST_FLAG(output) = ""; + EXPECT_EQ(GetAbsolutePathOf(FilePath("test_detail.xml")).string(), + UnitTestOptions::GetAbsolutePathToOutputFile()); +} + +TEST(XmlOutputTest, GetOutputFileSingleFile) { + GTEST_FLAG(output) = "xml:filename.abc"; + EXPECT_EQ(GetAbsolutePathOf(FilePath("filename.abc")).string(), + UnitTestOptions::GetAbsolutePathToOutputFile()); +} + +TEST(XmlOutputTest, GetOutputFileFromDirectoryPath) { + GTEST_FLAG(output) = "xml:path" GTEST_PATH_SEP_; + const std::string expected_output_file = + GetAbsolutePathOf( + FilePath(std::string("path") + GTEST_PATH_SEP_ + + GetCurrentExecutableName().string() + ".xml")).string(); + const std::string& output_file = + UnitTestOptions::GetAbsolutePathToOutputFile(); +#if GTEST_OS_WINDOWS + EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str()); +#else + EXPECT_EQ(expected_output_file, output_file.c_str()); +#endif +} + +TEST(OutputFileHelpersTest, GetCurrentExecutableName) { + const std::string exe_str = GetCurrentExecutableName().string(); +#if GTEST_OS_WINDOWS + const bool success = + _strcmpi("googletest-options-test", exe_str.c_str()) == 0 || + _strcmpi("gtest-options-ex_test", exe_str.c_str()) == 0 || + _strcmpi("gtest_all_test", exe_str.c_str()) == 0 || + _strcmpi("gtest_dll_test", exe_str.c_str()) == 0; +#elif GTEST_OS_OS2 + const bool success = + strcasecmp("googletest-options-test", exe_str.c_str()) == 0 || + strcasecmp("gtest-options-ex_test", exe_str.c_str()) == 0 || + strcasecmp("gtest_all_test", exe_str.c_str()) == 0 || + strcasecmp("gtest_dll_test", exe_str.c_str()) == 0; +#elif GTEST_OS_FUCHSIA + const bool success = exe_str == "app"; +#else + const bool success = + exe_str == "googletest-options-test" || + exe_str == "gtest_all_test" || + exe_str == "lt-gtest_all_test" || + exe_str == "gtest_dll_test"; +#endif // GTEST_OS_WINDOWS + if (!success) + FAIL() << "GetCurrentExecutableName() returns " << exe_str; +} + +#if !GTEST_OS_FUCHSIA + +class XmlOutputChangeDirTest : public Test { + protected: + void SetUp() override { + original_working_dir_ = FilePath::GetCurrentDir(); + posix::ChDir(".."); + // This will make the test fail if run from the root directory. 
+ EXPECT_NE(original_working_dir_.string(), + FilePath::GetCurrentDir().string()); + } + + void TearDown() override { + posix::ChDir(original_working_dir_.string().c_str()); + } + + FilePath original_working_dir_; +}; + +TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefault) { + GTEST_FLAG(output) = ""; + EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_, + FilePath("test_detail.xml")).string(), + UnitTestOptions::GetAbsolutePathToOutputFile()); +} + +TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithDefaultXML) { + GTEST_FLAG(output) = "xml"; + EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_, + FilePath("test_detail.xml")).string(), + UnitTestOptions::GetAbsolutePathToOutputFile()); +} + +TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativeFile) { + GTEST_FLAG(output) = "xml:filename.abc"; + EXPECT_EQ(FilePath::ConcatPaths(original_working_dir_, + FilePath("filename.abc")).string(), + UnitTestOptions::GetAbsolutePathToOutputFile()); +} + +TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithRelativePath) { + GTEST_FLAG(output) = "xml:path" GTEST_PATH_SEP_; + const std::string expected_output_file = + FilePath::ConcatPaths( + original_working_dir_, + FilePath(std::string("path") + GTEST_PATH_SEP_ + + GetCurrentExecutableName().string() + ".xml")).string(); + const std::string& output_file = + UnitTestOptions::GetAbsolutePathToOutputFile(); +#if GTEST_OS_WINDOWS + EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str()); +#else + EXPECT_EQ(expected_output_file, output_file.c_str()); +#endif +} + +TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsoluteFile) { +#if GTEST_OS_WINDOWS + GTEST_FLAG(output) = "xml:c:\\tmp\\filename.abc"; + EXPECT_EQ(FilePath("c:\\tmp\\filename.abc").string(), + UnitTestOptions::GetAbsolutePathToOutputFile()); +#else + GTEST_FLAG(output) ="xml:/tmp/filename.abc"; + EXPECT_EQ(FilePath("/tmp/filename.abc").string(), + UnitTestOptions::GetAbsolutePathToOutputFile()); +#endif +} + +TEST_F(XmlOutputChangeDirTest, PreserveOriginalWorkingDirWithAbsolutePath) { +#if GTEST_OS_WINDOWS + const std::string path = "c:\\tmp\\"; +#else + const std::string path = "/tmp/"; +#endif + + GTEST_FLAG(output) = "xml:" + path; + const std::string expected_output_file = + path + GetCurrentExecutableName().string() + ".xml"; + const std::string& output_file = + UnitTestOptions::GetAbsolutePathToOutputFile(); + +#if GTEST_OS_WINDOWS + EXPECT_STRCASEEQ(expected_output_file.c_str(), output_file.c_str()); +#else + EXPECT_EQ(expected_output_file, output_file.c_str()); +#endif +} + +#endif // !GTEST_OS_FUCHSIA + +} // namespace +} // namespace internal +} // namespace testing diff --git a/security/nss/gtests/google_test/gtest/test/googletest-output-test-golden-lin.txt b/security/nss/gtests/google_test/gtest/test/googletest-output-test-golden-lin.txt new file mode 100644 index 0000000000..3fab3b97d8 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-output-test-golden-lin.txt @@ -0,0 +1,1180 @@ +The non-test part of the code is expected to have 2 failures. + +googletest-output-test_.cc:#: Failure +Value of: false + Actual: false +Expected: true +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 2 + 3 +Stack trace: (omitted) + +[0;32m[==========] [mRunning 88 tests from 41 test suites. +[0;32m[----------] [mGlobal test environment set-up. +FooEnvironment::SetUp() called. +BarEnvironment::SetUp() called. 
+[0;32m[----------] [m1 test from ADeathTest +[0;32m[ RUN ] [mADeathTest.ShouldRunFirst +[0;32m[ OK ] [mADeathTest.ShouldRunFirst +[0;32m[----------] [m1 test from ATypedDeathTest/0, where TypeParam = int +[0;32m[ RUN ] [mATypedDeathTest/0.ShouldRunFirst +[0;32m[ OK ] [mATypedDeathTest/0.ShouldRunFirst +[0;32m[----------] [m1 test from ATypedDeathTest/1, where TypeParam = double +[0;32m[ RUN ] [mATypedDeathTest/1.ShouldRunFirst +[0;32m[ OK ] [mATypedDeathTest/1.ShouldRunFirst +[0;32m[----------] [m1 test from My/ATypeParamDeathTest/0, where TypeParam = int +[0;32m[ RUN ] [mMy/ATypeParamDeathTest/0.ShouldRunFirst +[0;32m[ OK ] [mMy/ATypeParamDeathTest/0.ShouldRunFirst +[0;32m[----------] [m1 test from My/ATypeParamDeathTest/1, where TypeParam = double +[0;32m[ RUN ] [mMy/ATypeParamDeathTest/1.ShouldRunFirst +[0;32m[ OK ] [mMy/ATypeParamDeathTest/1.ShouldRunFirst +[0;32m[----------] [m2 tests from PassingTest +[0;32m[ RUN ] [mPassingTest.PassingTest1 +[0;32m[ OK ] [mPassingTest.PassingTest1 +[0;32m[ RUN ] [mPassingTest.PassingTest2 +[0;32m[ OK ] [mPassingTest.PassingTest2 +[0;32m[----------] [m2 tests from NonfatalFailureTest +[0;32m[ RUN ] [mNonfatalFailureTest.EscapesStringOperands +googletest-output-test_.cc:#: Failure +Expected equality of these values: + kGoldenString + Which is: "\"Line" + actual + Which is: "actual \"string\"" +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Expected equality of these values: + golden + Which is: "\"Line" + actual + Which is: "actual \"string\"" +Stack trace: (omitted) + +[0;31m[ FAILED ] [mNonfatalFailureTest.EscapesStringOperands +[0;32m[ RUN ] [mNonfatalFailureTest.DiffForLongStrings +googletest-output-test_.cc:#: Failure +Expected equality of these values: + golden_str + Which is: "\"Line\0 1\"\nLine 2" + "Line 2" +With diff: +@@ -1,2 @@ +-\"Line\0 1\" + Line 2 + +Stack trace: (omitted) + +[0;31m[ FAILED ] [mNonfatalFailureTest.DiffForLongStrings +[0;32m[----------] [m3 tests from FatalFailureTest +[0;32m[ RUN ] [mFatalFailureTest.FatalFailureInSubroutine +(expecting a failure that x should be 1) +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1 + x + Which is: 2 +Stack trace: (omitted) + +[0;31m[ FAILED ] [mFatalFailureTest.FatalFailureInSubroutine +[0;32m[ RUN ] [mFatalFailureTest.FatalFailureInNestedSubroutine +(expecting a failure that x should be 1) +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1 + x + Which is: 2 +Stack trace: (omitted) + +[0;31m[ FAILED ] [mFatalFailureTest.FatalFailureInNestedSubroutine +[0;32m[ RUN ] [mFatalFailureTest.NonfatalFailureInSubroutine +(expecting a failure on false) +googletest-output-test_.cc:#: Failure +Value of: false + Actual: false +Expected: true +Stack trace: (omitted) + +[0;31m[ FAILED ] [mFatalFailureTest.NonfatalFailureInSubroutine +[0;32m[----------] [m1 test from LoggingTest +[0;32m[ RUN ] [mLoggingTest.InterleavingLoggingAndAssertions +(expecting 2 failures on (3) >= (a[i])) +i == 0 +i == 1 +googletest-output-test_.cc:#: Failure +Expected: (3) >= (a[i]), actual: 3 vs 9 +Stack trace: (omitted) + +i == 2 +i == 3 +googletest-output-test_.cc:#: Failure +Expected: (3) >= (a[i]), actual: 3 vs 6 +Stack trace: (omitted) + +[0;31m[ FAILED ] [mLoggingTest.InterleavingLoggingAndAssertions +[0;32m[----------] [m7 tests from SCOPED_TRACETest +[0;32m[ RUN ] [mSCOPED_TRACETest.AcceptedValues +googletest-output-test_.cc:#: Failure +Failed +Just checking that all these values work fine. 
+Google Test trace: +googletest-output-test_.cc:#: (null) +googletest-output-test_.cc:#: 1337 +googletest-output-test_.cc:#: std::string +googletest-output-test_.cc:#: literal string +Stack trace: (omitted) + +[0;31m[ FAILED ] [mSCOPED_TRACETest.AcceptedValues +[0;32m[ RUN ] [mSCOPED_TRACETest.ObeysScopes +(expected to fail) +googletest-output-test_.cc:#: Failure +Failed +This failure is expected, and shouldn't have a trace. +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +This failure is expected, and should have a trace. +Google Test trace: +googletest-output-test_.cc:#: Expected trace +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +This failure is expected, and shouldn't have a trace. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mSCOPED_TRACETest.ObeysScopes +[0;32m[ RUN ] [mSCOPED_TRACETest.WorksInLoop +(expected to fail) +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 2 + n + Which is: 1 +Google Test trace: +googletest-output-test_.cc:#: i = 1 +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1 + n + Which is: 2 +Google Test trace: +googletest-output-test_.cc:#: i = 2 +Stack trace: (omitted) + +[0;31m[ FAILED ] [mSCOPED_TRACETest.WorksInLoop +[0;32m[ RUN ] [mSCOPED_TRACETest.WorksInSubroutine +(expected to fail) +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 2 + n + Which is: 1 +Google Test trace: +googletest-output-test_.cc:#: n = 1 +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1 + n + Which is: 2 +Google Test trace: +googletest-output-test_.cc:#: n = 2 +Stack trace: (omitted) + +[0;31m[ FAILED ] [mSCOPED_TRACETest.WorksInSubroutine +[0;32m[ RUN ] [mSCOPED_TRACETest.CanBeNested +(expected to fail) +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1 + n + Which is: 2 +Google Test trace: +googletest-output-test_.cc:#: n = 2 +googletest-output-test_.cc:#: +Stack trace: (omitted) + +[0;31m[ FAILED ] [mSCOPED_TRACETest.CanBeNested +[0;32m[ RUN ] [mSCOPED_TRACETest.CanBeRepeated +(expected to fail) +googletest-output-test_.cc:#: Failure +Failed +This failure is expected, and should contain trace point A. +Google Test trace: +googletest-output-test_.cc:#: A +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +This failure is expected, and should contain trace point A and B. +Google Test trace: +googletest-output-test_.cc:#: B +googletest-output-test_.cc:#: A +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +This failure is expected, and should contain trace point A, B, and C. +Google Test trace: +googletest-output-test_.cc:#: C +googletest-output-test_.cc:#: B +googletest-output-test_.cc:#: A +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +This failure is expected, and should contain trace point A, B, and D. +Google Test trace: +googletest-output-test_.cc:#: D +googletest-output-test_.cc:#: B +googletest-output-test_.cc:#: A +Stack trace: (omitted) + +[0;31m[ FAILED ] [mSCOPED_TRACETest.CanBeRepeated +[0;32m[ RUN ] [mSCOPED_TRACETest.WorksConcurrently +(expecting 6 failures) +googletest-output-test_.cc:#: Failure +Failed +Expected failure #1 (in thread B, only trace B alive). +Google Test trace: +googletest-output-test_.cc:#: Trace B +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #2 (in thread A, trace A & B both alive). 
+Google Test trace: +googletest-output-test_.cc:#: Trace A +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #3 (in thread B, trace A & B both alive). +Google Test trace: +googletest-output-test_.cc:#: Trace B +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #4 (in thread B, only trace A alive). +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #5 (in thread A, only trace A alive). +Google Test trace: +googletest-output-test_.cc:#: Trace A +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #6 (in thread A, no trace alive). +Stack trace: (omitted) + +[0;31m[ FAILED ] [mSCOPED_TRACETest.WorksConcurrently +[0;32m[----------] [m1 test from ScopedTraceTest +[0;32m[ RUN ] [mScopedTraceTest.WithExplicitFileAndLine +googletest-output-test_.cc:#: Failure +Failed +Check that the trace is attached to a particular location. +Google Test trace: +explicit_file.cc:123: expected trace message +Stack trace: (omitted) + +[0;31m[ FAILED ] [mScopedTraceTest.WithExplicitFileAndLine +[0;32m[----------] [m1 test from NonFatalFailureInFixtureConstructorTest +[0;32m[ RUN ] [mNonFatalFailureInFixtureConstructorTest.FailureInConstructor +(expecting 5 failures) +googletest-output-test_.cc:#: Failure +Failed +Expected failure #1, in the test fixture c'tor. +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #2, in SetUp(). +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #3, in the test body. +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #4, in TearDown. +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #5, in the test fixture d'tor. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mNonFatalFailureInFixtureConstructorTest.FailureInConstructor +[0;32m[----------] [m1 test from FatalFailureInFixtureConstructorTest +[0;32m[ RUN ] [mFatalFailureInFixtureConstructorTest.FailureInConstructor +(expecting 2 failures) +googletest-output-test_.cc:#: Failure +Failed +Expected failure #1, in the test fixture c'tor. +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #2, in the test fixture d'tor. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mFatalFailureInFixtureConstructorTest.FailureInConstructor +[0;32m[----------] [m1 test from NonFatalFailureInSetUpTest +[0;32m[ RUN ] [mNonFatalFailureInSetUpTest.FailureInSetUp +(expecting 4 failures) +googletest-output-test_.cc:#: Failure +Failed +Expected failure #1, in SetUp(). +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #2, in the test function. +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #3, in TearDown(). +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #4, in the test fixture d'tor. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mNonFatalFailureInSetUpTest.FailureInSetUp +[0;32m[----------] [m1 test from FatalFailureInSetUpTest +[0;32m[ RUN ] [mFatalFailureInSetUpTest.FailureInSetUp +(expecting 3 failures) +googletest-output-test_.cc:#: Failure +Failed +Expected failure #1, in SetUp(). +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #2, in TearDown(). 
+Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected failure #3, in the test fixture d'tor. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mFatalFailureInSetUpTest.FailureInSetUp +[0;32m[----------] [m1 test from AddFailureAtTest +[0;32m[ RUN ] [mAddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber +foo.cc:42: Failure +Failed +Expected nonfatal failure in foo.cc +Stack trace: (omitted) + +[0;31m[ FAILED ] [mAddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber +[0;32m[----------] [m1 test from GtestFailAtTest +[0;32m[ RUN ] [mGtestFailAtTest.MessageContainsSpecifiedFileAndLineNumber +foo.cc:42: Failure +Failed +Expected fatal failure in foo.cc +Stack trace: (omitted) + +[0;31m[ FAILED ] [mGtestFailAtTest.MessageContainsSpecifiedFileAndLineNumber +[0;32m[----------] [m4 tests from MixedUpTestSuiteTest +[0;32m[ RUN ] [mMixedUpTestSuiteTest.FirstTestFromNamespaceFoo +[0;32m[ OK ] [mMixedUpTestSuiteTest.FirstTestFromNamespaceFoo +[0;32m[ RUN ] [mMixedUpTestSuiteTest.SecondTestFromNamespaceFoo +[0;32m[ OK ] [mMixedUpTestSuiteTest.SecondTestFromNamespaceFoo +[0;32m[ RUN ] [mMixedUpTestSuiteTest.ThisShouldFail +gtest.cc:#: Failure +Failed +All tests in the same test suite must use the same test fixture +class. However, in test suite MixedUpTestSuiteTest, +you defined test FirstTestFromNamespaceFoo and test ThisShouldFail +using two different test fixture classes. This can happen if +the two classes are from different namespaces or translation +units and have the same name. You should probably rename one +of the classes to put the tests into different test suites. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mMixedUpTestSuiteTest.ThisShouldFail +[0;32m[ RUN ] [mMixedUpTestSuiteTest.ThisShouldFailToo +gtest.cc:#: Failure +Failed +All tests in the same test suite must use the same test fixture +class. However, in test suite MixedUpTestSuiteTest, +you defined test FirstTestFromNamespaceFoo and test ThisShouldFailToo +using two different test fixture classes. This can happen if +the two classes are from different namespaces or translation +units and have the same name. You should probably rename one +of the classes to put the tests into different test suites. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mMixedUpTestSuiteTest.ThisShouldFailToo +[0;32m[----------] [m2 tests from MixedUpTestSuiteWithSameTestNameTest +[0;32m[ RUN ] [mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail +[0;32m[ OK ] [mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail +[0;32m[ RUN ] [mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail +gtest.cc:#: Failure +Failed +All tests in the same test suite must use the same test fixture +class. However, in test suite MixedUpTestSuiteWithSameTestNameTest, +you defined test TheSecondTestWithThisNameShouldFail and test TheSecondTestWithThisNameShouldFail +using two different test fixture classes. This can happen if +the two classes are from different namespaces or translation +units and have the same name. You should probably rename one +of the classes to put the tests into different test suites. 
+Stack trace: (omitted) + +[0;31m[ FAILED ] [mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail +[0;32m[----------] [m2 tests from TEST_F_before_TEST_in_same_test_case +[0;32m[ RUN ] [mTEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F +[0;32m[ OK ] [mTEST_F_before_TEST_in_same_test_case.DefinedUsingTEST_F +[0;32m[ RUN ] [mTEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail +gtest.cc:#: Failure +Failed +All tests in the same test suite must use the same test fixture +class, so mixing TEST_F and TEST in the same test suite is +illegal. In test suite TEST_F_before_TEST_in_same_test_case, +test DefinedUsingTEST_F is defined using TEST_F but +test DefinedUsingTESTAndShouldFail is defined using TEST. You probably +want to change the TEST to TEST_F or move it to another test +case. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mTEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail +[0;32m[----------] [m2 tests from TEST_before_TEST_F_in_same_test_case +[0;32m[ RUN ] [mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST +[0;32m[ OK ] [mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST +[0;32m[ RUN ] [mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail +gtest.cc:#: Failure +Failed +All tests in the same test suite must use the same test fixture +class, so mixing TEST_F and TEST in the same test suite is +illegal. In test suite TEST_before_TEST_F_in_same_test_case, +test DefinedUsingTEST_FAndShouldFail is defined using TEST_F but +test DefinedUsingTEST is defined using TEST. You probably +want to change the TEST to TEST_F or move it to another test +case. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail +[0;32m[----------] [m8 tests from ExpectNonfatalFailureTest +[0;32m[ RUN ] [mExpectNonfatalFailureTest.CanReferenceGlobalVariables +[0;32m[ OK ] [mExpectNonfatalFailureTest.CanReferenceGlobalVariables +[0;32m[ RUN ] [mExpectNonfatalFailureTest.CanReferenceLocalVariables +[0;32m[ OK ] [mExpectNonfatalFailureTest.CanReferenceLocalVariables +[0;32m[ RUN ] [mExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure +[0;32m[ OK ] [mExpectNonfatalFailureTest.SucceedsWhenThereIsOneNonfatalFailure +[0;32m[ RUN ] [mExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: 0 failures +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure +[0;32m[ RUN ] [mExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: 2 failures +googletest-output-test_.cc:#: Non-fatal failure: +Failed +Expected non-fatal failure 1. +Stack trace: (omitted) + + +googletest-output-test_.cc:#: Non-fatal failure: +Failed +Expected non-fatal failure 2. +Stack trace: (omitted) + + +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures +[0;32m[ RUN ] [mExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: +googletest-output-test_.cc:#: Fatal failure: +Failed +Expected fatal failure. 
+Stack trace: (omitted) + + +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure +[0;32m[ RUN ] [mExpectNonfatalFailureTest.FailsWhenStatementReturns +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: 0 failures +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenStatementReturns +[0;32m[ RUN ] [mExpectNonfatalFailureTest.FailsWhenStatementThrows +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: 0 failures +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenStatementThrows +[0;32m[----------] [m8 tests from ExpectFatalFailureTest +[0;32m[ RUN ] [mExpectFatalFailureTest.CanReferenceGlobalVariables +[0;32m[ OK ] [mExpectFatalFailureTest.CanReferenceGlobalVariables +[0;32m[ RUN ] [mExpectFatalFailureTest.CanReferenceLocalStaticVariables +[0;32m[ OK ] [mExpectFatalFailureTest.CanReferenceLocalStaticVariables +[0;32m[ RUN ] [mExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure +[0;32m[ OK ] [mExpectFatalFailureTest.SucceedsWhenThereIsOneFatalFailure +[0;32m[ RUN ] [mExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: 0 failures +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure +[0;32m[ RUN ] [mExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: 2 failures +googletest-output-test_.cc:#: Fatal failure: +Failed +Expected fatal failure. +Stack trace: (omitted) + + +googletest-output-test_.cc:#: Fatal failure: +Failed +Expected fatal failure. +Stack trace: (omitted) + + +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures +[0;32m[ RUN ] [mExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: +googletest-output-test_.cc:#: Non-fatal failure: +Failed +Expected non-fatal failure. 
+Stack trace: (omitted) + + +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure +[0;32m[ RUN ] [mExpectFatalFailureTest.FailsWhenStatementReturns +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: 0 failures +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenStatementReturns +[0;32m[ RUN ] [mExpectFatalFailureTest.FailsWhenStatementThrows +(expecting a failure) +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: 0 failures +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenStatementThrows +[0;32m[----------] [m2 tests from TypedTest/0, where TypeParam = int +[0;32m[ RUN ] [mTypedTest/0.Success +[0;32m[ OK ] [mTypedTest/0.Success +[0;32m[ RUN ] [mTypedTest/0.Failure +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1 + TypeParam() + Which is: 0 +Expected failure +Stack trace: (omitted) + +[0;31m[ FAILED ] [mTypedTest/0.Failure, where TypeParam = int +[0;32m[----------] [m2 tests from TypedTestWithNames/char0, where TypeParam = char +[0;32m[ RUN ] [mTypedTestWithNames/char0.Success +[0;32m[ OK ] [mTypedTestWithNames/char0.Success +[0;32m[ RUN ] [mTypedTestWithNames/char0.Failure +googletest-output-test_.cc:#: Failure +Failed +Stack trace: (omitted) + +[0;31m[ FAILED ] [mTypedTestWithNames/char0.Failure, where TypeParam = char +[0;32m[----------] [m2 tests from TypedTestWithNames/int1, where TypeParam = int +[0;32m[ RUN ] [mTypedTestWithNames/int1.Success +[0;32m[ OK ] [mTypedTestWithNames/int1.Success +[0;32m[ RUN ] [mTypedTestWithNames/int1.Failure +googletest-output-test_.cc:#: Failure +Failed +Stack trace: (omitted) + +[0;31m[ FAILED ] [mTypedTestWithNames/int1.Failure, where TypeParam = int +[0;32m[----------] [m2 tests from Unsigned/TypedTestP/0, where TypeParam = unsigned char +[0;32m[ RUN ] [mUnsigned/TypedTestP/0.Success +[0;32m[ OK ] [mUnsigned/TypedTestP/0.Success +[0;32m[ RUN ] [mUnsigned/TypedTestP/0.Failure +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1U + Which is: 1 + TypeParam() + Which is: '\0' +Expected failure +Stack trace: (omitted) + +[0;31m[ FAILED ] [mUnsigned/TypedTestP/0.Failure, where TypeParam = unsigned char +[0;32m[----------] [m2 tests from Unsigned/TypedTestP/1, where TypeParam = unsigned int +[0;32m[ RUN ] [mUnsigned/TypedTestP/1.Success +[0;32m[ OK ] [mUnsigned/TypedTestP/1.Success +[0;32m[ RUN ] [mUnsigned/TypedTestP/1.Failure +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1U + Which is: 1 + TypeParam() + Which is: 0 +Expected failure +Stack trace: (omitted) + +[0;31m[ FAILED ] [mUnsigned/TypedTestP/1.Failure, where TypeParam = unsigned int +[0;32m[----------] [m2 tests from UnsignedCustomName/TypedTestP/unsignedChar0, where TypeParam = unsigned char +[0;32m[ RUN ] [mUnsignedCustomName/TypedTestP/unsignedChar0.Success +[0;32m[ OK ] [mUnsignedCustomName/TypedTestP/unsignedChar0.Success +[0;32m[ RUN ] [mUnsignedCustomName/TypedTestP/unsignedChar0.Failure +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1U + Which is: 1 + TypeParam() + Which is: '\0' +Expected failure +Stack trace: (omitted) + +[0;31m[ FAILED ] [mUnsignedCustomName/TypedTestP/unsignedChar0.Failure, where TypeParam = unsigned char +[0;32m[----------] [m2 tests from UnsignedCustomName/TypedTestP/unsignedInt1, where TypeParam = unsigned int +[0;32m[ RUN ] [mUnsignedCustomName/TypedTestP/unsignedInt1.Success +[0;32m[ OK ] 
[mUnsignedCustomName/TypedTestP/unsignedInt1.Success +[0;32m[ RUN ] [mUnsignedCustomName/TypedTestP/unsignedInt1.Failure +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1U + Which is: 1 + TypeParam() + Which is: 0 +Expected failure +Stack trace: (omitted) + +[0;31m[ FAILED ] [mUnsignedCustomName/TypedTestP/unsignedInt1.Failure, where TypeParam = unsigned int +[0;32m[----------] [m4 tests from ExpectFailureTest +[0;32m[ RUN ] [mExpectFailureTest.ExpectFatalFailure +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: +googletest-output-test_.cc:#: Success: +Succeeded +Stack trace: (omitted) + + +Stack trace: (omitted) + +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: +googletest-output-test_.cc:#: Non-fatal failure: +Failed +Expected non-fatal failure. +Stack trace: (omitted) + + +Stack trace: (omitted) + +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 fatal failure containing "Some other fatal failure expected." + Actual: +googletest-output-test_.cc:#: Fatal failure: +Failed +Expected fatal failure. +Stack trace: (omitted) + + +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFailureTest.ExpectFatalFailure +[0;32m[ RUN ] [mExpectFailureTest.ExpectNonFatalFailure +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: +googletest-output-test_.cc:#: Success: +Succeeded +Stack trace: (omitted) + + +Stack trace: (omitted) + +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: +googletest-output-test_.cc:#: Fatal failure: +Failed +Expected fatal failure. +Stack trace: (omitted) + + +Stack trace: (omitted) + +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure containing "Some other non-fatal failure." + Actual: +googletest-output-test_.cc:#: Non-fatal failure: +Failed +Expected non-fatal failure. +Stack trace: (omitted) + + +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFailureTest.ExpectNonFatalFailure +[0;32m[ RUN ] [mExpectFailureTest.ExpectFatalFailureOnAllThreads +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: +googletest-output-test_.cc:#: Success: +Succeeded +Stack trace: (omitted) + + +Stack trace: (omitted) + +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: +googletest-output-test_.cc:#: Non-fatal failure: +Failed +Expected non-fatal failure. +Stack trace: (omitted) + + +Stack trace: (omitted) + +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 fatal failure containing "Some other fatal failure expected." + Actual: +googletest-output-test_.cc:#: Fatal failure: +Failed +Expected fatal failure. +Stack trace: (omitted) + + +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFailureTest.ExpectFatalFailureOnAllThreads +[0;32m[ RUN ] [mExpectFailureTest.ExpectNonFatalFailureOnAllThreads +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: +googletest-output-test_.cc:#: Success: +Succeeded +Stack trace: (omitted) + + +Stack trace: (omitted) + +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: +googletest-output-test_.cc:#: Fatal failure: +Failed +Expected fatal failure. +Stack trace: (omitted) + + +Stack trace: (omitted) + +(expecting 1 failure) +gtest.cc:#: Failure +Expected: 1 non-fatal failure containing "Some other non-fatal failure." + Actual: +googletest-output-test_.cc:#: Non-fatal failure: +Failed +Expected non-fatal failure. 
+Stack trace: (omitted) + + +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFailureTest.ExpectNonFatalFailureOnAllThreads +[0;32m[----------] [m2 tests from ExpectFailureWithThreadsTest +[0;32m[ RUN ] [mExpectFailureWithThreadsTest.ExpectFatalFailure +(expecting 2 failures) +googletest-output-test_.cc:#: Failure +Failed +Expected fatal failure. +Stack trace: (omitted) + +gtest.cc:#: Failure +Expected: 1 fatal failure + Actual: 0 failures +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFailureWithThreadsTest.ExpectFatalFailure +[0;32m[ RUN ] [mExpectFailureWithThreadsTest.ExpectNonFatalFailure +(expecting 2 failures) +googletest-output-test_.cc:#: Failure +Failed +Expected non-fatal failure. +Stack trace: (omitted) + +gtest.cc:#: Failure +Expected: 1 non-fatal failure + Actual: 0 failures +Stack trace: (omitted) + +[0;31m[ FAILED ] [mExpectFailureWithThreadsTest.ExpectNonFatalFailure +[0;32m[----------] [m1 test from ScopedFakeTestPartResultReporterTest +[0;32m[ RUN ] [mScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread +(expecting 2 failures) +googletest-output-test_.cc:#: Failure +Failed +Expected fatal failure. +Stack trace: (omitted) + +googletest-output-test_.cc:#: Failure +Failed +Expected non-fatal failure. +Stack trace: (omitted) + +[0;31m[ FAILED ] [mScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread +[0;32m[----------] [m2 tests from DynamicFixture +DynamicFixture::SetUpTestSuite +[0;32m[ RUN ] [mDynamicFixture.DynamicTestPass +DynamicFixture() +DynamicFixture::SetUp +DynamicFixture::TearDown +~DynamicFixture() +[0;32m[ OK ] [mDynamicFixture.DynamicTestPass +[0;32m[ RUN ] [mDynamicFixture.DynamicTestFail +DynamicFixture() +DynamicFixture::SetUp +googletest-output-test_.cc:#: Failure +Value of: Pass + Actual: false +Expected: true +Stack trace: (omitted) + +DynamicFixture::TearDown +~DynamicFixture() +[0;31m[ FAILED ] [mDynamicFixture.DynamicTestFail +DynamicFixture::TearDownTestSuite +[0;32m[----------] [m1 test from DynamicFixtureAnotherName +DynamicFixture::SetUpTestSuite +[0;32m[ RUN ] [mDynamicFixtureAnotherName.DynamicTestPass +DynamicFixture() +DynamicFixture::SetUp +DynamicFixture::TearDown +~DynamicFixture() +[0;32m[ OK ] [mDynamicFixtureAnotherName.DynamicTestPass +DynamicFixture::TearDownTestSuite +[0;32m[----------] [m2 tests from BadDynamicFixture1 +DynamicFixture::SetUpTestSuite +[0;32m[ RUN ] [mBadDynamicFixture1.FixtureBase +DynamicFixture() +DynamicFixture::SetUp +DynamicFixture::TearDown +~DynamicFixture() +[0;32m[ OK ] [mBadDynamicFixture1.FixtureBase +[0;32m[ RUN ] [mBadDynamicFixture1.TestBase +DynamicFixture() +gtest.cc:#: Failure +Failed +All tests in the same test suite must use the same test fixture +class, so mixing TEST_F and TEST in the same test suite is +illegal. In test suite BadDynamicFixture1, +test FixtureBase is defined using TEST_F but +test TestBase is defined using TEST. You probably +want to change the TEST to TEST_F or move it to another test +case. +Stack trace: (omitted) + +~DynamicFixture() +[0;31m[ FAILED ] [mBadDynamicFixture1.TestBase +DynamicFixture::TearDownTestSuite +[0;32m[----------] [m2 tests from BadDynamicFixture2 +DynamicFixture::SetUpTestSuite +[0;32m[ RUN ] [mBadDynamicFixture2.FixtureBase +DynamicFixture() +DynamicFixture::SetUp +DynamicFixture::TearDown +~DynamicFixture() +[0;32m[ OK ] [mBadDynamicFixture2.FixtureBase +[0;32m[ RUN ] [mBadDynamicFixture2.Derived +DynamicFixture() +gtest.cc:#: Failure +Failed +All tests in the same test suite must use the same test fixture +class. 
However, in test suite BadDynamicFixture2, +you defined test FixtureBase and test Derived +using two different test fixture classes. This can happen if +the two classes are from different namespaces or translation +units and have the same name. You should probably rename one +of the classes to put the tests into different test suites. +Stack trace: (omitted) + +~DynamicFixture() +[0;31m[ FAILED ] [mBadDynamicFixture2.Derived +DynamicFixture::TearDownTestSuite +[0;32m[----------] [m1 test from PrintingFailingParams/FailingParamTest +[0;32m[ RUN ] [mPrintingFailingParams/FailingParamTest.Fails/0 +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1 + GetParam() + Which is: 2 +Stack trace: (omitted) + +[0;31m[ FAILED ] [mPrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2 +[0;32m[----------] [m1 test from EmptyBasenameParamInst +[0;32m[ RUN ] [mEmptyBasenameParamInst.Passes/0 +[0;32m[ OK ] [mEmptyBasenameParamInst.Passes/0 +[0;32m[----------] [m2 tests from PrintingStrings/ParamTest +[0;32m[ RUN ] [mPrintingStrings/ParamTest.Success/a +[0;32m[ OK ] [mPrintingStrings/ParamTest.Success/a +[0;32m[ RUN ] [mPrintingStrings/ParamTest.Failure/a +googletest-output-test_.cc:#: Failure +Expected equality of these values: + "b" + GetParam() + Which is: "a" +Expected failure +Stack trace: (omitted) + +[0;31m[ FAILED ] [mPrintingStrings/ParamTest.Failure/a, where GetParam() = "a" +[0;32m[----------] [m3 tests from GoogleTestVerification +[0;32m[ RUN ] [mGoogleTestVerification.UninstantiatedParameterizedTestSuite<NoTests> +googletest-output-test_.cc:#: Failure +Parameterized test suite NoTests is instantiated via INSTANTIATE_TEST_SUITE_P, but no tests are defined via TEST_P . No test cases will run. + +Ideally, INSTANTIATE_TEST_SUITE_P should only ever be invoked from code that always depend on code that provides TEST_P. Failing to do so is often an indication of dead code, e.g. the last TEST_P was removed but the rest got left behind. + +To suppress this error for this test suite, insert the following line (in a non-header) in the namespace it is defined in: + +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(NoTests); +Stack trace: (omitted) + +[0;31m[ FAILED ] [mGoogleTestVerification.UninstantiatedParameterizedTestSuite<NoTests> +[0;32m[ RUN ] [mGoogleTestVerification.UninstantiatedParameterizedTestSuite<DetectNotInstantiatedTest> +googletest-output-test_.cc:#: Failure +Parameterized test suite DetectNotInstantiatedTest is defined via TEST_P, but never instantiated. None of the test cases will run. Either no INSTANTIATE_TEST_SUITE_P is provided or the only ones provided expand to nothing. + +Ideally, TEST_P definitions should only ever be included as part of binaries that intend to use them. (As opposed to, for example, being placed in a library that may be linked in to get other utilities.) + +To suppress this error for this test suite, insert the following line (in a non-header) in the namespace it is defined in: + +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(DetectNotInstantiatedTest); +Stack trace: (omitted) + +[0;31m[ FAILED ] [mGoogleTestVerification.UninstantiatedParameterizedTestSuite<DetectNotInstantiatedTest> +[0;32m[ RUN ] [mGoogleTestVerification.UninstantiatedTypeParameterizedTestSuite<DetectNotInstantiatedTypesTest> +googletest-output-test_.cc:#: Failure +Type parameterized test suite DetectNotInstantiatedTypesTest is defined via REGISTER_TYPED_TEST_SUITE_P, but never instantiated via INSTANTIATE_TYPED_TEST_SUITE_P. None of the test cases will run. 
+ +Ideally, TYPED_TEST_P definitions should only ever be included as part of binaries that intend to use them. (As opposed to, for example, being placed in a library that may be linked in to get other utilities.) + +To suppress this error for this test suite, insert the following line (in a non-header) in the namespace it is defined in: + +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(DetectNotInstantiatedTypesTest); +Stack trace: (omitted) + +[0;31m[ FAILED ] [mGoogleTestVerification.UninstantiatedTypeParameterizedTestSuite<DetectNotInstantiatedTypesTest> +[0;32m[----------] [mGlobal test environment tear-down +BarEnvironment::TearDown() called. +googletest-output-test_.cc:#: Failure +Failed +Expected non-fatal failure. +Stack trace: (omitted) + +FooEnvironment::TearDown() called. +googletest-output-test_.cc:#: Failure +Failed +Expected fatal failure. +Stack trace: (omitted) + +[0;32m[==========] [m88 tests from 41 test suites ran. +[0;32m[ PASSED ] [m31 tests. +[0;31m[ FAILED ] [m57 tests, listed below: +[0;31m[ FAILED ] [mNonfatalFailureTest.EscapesStringOperands +[0;31m[ FAILED ] [mNonfatalFailureTest.DiffForLongStrings +[0;31m[ FAILED ] [mFatalFailureTest.FatalFailureInSubroutine +[0;31m[ FAILED ] [mFatalFailureTest.FatalFailureInNestedSubroutine +[0;31m[ FAILED ] [mFatalFailureTest.NonfatalFailureInSubroutine +[0;31m[ FAILED ] [mLoggingTest.InterleavingLoggingAndAssertions +[0;31m[ FAILED ] [mSCOPED_TRACETest.AcceptedValues +[0;31m[ FAILED ] [mSCOPED_TRACETest.ObeysScopes +[0;31m[ FAILED ] [mSCOPED_TRACETest.WorksInLoop +[0;31m[ FAILED ] [mSCOPED_TRACETest.WorksInSubroutine +[0;31m[ FAILED ] [mSCOPED_TRACETest.CanBeNested +[0;31m[ FAILED ] [mSCOPED_TRACETest.CanBeRepeated +[0;31m[ FAILED ] [mSCOPED_TRACETest.WorksConcurrently +[0;31m[ FAILED ] [mScopedTraceTest.WithExplicitFileAndLine +[0;31m[ FAILED ] [mNonFatalFailureInFixtureConstructorTest.FailureInConstructor +[0;31m[ FAILED ] [mFatalFailureInFixtureConstructorTest.FailureInConstructor +[0;31m[ FAILED ] [mNonFatalFailureInSetUpTest.FailureInSetUp +[0;31m[ FAILED ] [mFatalFailureInSetUpTest.FailureInSetUp +[0;31m[ FAILED ] [mAddFailureAtTest.MessageContainsSpecifiedFileAndLineNumber +[0;31m[ FAILED ] [mGtestFailAtTest.MessageContainsSpecifiedFileAndLineNumber +[0;31m[ FAILED ] [mMixedUpTestSuiteTest.ThisShouldFail +[0;31m[ FAILED ] [mMixedUpTestSuiteTest.ThisShouldFailToo +[0;31m[ FAILED ] [mMixedUpTestSuiteWithSameTestNameTest.TheSecondTestWithThisNameShouldFail +[0;31m[ FAILED ] [mTEST_F_before_TEST_in_same_test_case.DefinedUsingTESTAndShouldFail +[0;31m[ FAILED ] [mTEST_before_TEST_F_in_same_test_case.DefinedUsingTEST_FAndShouldFail +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenThereIsNoNonfatalFailure +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenThereAreTwoNonfatalFailures +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenThereIsOneFatalFailure +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenStatementReturns +[0;31m[ FAILED ] [mExpectNonfatalFailureTest.FailsWhenStatementThrows +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenThereIsNoFatalFailure +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenThereAreTwoFatalFailures +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenThereIsOneNonfatalFailure +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenStatementReturns +[0;31m[ FAILED ] [mExpectFatalFailureTest.FailsWhenStatementThrows +[0;31m[ FAILED ] [mTypedTest/0.Failure, where TypeParam = int +[0;31m[ FAILED ] [mTypedTestWithNames/char0.Failure, where TypeParam = char +[0;31m[ FAILED ] 
[mTypedTestWithNames/int1.Failure, where TypeParam = int +[0;31m[ FAILED ] [mUnsigned/TypedTestP/0.Failure, where TypeParam = unsigned char +[0;31m[ FAILED ] [mUnsigned/TypedTestP/1.Failure, where TypeParam = unsigned int +[0;31m[ FAILED ] [mUnsignedCustomName/TypedTestP/unsignedChar0.Failure, where TypeParam = unsigned char +[0;31m[ FAILED ] [mUnsignedCustomName/TypedTestP/unsignedInt1.Failure, where TypeParam = unsigned int +[0;31m[ FAILED ] [mExpectFailureTest.ExpectFatalFailure +[0;31m[ FAILED ] [mExpectFailureTest.ExpectNonFatalFailure +[0;31m[ FAILED ] [mExpectFailureTest.ExpectFatalFailureOnAllThreads +[0;31m[ FAILED ] [mExpectFailureTest.ExpectNonFatalFailureOnAllThreads +[0;31m[ FAILED ] [mExpectFailureWithThreadsTest.ExpectFatalFailure +[0;31m[ FAILED ] [mExpectFailureWithThreadsTest.ExpectNonFatalFailure +[0;31m[ FAILED ] [mScopedFakeTestPartResultReporterTest.InterceptOnlyCurrentThread +[0;31m[ FAILED ] [mDynamicFixture.DynamicTestFail +[0;31m[ FAILED ] [mBadDynamicFixture1.TestBase +[0;31m[ FAILED ] [mBadDynamicFixture2.Derived +[0;31m[ FAILED ] [mPrintingFailingParams/FailingParamTest.Fails/0, where GetParam() = 2 +[0;31m[ FAILED ] [mPrintingStrings/ParamTest.Failure/a, where GetParam() = "a" +[0;31m[ FAILED ] [mGoogleTestVerification.UninstantiatedParameterizedTestSuite<NoTests> +[0;31m[ FAILED ] [mGoogleTestVerification.UninstantiatedParameterizedTestSuite<DetectNotInstantiatedTest> +[0;31m[ FAILED ] [mGoogleTestVerification.UninstantiatedTypeParameterizedTestSuite<DetectNotInstantiatedTypesTest> + +57 FAILED TESTS +[0;33m YOU HAVE 1 DISABLED TEST + +[mNote: Google Test filter = FatalFailureTest.*:LoggingTest.* +[==========] Running 4 tests from 2 test suites. +[----------] Global test environment set-up. +[----------] 3 tests from FatalFailureTest +[ RUN ] FatalFailureTest.FatalFailureInSubroutine +(expecting a failure that x should be 1) +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1 + x + Which is: 2 +Stack trace: (omitted) + +[ FAILED ] FatalFailureTest.FatalFailureInSubroutine (? ms) +[ RUN ] FatalFailureTest.FatalFailureInNestedSubroutine +(expecting a failure that x should be 1) +googletest-output-test_.cc:#: Failure +Expected equality of these values: + 1 + x + Which is: 2 +Stack trace: (omitted) + +[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms) +[ RUN ] FatalFailureTest.NonfatalFailureInSubroutine +(expecting a failure on false) +googletest-output-test_.cc:#: Failure +Value of: false + Actual: false +Expected: true +Stack trace: (omitted) + +[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine (? ms) +[----------] 3 tests from FatalFailureTest (? ms total) + +[----------] 1 test from LoggingTest +[ RUN ] LoggingTest.InterleavingLoggingAndAssertions +(expecting 2 failures on (3) >= (a[i])) +i == 0 +i == 1 +googletest-output-test_.cc:#: Failure +Expected: (3) >= (a[i]), actual: 3 vs 9 +Stack trace: (omitted) + +i == 2 +i == 3 +googletest-output-test_.cc:#: Failure +Expected: (3) >= (a[i]), actual: 3 vs 6 +Stack trace: (omitted) + +[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions (? ms) +[----------] 1 test from LoggingTest (? ms total) + +[----------] Global test environment tear-down +[==========] 4 tests from 2 test suites ran. (? ms total) +[ PASSED ] 0 tests. 
+[ FAILED ] 4 tests, listed below: +[ FAILED ] FatalFailureTest.FatalFailureInSubroutine +[ FAILED ] FatalFailureTest.FatalFailureInNestedSubroutine +[ FAILED ] FatalFailureTest.NonfatalFailureInSubroutine +[ FAILED ] LoggingTest.InterleavingLoggingAndAssertions + + 4 FAILED TESTS +Note: Google Test filter = *DISABLED_* +[==========] Running 1 test from 1 test suite. +[----------] Global test environment set-up. +[----------] 1 test from DisabledTestsWarningTest +[ RUN ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning +[ OK ] DisabledTestsWarningTest.DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning +[----------] Global test environment tear-down +[==========] 1 test from 1 test suite ran. +[ PASSED ] 1 test. +Note: Google Test filter = PassingTest.* +Note: This is test shard 2 of 2. +[==========] Running 1 test from 1 test suite. +[----------] Global test environment set-up. +[----------] 1 test from PassingTest +[ RUN ] PassingTest.PassingTest2 +[ OK ] PassingTest.PassingTest2 +[----------] Global test environment tear-down +[==========] 1 test from 1 test suite ran. +[ PASSED ] 1 test. diff --git a/security/nss/gtests/google_test/gtest/test/googletest-output-test.py b/security/nss/gtests/google_test/gtest/test/googletest-output-test.py new file mode 100755 index 0000000000..09028f66f9 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-output-test.py @@ -0,0 +1,346 @@ +#!/usr/bin/env python +# +# Copyright 2008, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +r"""Tests the text output of Google C++ Testing and Mocking Framework. + +To update the golden file: +googletest_output_test.py --build_dir=BUILD/DIR --gengolden +where BUILD/DIR contains the built googletest-output-test_ file. 
+googletest_output_test.py --gengolden +googletest_output_test.py +""" + +import difflib +import os +import re +import sys +import gtest_test_utils + + +# The flag for generating the golden file +GENGOLDEN_FLAG = '--gengolden' +CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS' + +# The flag indicating stacktraces are not supported +NO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support' + +IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux' +IS_WINDOWS = os.name == 'nt' + +GOLDEN_NAME = 'googletest-output-test-golden-lin.txt' + +PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('googletest-output-test_') + +# At least one command we exercise must not have the +# 'internal_skip_environment_and_ad_hoc_tests' argument. +COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests']) +COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes']) +COMMAND_WITH_TIME = ({}, [PROGRAM_PATH, + '--gtest_print_time', + 'internal_skip_environment_and_ad_hoc_tests', + '--gtest_filter=FatalFailureTest.*:LoggingTest.*']) +COMMAND_WITH_DISABLED = ( + {}, [PROGRAM_PATH, + '--gtest_also_run_disabled_tests', + 'internal_skip_environment_and_ad_hoc_tests', + '--gtest_filter=*DISABLED_*']) +COMMAND_WITH_SHARDING = ( + {'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'}, + [PROGRAM_PATH, + 'internal_skip_environment_and_ad_hoc_tests', + '--gtest_filter=PassingTest.*']) + +GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME) + + +def ToUnixLineEnding(s): + """Changes all Windows/Mac line endings in s to UNIX line endings.""" + + return s.replace('\r\n', '\n').replace('\r', '\n') + + +def RemoveLocations(test_output): + """Removes all file location info from a Google Test program's output. + + Args: + test_output: the output of a Google Test program. + + Returns: + output with all file location info (in the form of + 'DIRECTORY/FILE_NAME:LINE_NUMBER: 'or + 'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by + 'FILE_NAME:#: '. + """ + + return re.sub(r'.*[/\\]((googletest-output-test_|gtest).cc)(\:\d+|\(\d+\))\: ', + r'\1:#: ', test_output) + + +def RemoveStackTraceDetails(output): + """Removes all stack traces from a Google Test program's output.""" + + # *? means "find the shortest string that matches". + return re.sub(r'Stack trace:(.|\n)*?\n\n', + 'Stack trace: (omitted)\n\n', output) + + +def RemoveStackTraces(output): + """Removes all traces of stack traces from a Google Test program's output.""" + + # *? means "find the shortest string that matches". + return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output) + + +def RemoveTime(output): + """Removes all time information from a Google Test program's output.""" + + return re.sub(r'\(\d+ ms', '(? ms', output) + + +def RemoveTypeInfoDetails(test_output): + """Removes compiler-specific type info from Google Test program's output. + + Args: + test_output: the output of a Google Test program. + + Returns: + output with type information normalized to canonical form. + """ + + # some compilers output the name of type 'unsigned int' as 'unsigned' + return re.sub(r'unsigned int', 'unsigned', test_output) + + +def NormalizeToCurrentPlatform(test_output): + """Normalizes platform specific output details for easier comparison.""" + + if IS_WINDOWS: + # Removes the color information that is not present on Windows. + test_output = re.sub('\x1b\\[(0;3\d)?m', '', test_output) + # Changes failure message headers into the Windows format. 
+ test_output = re.sub(r': Failure\n', r': error: ', test_output) + # Changes file(line_number) to file:line_number. + test_output = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', test_output) + + return test_output + + +def RemoveTestCounts(output): + """Removes test counts from a Google Test program's output.""" + + output = re.sub(r'\d+ tests?, listed below', + '? tests, listed below', output) + output = re.sub(r'\d+ FAILED TESTS', + '? FAILED TESTS', output) + output = re.sub(r'\d+ tests? from \d+ test cases?', + '? tests from ? test cases', output) + output = re.sub(r'\d+ tests? from ([a-zA-Z_])', + r'? tests from \1', output) + return re.sub(r'\d+ tests?\.', '? tests.', output) + + +def RemoveMatchingTests(test_output, pattern): + """Removes output of specified tests from a Google Test program's output. + + This function strips not only the beginning and the end of a test but also + all output in between. + + Args: + test_output: A string containing the test output. + pattern: A regex string that matches names of test cases or + tests to remove. + + Returns: + Contents of test_output with tests whose names match pattern removed. + """ + + test_output = re.sub( + r'.*\[ RUN \] .*%s(.|\n)*?\[( FAILED | OK )\] .*%s.*\n' % ( + pattern, pattern), + '', + test_output) + return re.sub(r'.*%s.*\n' % pattern, '', test_output) + + +def NormalizeOutput(output): + """Normalizes output (the output of googletest-output-test_.exe).""" + + output = ToUnixLineEnding(output) + output = RemoveLocations(output) + output = RemoveStackTraceDetails(output) + output = RemoveTime(output) + return output + + +def GetShellCommandOutput(env_cmd): + """Runs a command in a sub-process, and returns its output in a string. + + Args: + env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra + environment variables to set, and element 1 is a string with + the command and any flags. + + Returns: + A string with the command's combined standard and diagnostic output. + """ + + # Spawns cmd in a sub-process, and gets its standard I/O file objects. + # Set and save the environment properly. + environ = os.environ.copy() + environ.update(env_cmd[0]) + p = gtest_test_utils.Subprocess(env_cmd[1], env=environ) + + return p.output + + +def GetCommandOutput(env_cmd): + """Runs a command and returns its output with all file location + info stripped off. + + Args: + env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra + environment variables to set, and element 1 is a string with + the command and any flags. + """ + + # Disables exception pop-ups on Windows. + environ, cmdline = env_cmd + environ = dict(environ) # Ensures we are modifying a copy. 
+ environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1' + return NormalizeOutput(GetShellCommandOutput((environ, cmdline))) + + +def GetOutputOfAllCommands(): + """Returns concatenated output from several representative commands.""" + + return (GetCommandOutput(COMMAND_WITH_COLOR) + + GetCommandOutput(COMMAND_WITH_TIME) + + GetCommandOutput(COMMAND_WITH_DISABLED) + + GetCommandOutput(COMMAND_WITH_SHARDING)) + + +test_list = GetShellCommandOutput(COMMAND_LIST_TESTS) +SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list +SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list +SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list +SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv + +CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and + SUPPORTS_TYPED_TESTS and + SUPPORTS_THREADS and + SUPPORTS_STACK_TRACES) + +class GTestOutputTest(gtest_test_utils.TestCase): + def RemoveUnsupportedTests(self, test_output): + if not SUPPORTS_DEATH_TESTS: + test_output = RemoveMatchingTests(test_output, 'DeathTest') + if not SUPPORTS_TYPED_TESTS: + test_output = RemoveMatchingTests(test_output, 'TypedTest') + test_output = RemoveMatchingTests(test_output, 'TypedDeathTest') + test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest') + if not SUPPORTS_THREADS: + test_output = RemoveMatchingTests(test_output, + 'ExpectFailureWithThreadsTest') + test_output = RemoveMatchingTests(test_output, + 'ScopedFakeTestPartResultReporterTest') + test_output = RemoveMatchingTests(test_output, + 'WorksConcurrently') + if not SUPPORTS_STACK_TRACES: + test_output = RemoveStackTraces(test_output) + + return test_output + + def testOutput(self): + output = GetOutputOfAllCommands() + + golden_file = open(GOLDEN_PATH, 'rb') + # A mis-configured source control system can cause \r appear in EOL + # sequences when we read the golden file irrespective of an operating + # system used. Therefore, we need to strip those \r's from newlines + # unconditionally. + golden = ToUnixLineEnding(golden_file.read().decode()) + golden_file.close() + + # We want the test to pass regardless of certain features being + # supported or not. + + # We still have to remove type name specifics in all cases. 
+ normalized_actual = RemoveTypeInfoDetails(output) + normalized_golden = RemoveTypeInfoDetails(golden) + + if CAN_GENERATE_GOLDEN_FILE: + self.assertEqual(normalized_golden, normalized_actual, + '\n'.join(difflib.unified_diff( + normalized_golden.split('\n'), + normalized_actual.split('\n'), + 'golden', 'actual'))) + else: + normalized_actual = NormalizeToCurrentPlatform( + RemoveTestCounts(normalized_actual)) + normalized_golden = NormalizeToCurrentPlatform( + RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden))) + + # This code is very handy when debugging golden file differences: + if os.getenv('DEBUG_GTEST_OUTPUT_TEST'): + open(os.path.join( + gtest_test_utils.GetSourceDir(), + '_googletest-output-test_normalized_actual.txt'), 'wb').write( + normalized_actual) + open(os.path.join( + gtest_test_utils.GetSourceDir(), + '_googletest-output-test_normalized_golden.txt'), 'wb').write( + normalized_golden) + + self.assertEqual(normalized_golden, normalized_actual) + + +if __name__ == '__main__': + if NO_STACKTRACE_SUPPORT_FLAG in sys.argv: + # unittest.main() can't handle unknown flags + sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG) + + if GENGOLDEN_FLAG in sys.argv: + if CAN_GENERATE_GOLDEN_FILE: + output = GetOutputOfAllCommands() + golden_file = open(GOLDEN_PATH, 'wb') + golden_file.write(output.encode()) + golden_file.close() + else: + message = ( + """Unable to write a golden file when compiled in an environment +that does not support all the required features (death tests, +typed tests, stack traces, and multiple threads). +Please build this test and generate the golden file using Blaze on Linux.""") + + sys.stderr.write(message) + sys.exit(1) + else: + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-output-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-output-test_.cc new file mode 100644 index 0000000000..074f64ef62 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-output-test_.cc @@ -0,0 +1,1108 @@ +// Copyright 2005, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// The purpose of this file is to generate Google Test output under
+// various conditions. The output will then be verified by
+// googletest-output-test.py to ensure that Google Test generates the
+// desired messages. Therefore, most tests in this file are MEANT TO
+// FAIL.
+
+#include "gtest/gtest-spi.h"
+#include "gtest/gtest.h"
+#include "src/gtest-internal-inl.h"
+
+#include <stdlib.h>
+
+#if _MSC_VER
+GTEST_DISABLE_MSC_WARNINGS_PUSH_(4127 /* conditional expression is constant */)
+#endif  // _MSC_VER
+
+#if GTEST_IS_THREADSAFE
+using testing::ScopedFakeTestPartResultReporter;
+using testing::TestPartResultArray;
+
+using testing::internal::Notification;
+using testing::internal::ThreadWithParam;
+#endif
+
+namespace posix = ::testing::internal::posix;
+
+// Tests catching fatal failures.
+
+// A subroutine used by the following test.
+void TestEq1(int x) {
+  ASSERT_EQ(1, x);
+}
+
+// This function calls a test subroutine, catches the fatal failure it
+// generates, and then returns early.
+void TryTestSubroutine() {
+  // Calls a subroutine that yields a fatal failure.
+  TestEq1(2);
+
+  // Catches the fatal failure and aborts the test.
+  //
+  // The testing::Test:: prefix is necessary when calling
+  // HasFatalFailure() outside of a TEST, TEST_F, or test fixture.
+  if (testing::Test::HasFatalFailure()) return;
+
+  // If we get here, something is wrong.
+  FAIL() << "This should never be reached.";
+}
+
+TEST(PassingTest, PassingTest1) {
+}
+
+TEST(PassingTest, PassingTest2) {
+}
+
+// Tests that parameters of failing parameterized tests are printed in the
+// failing test summary.
+class FailingParamTest : public testing::TestWithParam<int> {};
+
+TEST_P(FailingParamTest, Fails) {
+  EXPECT_EQ(1, GetParam());
+}
+
+// This generates a test which will fail. Google Test is expected to print
+// its parameter when it outputs the list of all failed tests.
+INSTANTIATE_TEST_SUITE_P(PrintingFailingParams,
+                         FailingParamTest,
+                         testing::Values(2));
+
+// Tests that an empty value for the test suite basename yields just
+// the test name without any prior /
+class EmptyBasenameParamInst : public testing::TestWithParam<int> {};
+
+TEST_P(EmptyBasenameParamInst, Passes) { EXPECT_EQ(1, GetParam()); }
+
+INSTANTIATE_TEST_SUITE_P(, EmptyBasenameParamInst, testing::Values(1));
+
+static const char kGoldenString[] = "\"Line\0 1\"\nLine 2";
+
+TEST(NonfatalFailureTest, EscapesStringOperands) {
+  std::string actual = "actual \"string\"";
+  EXPECT_EQ(kGoldenString, actual);
+
+  const char* golden = kGoldenString;
+  EXPECT_EQ(golden, actual);
+}
+
+TEST(NonfatalFailureTest, DiffForLongStrings) {
+  std::string golden_str(kGoldenString, sizeof(kGoldenString) - 1);
+  EXPECT_EQ(golden_str, "Line 2");
+}
+
+// Tests catching a fatal failure in a subroutine.
+TEST(FatalFailureTest, FatalFailureInSubroutine) {
+  printf("(expecting a failure that x should be 1)\n");
+
+  TryTestSubroutine();
+}
+
+// Tests catching a fatal failure in a nested subroutine.
+TEST(FatalFailureTest, FatalFailureInNestedSubroutine) {
+  printf("(expecting a failure that x should be 1)\n");
+
+  // Calls a subroutine that yields a fatal failure.
+  TryTestSubroutine();
+
+  // Catches the fatal failure and aborts the test.
+  //
+  // When calling HasFatalFailure() inside a TEST, TEST_F, or test
+  // fixture, the testing::Test:: prefix is not needed.
+  if (HasFatalFailure()) return;
+
+  // If we get here, something is wrong.
+  FAIL() << "This should never be reached.";
+}
+
+// Tests HasFatalFailure() after a failed EXPECT check.
+TEST(FatalFailureTest, NonfatalFailureInSubroutine) {
+  printf("(expecting a failure on false)\n");
+  EXPECT_TRUE(false);  // Generates a nonfatal failure
+  ASSERT_FALSE(HasFatalFailure());  // This should succeed.
+}
+
+// Tests interleaving user logging and Google Test assertions.
+TEST(LoggingTest, InterleavingLoggingAndAssertions) {
+  static const int a[4] = {
+    3, 9, 2, 6
+  };
+
+  printf("(expecting 2 failures on (3) >= (a[i]))\n");
+  for (int i = 0; i < static_cast<int>(sizeof(a)/sizeof(*a)); i++) {
+    printf("i == %d\n", i);
+    EXPECT_GE(3, a[i]);
+  }
+}
+
+// Tests the SCOPED_TRACE macro.
+
+// A helper function for testing SCOPED_TRACE.
+void SubWithoutTrace(int n) {
+  EXPECT_EQ(1, n);
+  ASSERT_EQ(2, n);
+}
+
+// Another helper function for testing SCOPED_TRACE.
+void SubWithTrace(int n) {
+  SCOPED_TRACE(testing::Message() << "n = " << n);
+
+  SubWithoutTrace(n);
+}
+
+TEST(SCOPED_TRACETest, AcceptedValues) {
+  SCOPED_TRACE("literal string");
+  SCOPED_TRACE(std::string("std::string"));
+  SCOPED_TRACE(1337);  // streamable type
+  const char* null_value = nullptr;
+  SCOPED_TRACE(null_value);
+
+  ADD_FAILURE() << "Just checking that all these values work fine.";
+}
+
+// Tests that SCOPED_TRACE() obeys lexical scopes.
+TEST(SCOPED_TRACETest, ObeysScopes) {
+  printf("(expected to fail)\n");
+
+  // There should be no trace before SCOPED_TRACE() is invoked.
+  ADD_FAILURE() << "This failure is expected, and shouldn't have a trace.";
+
+  {
+    SCOPED_TRACE("Expected trace");
+    // After SCOPED_TRACE(), a failure in the current scope should contain
+    // the trace.
+    ADD_FAILURE() << "This failure is expected, and should have a trace.";
+  }
+
+  // Once the control leaves the scope of the SCOPED_TRACE(), there
+  // should be no trace again.
+  ADD_FAILURE() << "This failure is expected, and shouldn't have a trace.";
+}
+
+// Tests that SCOPED_TRACE works inside a loop.
+TEST(SCOPED_TRACETest, WorksInLoop) {
+  printf("(expected to fail)\n");
+
+  for (int i = 1; i <= 2; i++) {
+    SCOPED_TRACE(testing::Message() << "i = " << i);
+
+    SubWithoutTrace(i);
+  }
+}
+
+// Tests that SCOPED_TRACE works in a subroutine.
+TEST(SCOPED_TRACETest, WorksInSubroutine) {
+  printf("(expected to fail)\n");
+
+  SubWithTrace(1);
+  SubWithTrace(2);
+}
+
+// Tests that SCOPED_TRACE can be nested.
+TEST(SCOPED_TRACETest, CanBeNested) {
+  printf("(expected to fail)\n");
+
+  SCOPED_TRACE("");  // A trace without a message.
+
+  SubWithTrace(2);
+}
+
+// Tests that multiple SCOPED_TRACEs can be used in the same scope.
+TEST(SCOPED_TRACETest, CanBeRepeated) { + printf("(expected to fail)\n"); + + SCOPED_TRACE("A"); + ADD_FAILURE() + << "This failure is expected, and should contain trace point A."; + + SCOPED_TRACE("B"); + ADD_FAILURE() + << "This failure is expected, and should contain trace point A and B."; + + { + SCOPED_TRACE("C"); + ADD_FAILURE() << "This failure is expected, and should " + << "contain trace point A, B, and C."; + } + + SCOPED_TRACE("D"); + ADD_FAILURE() << "This failure is expected, and should " + << "contain trace point A, B, and D."; +} + +#if GTEST_IS_THREADSAFE +// Tests that SCOPED_TRACE()s can be used concurrently from multiple +// threads. Namely, an assertion should be affected by +// SCOPED_TRACE()s in its own thread only. + +// Here's the sequence of actions that happen in the test: +// +// Thread A (main) | Thread B (spawned) +// ===============================|================================ +// spawns thread B | +// -------------------------------+-------------------------------- +// waits for n1 | SCOPED_TRACE("Trace B"); +// | generates failure #1 +// | notifies n1 +// -------------------------------+-------------------------------- +// SCOPED_TRACE("Trace A"); | waits for n2 +// generates failure #2 | +// notifies n2 | +// -------------------------------|-------------------------------- +// waits for n3 | generates failure #3 +// | trace B dies +// | generates failure #4 +// | notifies n3 +// -------------------------------|-------------------------------- +// generates failure #5 | finishes +// trace A dies | +// generates failure #6 | +// -------------------------------|-------------------------------- +// waits for thread B to finish | + +struct CheckPoints { + Notification n1; + Notification n2; + Notification n3; +}; + +static void ThreadWithScopedTrace(CheckPoints* check_points) { + { + SCOPED_TRACE("Trace B"); + ADD_FAILURE() + << "Expected failure #1 (in thread B, only trace B alive)."; + check_points->n1.Notify(); + check_points->n2.WaitForNotification(); + + ADD_FAILURE() + << "Expected failure #3 (in thread B, trace A & B both alive)."; + } // Trace B dies here. + ADD_FAILURE() + << "Expected failure #4 (in thread B, only trace A alive)."; + check_points->n3.Notify(); +} + +TEST(SCOPED_TRACETest, WorksConcurrently) { + printf("(expecting 6 failures)\n"); + + CheckPoints check_points; + ThreadWithParam<CheckPoints*> thread(&ThreadWithScopedTrace, &check_points, + nullptr); + check_points.n1.WaitForNotification(); + + { + SCOPED_TRACE("Trace A"); + ADD_FAILURE() + << "Expected failure #2 (in thread A, trace A & B both alive)."; + check_points.n2.Notify(); + check_points.n3.WaitForNotification(); + + ADD_FAILURE() + << "Expected failure #5 (in thread A, only trace A alive)."; + } // Trace A dies here. + ADD_FAILURE() + << "Expected failure #6 (in thread A, no trace alive)."; + thread.Join(); +} +#endif // GTEST_IS_THREADSAFE + +// Tests basic functionality of the ScopedTrace utility (most of its features +// are already tested in SCOPED_TRACETest). +TEST(ScopedTraceTest, WithExplicitFileAndLine) { + testing::ScopedTrace trace("explicit_file.cc", 123, "expected trace message"); + ADD_FAILURE() << "Check that the trace is attached to a particular location."; +} + +TEST(DisabledTestsWarningTest, + DISABLED_AlsoRunDisabledTestsFlagSuppressesWarning) { + // This test body is intentionally empty. 
Its sole purpose is for + // verifying that the --gtest_also_run_disabled_tests flag + // suppresses the "YOU HAVE 12 DISABLED TESTS" warning at the end of + // the test output. +} + +// Tests using assertions outside of TEST and TEST_F. +// +// This function creates two failures intentionally. +void AdHocTest() { + printf("The non-test part of the code is expected to have 2 failures.\n\n"); + EXPECT_TRUE(false); + EXPECT_EQ(2, 3); +} + +// Runs all TESTs, all TEST_Fs, and the ad hoc test. +int RunAllTests() { + AdHocTest(); + return RUN_ALL_TESTS(); +} + +// Tests non-fatal failures in the fixture constructor. +class NonFatalFailureInFixtureConstructorTest : public testing::Test { + protected: + NonFatalFailureInFixtureConstructorTest() { + printf("(expecting 5 failures)\n"); + ADD_FAILURE() << "Expected failure #1, in the test fixture c'tor."; + } + + ~NonFatalFailureInFixtureConstructorTest() override { + ADD_FAILURE() << "Expected failure #5, in the test fixture d'tor."; + } + + void SetUp() override { ADD_FAILURE() << "Expected failure #2, in SetUp()."; } + + void TearDown() override { + ADD_FAILURE() << "Expected failure #4, in TearDown."; + } +}; + +TEST_F(NonFatalFailureInFixtureConstructorTest, FailureInConstructor) { + ADD_FAILURE() << "Expected failure #3, in the test body."; +} + +// Tests fatal failures in the fixture constructor. +class FatalFailureInFixtureConstructorTest : public testing::Test { + protected: + FatalFailureInFixtureConstructorTest() { + printf("(expecting 2 failures)\n"); + Init(); + } + + ~FatalFailureInFixtureConstructorTest() override { + ADD_FAILURE() << "Expected failure #2, in the test fixture d'tor."; + } + + void SetUp() override { + ADD_FAILURE() << "UNEXPECTED failure in SetUp(). " + << "We should never get here, as the test fixture c'tor " + << "had a fatal failure."; + } + + void TearDown() override { + ADD_FAILURE() << "UNEXPECTED failure in TearDown(). " + << "We should never get here, as the test fixture c'tor " + << "had a fatal failure."; + } + + private: + void Init() { + FAIL() << "Expected failure #1, in the test fixture c'tor."; + } +}; + +TEST_F(FatalFailureInFixtureConstructorTest, FailureInConstructor) { + ADD_FAILURE() << "UNEXPECTED failure in the test body. " + << "We should never get here, as the test fixture c'tor " + << "had a fatal failure."; +} + +// Tests non-fatal failures in SetUp(). +class NonFatalFailureInSetUpTest : public testing::Test { + protected: + ~NonFatalFailureInSetUpTest() override { Deinit(); } + + void SetUp() override { + printf("(expecting 4 failures)\n"); + ADD_FAILURE() << "Expected failure #1, in SetUp()."; + } + + void TearDown() override { FAIL() << "Expected failure #3, in TearDown()."; } + + private: + void Deinit() { + FAIL() << "Expected failure #4, in the test fixture d'tor."; + } +}; + +TEST_F(NonFatalFailureInSetUpTest, FailureInSetUp) { + FAIL() << "Expected failure #2, in the test function."; +} + +// Tests fatal failures in SetUp(). +class FatalFailureInSetUpTest : public testing::Test { + protected: + ~FatalFailureInSetUpTest() override { Deinit(); } + + void SetUp() override { + printf("(expecting 3 failures)\n"); + FAIL() << "Expected failure #1, in SetUp()."; + } + + void TearDown() override { FAIL() << "Expected failure #2, in TearDown()."; } + + private: + void Deinit() { + FAIL() << "Expected failure #3, in the test fixture d'tor."; + } +}; + +TEST_F(FatalFailureInSetUpTest, FailureInSetUp) { + FAIL() << "UNEXPECTED failure in the test function. 
" + << "We should never get here, as SetUp() failed."; +} + +TEST(AddFailureAtTest, MessageContainsSpecifiedFileAndLineNumber) { + ADD_FAILURE_AT("foo.cc", 42) << "Expected nonfatal failure in foo.cc"; +} + +TEST(GtestFailAtTest, MessageContainsSpecifiedFileAndLineNumber) { + GTEST_FAIL_AT("foo.cc", 42) << "Expected fatal failure in foo.cc"; +} + +// The MixedUpTestSuiteTest test case verifies that Google Test will fail a +// test if it uses a different fixture class than what other tests in +// the same test case use. It deliberately contains two fixture +// classes with the same name but defined in different namespaces. + +// The MixedUpTestSuiteWithSameTestNameTest test case verifies that +// when the user defines two tests with the same test case name AND +// same test name (but in different namespaces), the second test will +// fail. + +namespace foo { + +class MixedUpTestSuiteTest : public testing::Test { +}; + +TEST_F(MixedUpTestSuiteTest, FirstTestFromNamespaceFoo) {} +TEST_F(MixedUpTestSuiteTest, SecondTestFromNamespaceFoo) {} + +class MixedUpTestSuiteWithSameTestNameTest : public testing::Test { +}; + +TEST_F(MixedUpTestSuiteWithSameTestNameTest, + TheSecondTestWithThisNameShouldFail) {} + +} // namespace foo + +namespace bar { + +class MixedUpTestSuiteTest : public testing::Test { +}; + +// The following two tests are expected to fail. We rely on the +// golden file to check that Google Test generates the right error message. +TEST_F(MixedUpTestSuiteTest, ThisShouldFail) {} +TEST_F(MixedUpTestSuiteTest, ThisShouldFailToo) {} + +class MixedUpTestSuiteWithSameTestNameTest : public testing::Test { +}; + +// Expected to fail. We rely on the golden file to check that Google Test +// generates the right error message. +TEST_F(MixedUpTestSuiteWithSameTestNameTest, + TheSecondTestWithThisNameShouldFail) {} + +} // namespace bar + +// The following two test cases verify that Google Test catches the user +// error of mixing TEST and TEST_F in the same test case. The first +// test case checks the scenario where TEST_F appears before TEST, and +// the second one checks where TEST appears before TEST_F. + +class TEST_F_before_TEST_in_same_test_case : public testing::Test { +}; + +TEST_F(TEST_F_before_TEST_in_same_test_case, DefinedUsingTEST_F) {} + +// Expected to fail. We rely on the golden file to check that Google Test +// generates the right error message. +TEST(TEST_F_before_TEST_in_same_test_case, DefinedUsingTESTAndShouldFail) {} + +class TEST_before_TEST_F_in_same_test_case : public testing::Test { +}; + +TEST(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST) {} + +// Expected to fail. We rely on the golden file to check that Google Test +// generates the right error message. +TEST_F(TEST_before_TEST_F_in_same_test_case, DefinedUsingTEST_FAndShouldFail) { +} + +// Used for testing EXPECT_NONFATAL_FAILURE() and EXPECT_FATAL_FAILURE(). +int global_integer = 0; + +// Tests that EXPECT_NONFATAL_FAILURE() can reference global variables. +TEST(ExpectNonfatalFailureTest, CanReferenceGlobalVariables) { + global_integer = 0; + EXPECT_NONFATAL_FAILURE({ + EXPECT_EQ(1, global_integer) << "Expected non-fatal failure."; + }, "Expected non-fatal failure."); +} + +// Tests that EXPECT_NONFATAL_FAILURE() can reference local variables +// (static or not). 
+TEST(ExpectNonfatalFailureTest, CanReferenceLocalVariables) { + int m = 0; + static int n; + n = 1; + EXPECT_NONFATAL_FAILURE({ + EXPECT_EQ(m, n) << "Expected non-fatal failure."; + }, "Expected non-fatal failure."); +} + +// Tests that EXPECT_NONFATAL_FAILURE() succeeds when there is exactly +// one non-fatal failure and no fatal failure. +TEST(ExpectNonfatalFailureTest, SucceedsWhenThereIsOneNonfatalFailure) { + EXPECT_NONFATAL_FAILURE({ + ADD_FAILURE() << "Expected non-fatal failure."; + }, "Expected non-fatal failure."); +} + +// Tests that EXPECT_NONFATAL_FAILURE() fails when there is no +// non-fatal failure. +TEST(ExpectNonfatalFailureTest, FailsWhenThereIsNoNonfatalFailure) { + printf("(expecting a failure)\n"); + EXPECT_NONFATAL_FAILURE({ + }, ""); +} + +// Tests that EXPECT_NONFATAL_FAILURE() fails when there are two +// non-fatal failures. +TEST(ExpectNonfatalFailureTest, FailsWhenThereAreTwoNonfatalFailures) { + printf("(expecting a failure)\n"); + EXPECT_NONFATAL_FAILURE({ + ADD_FAILURE() << "Expected non-fatal failure 1."; + ADD_FAILURE() << "Expected non-fatal failure 2."; + }, ""); +} + +// Tests that EXPECT_NONFATAL_FAILURE() fails when there is one fatal +// failure. +TEST(ExpectNonfatalFailureTest, FailsWhenThereIsOneFatalFailure) { + printf("(expecting a failure)\n"); + EXPECT_NONFATAL_FAILURE({ + FAIL() << "Expected fatal failure."; + }, ""); +} + +// Tests that EXPECT_NONFATAL_FAILURE() fails when the statement being +// tested returns. +TEST(ExpectNonfatalFailureTest, FailsWhenStatementReturns) { + printf("(expecting a failure)\n"); + EXPECT_NONFATAL_FAILURE({ + return; + }, ""); +} + +#if GTEST_HAS_EXCEPTIONS + +// Tests that EXPECT_NONFATAL_FAILURE() fails when the statement being +// tested throws. +TEST(ExpectNonfatalFailureTest, FailsWhenStatementThrows) { + printf("(expecting a failure)\n"); + try { + EXPECT_NONFATAL_FAILURE({ + throw 0; + }, ""); + } catch(int) { // NOLINT + } +} + +#endif // GTEST_HAS_EXCEPTIONS + +// Tests that EXPECT_FATAL_FAILURE() can reference global variables. +TEST(ExpectFatalFailureTest, CanReferenceGlobalVariables) { + global_integer = 0; + EXPECT_FATAL_FAILURE({ + ASSERT_EQ(1, global_integer) << "Expected fatal failure."; + }, "Expected fatal failure."); +} + +// Tests that EXPECT_FATAL_FAILURE() can reference local static +// variables. +TEST(ExpectFatalFailureTest, CanReferenceLocalStaticVariables) { + static int n; + n = 1; + EXPECT_FATAL_FAILURE({ + ASSERT_EQ(0, n) << "Expected fatal failure."; + }, "Expected fatal failure."); +} + +// Tests that EXPECT_FATAL_FAILURE() succeeds when there is exactly +// one fatal failure and no non-fatal failure. +TEST(ExpectFatalFailureTest, SucceedsWhenThereIsOneFatalFailure) { + EXPECT_FATAL_FAILURE({ + FAIL() << "Expected fatal failure."; + }, "Expected fatal failure."); +} + +// Tests that EXPECT_FATAL_FAILURE() fails when there is no fatal +// failure. +TEST(ExpectFatalFailureTest, FailsWhenThereIsNoFatalFailure) { + printf("(expecting a failure)\n"); + EXPECT_FATAL_FAILURE({ + }, ""); +} + +// A helper for generating a fatal failure. +void FatalFailure() { + FAIL() << "Expected fatal failure."; +} + +// Tests that EXPECT_FATAL_FAILURE() fails when there are two +// fatal failures. +TEST(ExpectFatalFailureTest, FailsWhenThereAreTwoFatalFailures) { + printf("(expecting a failure)\n"); + EXPECT_FATAL_FAILURE({ + FatalFailure(); + FatalFailure(); + }, ""); +} + +// Tests that EXPECT_FATAL_FAILURE() fails when there is one non-fatal +// failure. 
+TEST(ExpectFatalFailureTest, FailsWhenThereIsOneNonfatalFailure) { + printf("(expecting a failure)\n"); + EXPECT_FATAL_FAILURE({ + ADD_FAILURE() << "Expected non-fatal failure."; + }, ""); +} + +// Tests that EXPECT_FATAL_FAILURE() fails when the statement being +// tested returns. +TEST(ExpectFatalFailureTest, FailsWhenStatementReturns) { + printf("(expecting a failure)\n"); + EXPECT_FATAL_FAILURE({ + return; + }, ""); +} + +#if GTEST_HAS_EXCEPTIONS + +// Tests that EXPECT_FATAL_FAILURE() fails when the statement being +// tested throws. +TEST(ExpectFatalFailureTest, FailsWhenStatementThrows) { + printf("(expecting a failure)\n"); + try { + EXPECT_FATAL_FAILURE({ + throw 0; + }, ""); + } catch(int) { // NOLINT + } +} + +#endif // GTEST_HAS_EXCEPTIONS + +// This #ifdef block tests the output of value-parameterized tests. + +std::string ParamNameFunc(const testing::TestParamInfo<std::string>& info) { + return info.param; +} + +class ParamTest : public testing::TestWithParam<std::string> { +}; + +TEST_P(ParamTest, Success) { + EXPECT_EQ("a", GetParam()); +} + +TEST_P(ParamTest, Failure) { + EXPECT_EQ("b", GetParam()) << "Expected failure"; +} + +INSTANTIATE_TEST_SUITE_P(PrintingStrings, + ParamTest, + testing::Values(std::string("a")), + ParamNameFunc); + +// The case where a suite has INSTANTIATE_TEST_SUITE_P but not TEST_P. +using NoTests = ParamTest; +INSTANTIATE_TEST_SUITE_P(ThisIsOdd, NoTests, ::testing::Values("Hello")); + +// fails under kErrorOnUninstantiatedParameterizedTest=true +class DetectNotInstantiatedTest : public testing::TestWithParam<int> {}; +TEST_P(DetectNotInstantiatedTest, Used) { } + +// This would make the test failure from the above go away. +// INSTANTIATE_TEST_SUITE_P(Fix, DetectNotInstantiatedTest, testing::Values(1)); + +template <typename T> +class TypedTest : public testing::Test { +}; + +TYPED_TEST_SUITE(TypedTest, testing::Types<int>); + +TYPED_TEST(TypedTest, Success) { + EXPECT_EQ(0, TypeParam()); +} + +TYPED_TEST(TypedTest, Failure) { + EXPECT_EQ(1, TypeParam()) << "Expected failure"; +} + +typedef testing::Types<char, int> TypesForTestWithNames; + +template <typename T> +class TypedTestWithNames : public testing::Test {}; + +class TypedTestNames { + public: + template <typename T> + static std::string GetName(int i) { + if (std::is_same<T, char>::value) + return std::string("char") + ::testing::PrintToString(i); + if (std::is_same<T, int>::value) + return std::string("int") + ::testing::PrintToString(i); + } +}; + +TYPED_TEST_SUITE(TypedTestWithNames, TypesForTestWithNames, TypedTestNames); + +TYPED_TEST(TypedTestWithNames, Success) {} + +TYPED_TEST(TypedTestWithNames, Failure) { FAIL(); } + +template <typename T> +class TypedTestP : public testing::Test { +}; + +TYPED_TEST_SUITE_P(TypedTestP); + +TYPED_TEST_P(TypedTestP, Success) { + EXPECT_EQ(0U, TypeParam()); +} + +TYPED_TEST_P(TypedTestP, Failure) { + EXPECT_EQ(1U, TypeParam()) << "Expected failure"; +} + +REGISTER_TYPED_TEST_SUITE_P(TypedTestP, Success, Failure); + +typedef testing::Types<unsigned char, unsigned int> UnsignedTypes; +INSTANTIATE_TYPED_TEST_SUITE_P(Unsigned, TypedTestP, UnsignedTypes); + +class TypedTestPNames { + public: + template <typename T> + static std::string GetName(int i) { + if (std::is_same<T, unsigned char>::value) { + return std::string("unsignedChar") + ::testing::PrintToString(i); + } + if (std::is_same<T, unsigned int>::value) { + return std::string("unsignedInt") + ::testing::PrintToString(i); + } + } +}; + +INSTANTIATE_TYPED_TEST_SUITE_P(UnsignedCustomName, 
TypedTestP, UnsignedTypes, + TypedTestPNames); + +template <typename T> +class DetectNotInstantiatedTypesTest : public testing::Test {}; +TYPED_TEST_SUITE_P(DetectNotInstantiatedTypesTest); +TYPED_TEST_P(DetectNotInstantiatedTypesTest, Used) { + TypeParam instantiate; + (void)instantiate; +} +REGISTER_TYPED_TEST_SUITE_P(DetectNotInstantiatedTypesTest, Used); + +// kErrorOnUninstantiatedTypeParameterizedTest=true would make the above fail. +// Adding the following would make that test failure go away. +// +// typedef ::testing::Types<char, int, unsigned int> MyTypes; +// INSTANTIATE_TYPED_TEST_SUITE_P(All, DetectNotInstantiatedTypesTest, MyTypes); + +#if GTEST_HAS_DEATH_TEST + +// We rely on the golden file to verify that tests whose test case +// name ends with DeathTest are run first. + +TEST(ADeathTest, ShouldRunFirst) { +} + +// We rely on the golden file to verify that typed tests whose test +// case name ends with DeathTest are run first. + +template <typename T> +class ATypedDeathTest : public testing::Test { +}; + +typedef testing::Types<int, double> NumericTypes; +TYPED_TEST_SUITE(ATypedDeathTest, NumericTypes); + +TYPED_TEST(ATypedDeathTest, ShouldRunFirst) { +} + + +// We rely on the golden file to verify that type-parameterized tests +// whose test case name ends with DeathTest are run first. + +template <typename T> +class ATypeParamDeathTest : public testing::Test { +}; + +TYPED_TEST_SUITE_P(ATypeParamDeathTest); + +TYPED_TEST_P(ATypeParamDeathTest, ShouldRunFirst) { +} + +REGISTER_TYPED_TEST_SUITE_P(ATypeParamDeathTest, ShouldRunFirst); + +INSTANTIATE_TYPED_TEST_SUITE_P(My, ATypeParamDeathTest, NumericTypes); + +#endif // GTEST_HAS_DEATH_TEST + +// Tests various failure conditions of +// EXPECT_{,NON}FATAL_FAILURE{,_ON_ALL_THREADS}. +class ExpectFailureTest : public testing::Test { + public: // Must be public and not protected due to a bug in g++ 3.4.2. + enum FailureMode { + FATAL_FAILURE, + NONFATAL_FAILURE + }; + static void AddFailure(FailureMode failure) { + if (failure == FATAL_FAILURE) { + FAIL() << "Expected fatal failure."; + } else { + ADD_FAILURE() << "Expected non-fatal failure."; + } + } +}; + +TEST_F(ExpectFailureTest, ExpectFatalFailure) { + // Expected fatal failure, but succeeds. + printf("(expecting 1 failure)\n"); + EXPECT_FATAL_FAILURE(SUCCEED(), "Expected fatal failure."); + // Expected fatal failure, but got a non-fatal failure. + printf("(expecting 1 failure)\n"); + EXPECT_FATAL_FAILURE(AddFailure(NONFATAL_FAILURE), "Expected non-fatal " + "failure."); + // Wrong message. + printf("(expecting 1 failure)\n"); + EXPECT_FATAL_FAILURE(AddFailure(FATAL_FAILURE), "Some other fatal failure " + "expected."); +} + +TEST_F(ExpectFailureTest, ExpectNonFatalFailure) { + // Expected non-fatal failure, but succeeds. + printf("(expecting 1 failure)\n"); + EXPECT_NONFATAL_FAILURE(SUCCEED(), "Expected non-fatal failure."); + // Expected non-fatal failure, but got a fatal failure. + printf("(expecting 1 failure)\n"); + EXPECT_NONFATAL_FAILURE(AddFailure(FATAL_FAILURE), "Expected fatal failure."); + // Wrong message. 
+ printf("(expecting 1 failure)\n"); + EXPECT_NONFATAL_FAILURE(AddFailure(NONFATAL_FAILURE), "Some other non-fatal " + "failure."); +} + +#if GTEST_IS_THREADSAFE + +class ExpectFailureWithThreadsTest : public ExpectFailureTest { + protected: + static void AddFailureInOtherThread(FailureMode failure) { + ThreadWithParam<FailureMode> thread(&AddFailure, failure, nullptr); + thread.Join(); + } +}; + +TEST_F(ExpectFailureWithThreadsTest, ExpectFatalFailure) { + // We only intercept the current thread. + printf("(expecting 2 failures)\n"); + EXPECT_FATAL_FAILURE(AddFailureInOtherThread(FATAL_FAILURE), + "Expected fatal failure."); +} + +TEST_F(ExpectFailureWithThreadsTest, ExpectNonFatalFailure) { + // We only intercept the current thread. + printf("(expecting 2 failures)\n"); + EXPECT_NONFATAL_FAILURE(AddFailureInOtherThread(NONFATAL_FAILURE), + "Expected non-fatal failure."); +} + +typedef ExpectFailureWithThreadsTest ScopedFakeTestPartResultReporterTest; + +// Tests that the ScopedFakeTestPartResultReporter only catches failures from +// the current thread if it is instantiated with INTERCEPT_ONLY_CURRENT_THREAD. +TEST_F(ScopedFakeTestPartResultReporterTest, InterceptOnlyCurrentThread) { + printf("(expecting 2 failures)\n"); + TestPartResultArray results; + { + ScopedFakeTestPartResultReporter reporter( + ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD, + &results); + AddFailureInOtherThread(FATAL_FAILURE); + AddFailureInOtherThread(NONFATAL_FAILURE); + } + // The two failures should not have been intercepted. + EXPECT_EQ(0, results.size()) << "This shouldn't fail."; +} + +#endif // GTEST_IS_THREADSAFE + +TEST_F(ExpectFailureTest, ExpectFatalFailureOnAllThreads) { + // Expected fatal failure, but succeeds. + printf("(expecting 1 failure)\n"); + EXPECT_FATAL_FAILURE_ON_ALL_THREADS(SUCCEED(), "Expected fatal failure."); + // Expected fatal failure, but got a non-fatal failure. + printf("(expecting 1 failure)\n"); + EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailure(NONFATAL_FAILURE), + "Expected non-fatal failure."); + // Wrong message. + printf("(expecting 1 failure)\n"); + EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailure(FATAL_FAILURE), + "Some other fatal failure expected."); +} + +TEST_F(ExpectFailureTest, ExpectNonFatalFailureOnAllThreads) { + // Expected non-fatal failure, but succeeds. + printf("(expecting 1 failure)\n"); + EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(SUCCEED(), "Expected non-fatal " + "failure."); + // Expected non-fatal failure, but got a fatal failure. + printf("(expecting 1 failure)\n"); + EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddFailure(FATAL_FAILURE), + "Expected fatal failure."); + // Wrong message. + printf("(expecting 1 failure)\n"); + EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddFailure(NONFATAL_FAILURE), + "Some other non-fatal failure."); +} + +class DynamicFixture : public testing::Test { + protected: + DynamicFixture() { printf("DynamicFixture()\n"); } + ~DynamicFixture() override { printf("~DynamicFixture()\n"); } + void SetUp() override { printf("DynamicFixture::SetUp\n"); } + void TearDown() override { printf("DynamicFixture::TearDown\n"); } + + static void SetUpTestSuite() { printf("DynamicFixture::SetUpTestSuite\n"); } + static void TearDownTestSuite() { + printf("DynamicFixture::TearDownTestSuite\n"); + } +}; + +template <bool Pass> +class DynamicTest : public DynamicFixture { + public: + void TestBody() override { EXPECT_TRUE(Pass); } +}; + +auto dynamic_test = ( + // Register two tests with the same fixture correctly. 
+ testing::RegisterTest( + "DynamicFixture", "DynamicTestPass", nullptr, nullptr, __FILE__, + __LINE__, []() -> DynamicFixture* { return new DynamicTest<true>; }), + testing::RegisterTest( + "DynamicFixture", "DynamicTestFail", nullptr, nullptr, __FILE__, + __LINE__, []() -> DynamicFixture* { return new DynamicTest<false>; }), + + // Register the same fixture with another name. That's fine. + testing::RegisterTest( + "DynamicFixtureAnotherName", "DynamicTestPass", nullptr, nullptr, + __FILE__, __LINE__, + []() -> DynamicFixture* { return new DynamicTest<true>; }), + + // Register two tests with the same fixture incorrectly. + testing::RegisterTest( + "BadDynamicFixture1", "FixtureBase", nullptr, nullptr, __FILE__, + __LINE__, []() -> DynamicFixture* { return new DynamicTest<true>; }), + testing::RegisterTest( + "BadDynamicFixture1", "TestBase", nullptr, nullptr, __FILE__, __LINE__, + []() -> testing::Test* { return new DynamicTest<true>; }), + + // Register two tests with the same fixture incorrectly by omitting the + // return type. + testing::RegisterTest( + "BadDynamicFixture2", "FixtureBase", nullptr, nullptr, __FILE__, + __LINE__, []() -> DynamicFixture* { return new DynamicTest<true>; }), + testing::RegisterTest("BadDynamicFixture2", "Derived", nullptr, nullptr, + __FILE__, __LINE__, + []() { return new DynamicTest<true>; })); + +// Two test environments for testing testing::AddGlobalTestEnvironment(). + +class FooEnvironment : public testing::Environment { + public: + void SetUp() override { printf("%s", "FooEnvironment::SetUp() called.\n"); } + + void TearDown() override { + printf("%s", "FooEnvironment::TearDown() called.\n"); + FAIL() << "Expected fatal failure."; + } +}; + +class BarEnvironment : public testing::Environment { + public: + void SetUp() override { printf("%s", "BarEnvironment::SetUp() called.\n"); } + + void TearDown() override { + printf("%s", "BarEnvironment::TearDown() called.\n"); + ADD_FAILURE() << "Expected non-fatal failure."; + } +}; + +// The main function. +// +// The idea is to use Google Test to run all the tests we have defined (some +// of them are intended to fail), and then compare the test results +// with the "golden" file. +int main(int argc, char **argv) { + testing::GTEST_FLAG(print_time) = false; + + // We just run the tests, knowing some of them are intended to fail. + // We will use a separate Python script to compare the output of + // this program with the golden file. + + // It's hard to test InitGoogleTest() directly, as it has many + // global side effects. The following line serves as a sanity test + // for it. + testing::InitGoogleTest(&argc, argv); + bool internal_skip_environment_and_ad_hoc_tests = + std::count(argv, argv + argc, + std::string("internal_skip_environment_and_ad_hoc_tests")) > 0; + +#if GTEST_HAS_DEATH_TEST + if (testing::internal::GTEST_FLAG(internal_run_death_test) != "") { + // Skip the usual output capturing if we're running as the child + // process of an threadsafe-style death test. +# if GTEST_OS_WINDOWS + posix::FReopen("nul:", "w", stdout); +# else + posix::FReopen("/dev/null", "w", stdout); +# endif // GTEST_OS_WINDOWS + return RUN_ALL_TESTS(); + } +#endif // GTEST_HAS_DEATH_TEST + + if (internal_skip_environment_and_ad_hoc_tests) + return RUN_ALL_TESTS(); + + // Registers two global test environments. + // The golden file verifies that they are set up in the order they + // are registered, and torn down in the reverse order. 
+ testing::AddGlobalTestEnvironment(new FooEnvironment); + testing::AddGlobalTestEnvironment(new BarEnvironment); +#if _MSC_VER +GTEST_DISABLE_MSC_WARNINGS_POP_() // 4127 +#endif // _MSC_VER + return RunAllTests(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name1-test.py b/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name1-test.py new file mode 100644 index 0000000000..2a08477a77 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name1-test.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# +# Copyright 2015 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Verifies that Google Test warns the user when not initialized properly.""" + +import gtest_test_utils + +binary_name = 'googletest-param-test-invalid-name1-test_' +COMMAND = gtest_test_utils.GetTestExecutablePath(binary_name) + + +def Assert(condition): + if not condition: + raise AssertionError + + +def TestExitCodeAndOutput(command): + """Runs the given command and verifies its exit code and output.""" + + err = ('Parameterized test name \'"InvalidWithQuotes"\' is invalid') + + p = gtest_test_utils.Subprocess(command) + Assert(p.terminated_by_signal) + + # Verify the output message contains appropriate output + Assert(err in p.output) + + +class GTestParamTestInvalidName1Test(gtest_test_utils.TestCase): + + def testExitCodeAndOutput(self): + TestExitCodeAndOutput(COMMAND) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name1-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name1-test_.cc new file mode 100644 index 0000000000..955d699900 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name1-test_.cc @@ -0,0 +1,50 @@ +// Copyright 2015, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +#include "gtest/gtest.h" + +namespace { +class DummyTest : public ::testing::TestWithParam<const char *> {}; + +TEST_P(DummyTest, Dummy) { +} + +INSTANTIATE_TEST_SUITE_P(InvalidTestName, + DummyTest, + ::testing::Values("InvalidWithQuotes"), + ::testing::PrintToStringParamName()); + +} // namespace + +int main(int argc, char *argv[]) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} + diff --git a/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name2-test.py b/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name2-test.py new file mode 100644 index 0000000000..ab838f4632 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name2-test.py @@ -0,0 +1,62 @@ +#!/usr/bin/env python +# +# Copyright 2015 Google Inc. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Verifies that Google Test warns the user when not initialized properly.""" + +import gtest_test_utils + +binary_name = 'googletest-param-test-invalid-name2-test_' +COMMAND = gtest_test_utils.GetTestExecutablePath(binary_name) + + +def Assert(condition): + if not condition: + raise AssertionError + + +def TestExitCodeAndOutput(command): + """Runs the given command and verifies its exit code and output.""" + + err = ('Duplicate parameterized test name \'a\'') + + p = gtest_test_utils.Subprocess(command) + Assert(p.terminated_by_signal) + + # Check for appropriate output + Assert(err in p.output) + + +class GTestParamTestInvalidName2Test(gtest_test_utils.TestCase): + + def testExitCodeAndOutput(self): + TestExitCodeAndOutput(COMMAND) + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name2-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name2-test_.cc new file mode 100644 index 0000000000..76371df54f --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-param-test-invalid-name2-test_.cc @@ -0,0 +1,55 @@ +// Copyright 2015, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + +#include "gtest/gtest.h" + +namespace { +class DummyTest : public ::testing::TestWithParam<const char *> {}; + +std::string StringParamTestSuffix( + const testing::TestParamInfo<const char*>& info) { + return std::string(info.param); +} + +TEST_P(DummyTest, Dummy) { +} + +INSTANTIATE_TEST_SUITE_P(DuplicateTestNames, + DummyTest, + ::testing::Values("a", "b", "a", "c"), + StringParamTestSuffix); +} // namespace + +int main(int argc, char *argv[]) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} + + diff --git a/security/nss/gtests/google_test/gtest/test/googletest-param-test-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-param-test-test.cc new file mode 100644 index 0000000000..023aa46d69 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-param-test-test.cc @@ -0,0 +1,1119 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Tests for Google Test itself. This file verifies that the parameter +// generators objects produce correct parameter sequences and that +// Google Test runtime instantiates correct tests from those sequences. + +#include "gtest/gtest.h" + +# include <algorithm> +# include <iostream> +# include <list> +# include <set> +# include <sstream> +# include <string> +# include <vector> + +# include "src/gtest-internal-inl.h" // for UnitTestOptions +# include "test/googletest-param-test-test.h" + +using ::std::vector; +using ::std::sort; + +using ::testing::AddGlobalTestEnvironment; +using ::testing::Bool; +using ::testing::Combine; +using ::testing::Message; +using ::testing::Range; +using ::testing::TestWithParam; +using ::testing::Values; +using ::testing::ValuesIn; + +using ::testing::internal::ParamGenerator; +using ::testing::internal::UnitTestOptions; + +// Prints a value to a string. +// +// FIXME: remove PrintValue() when we move matchers and +// EXPECT_THAT() from Google Mock to Google Test. 
At that time, we +// can write EXPECT_THAT(x, Eq(y)) to compare two tuples x and y, as +// EXPECT_THAT() and the matchers know how to print tuples. +template <typename T> +::std::string PrintValue(const T& value) { + return testing::PrintToString(value); +} + +// Verifies that a sequence generated by the generator and accessed +// via the iterator object matches the expected one using Google Test +// assertions. +template <typename T, size_t N> +void VerifyGenerator(const ParamGenerator<T>& generator, + const T (&expected_values)[N]) { + typename ParamGenerator<T>::iterator it = generator.begin(); + for (size_t i = 0; i < N; ++i) { + ASSERT_FALSE(it == generator.end()) + << "At element " << i << " when accessing via an iterator " + << "created with the copy constructor.\n"; + // We cannot use EXPECT_EQ() here as the values may be tuples, + // which don't support <<. + EXPECT_TRUE(expected_values[i] == *it) + << "where i is " << i + << ", expected_values[i] is " << PrintValue(expected_values[i]) + << ", *it is " << PrintValue(*it) + << ", and 'it' is an iterator created with the copy constructor.\n"; + ++it; + } + EXPECT_TRUE(it == generator.end()) + << "At the presumed end of sequence when accessing via an iterator " + << "created with the copy constructor.\n"; + + // Test the iterator assignment. The following lines verify that + // the sequence accessed via an iterator initialized via the + // assignment operator (as opposed to a copy constructor) matches + // just the same. + it = generator.begin(); + for (size_t i = 0; i < N; ++i) { + ASSERT_FALSE(it == generator.end()) + << "At element " << i << " when accessing via an iterator " + << "created with the assignment operator.\n"; + EXPECT_TRUE(expected_values[i] == *it) + << "where i is " << i + << ", expected_values[i] is " << PrintValue(expected_values[i]) + << ", *it is " << PrintValue(*it) + << ", and 'it' is an iterator created with the copy constructor.\n"; + ++it; + } + EXPECT_TRUE(it == generator.end()) + << "At the presumed end of sequence when accessing via an iterator " + << "created with the assignment operator.\n"; +} + +template <typename T> +void VerifyGeneratorIsEmpty(const ParamGenerator<T>& generator) { + typename ParamGenerator<T>::iterator it = generator.begin(); + EXPECT_TRUE(it == generator.end()); + + it = generator.begin(); + EXPECT_TRUE(it == generator.end()); +} + +// Generator tests. They test that each of the provided generator functions +// generates an expected sequence of values. The general test pattern +// instantiates a generator using one of the generator functions, +// checks the sequence produced by the generator using its iterator API, +// and then resets the iterator back to the beginning of the sequence +// and checks the sequence again. + +// Tests that iterators produced by generator functions conform to the +// ForwardIterator concept. +TEST(IteratorTest, ParamIteratorConformsToForwardIteratorConcept) { + const ParamGenerator<int> gen = Range(0, 10); + ParamGenerator<int>::iterator it = gen.begin(); + + // Verifies that iterator initialization works as expected. + ParamGenerator<int>::iterator it2 = it; + EXPECT_TRUE(*it == *it2) << "Initialized iterators must point to the " + << "element same as its source points to"; + + // Verifies that iterator assignment works as expected. + ++it; + EXPECT_FALSE(*it == *it2); + it2 = it; + EXPECT_TRUE(*it == *it2) << "Assigned iterators must point to the " + << "element same as its source points to"; + + // Verifies that prefix operator++() returns *this. 
+ EXPECT_EQ(&it, &(++it)) << "Result of the prefix operator++ must be " + << "refer to the original object"; + + // Verifies that the result of the postfix operator++ points to the value + // pointed to by the original iterator. + int original_value = *it; // Have to compute it outside of macro call to be + // unaffected by the parameter evaluation order. + EXPECT_EQ(original_value, *(it++)); + + // Verifies that prefix and postfix operator++() advance an iterator + // all the same. + it2 = it; + ++it; + ++it2; + EXPECT_TRUE(*it == *it2); +} + +// Tests that Range() generates the expected sequence. +TEST(RangeTest, IntRangeWithDefaultStep) { + const ParamGenerator<int> gen = Range(0, 3); + const int expected_values[] = {0, 1, 2}; + VerifyGenerator(gen, expected_values); +} + +// Edge case. Tests that Range() generates the single element sequence +// as expected when provided with range limits that are equal. +TEST(RangeTest, IntRangeSingleValue) { + const ParamGenerator<int> gen = Range(0, 1); + const int expected_values[] = {0}; + VerifyGenerator(gen, expected_values); +} + +// Edge case. Tests that Range() with generates empty sequence when +// supplied with an empty range. +TEST(RangeTest, IntRangeEmpty) { + const ParamGenerator<int> gen = Range(0, 0); + VerifyGeneratorIsEmpty(gen); +} + +// Tests that Range() with custom step (greater then one) generates +// the expected sequence. +TEST(RangeTest, IntRangeWithCustomStep) { + const ParamGenerator<int> gen = Range(0, 9, 3); + const int expected_values[] = {0, 3, 6}; + VerifyGenerator(gen, expected_values); +} + +// Tests that Range() with custom step (greater then one) generates +// the expected sequence when the last element does not fall on the +// upper range limit. Sequences generated by Range() must not have +// elements beyond the range limits. +TEST(RangeTest, IntRangeWithCustomStepOverUpperBound) { + const ParamGenerator<int> gen = Range(0, 4, 3); + const int expected_values[] = {0, 3}; + VerifyGenerator(gen, expected_values); +} + +// Verifies that Range works with user-defined types that define +// copy constructor, operator=(), operator+(), and operator<(). +class DogAdder { + public: + explicit DogAdder(const char* a_value) : value_(a_value) {} + DogAdder(const DogAdder& other) : value_(other.value_.c_str()) {} + + DogAdder operator=(const DogAdder& other) { + if (this != &other) + value_ = other.value_; + return *this; + } + DogAdder operator+(const DogAdder& other) const { + Message msg; + msg << value_.c_str() << other.value_.c_str(); + return DogAdder(msg.GetString().c_str()); + } + bool operator<(const DogAdder& other) const { + return value_ < other.value_; + } + const std::string& value() const { return value_; } + + private: + std::string value_; +}; + +TEST(RangeTest, WorksWithACustomType) { + const ParamGenerator<DogAdder> gen = + Range(DogAdder("cat"), DogAdder("catdogdog"), DogAdder("dog")); + ParamGenerator<DogAdder>::iterator it = gen.begin(); + + ASSERT_FALSE(it == gen.end()); + EXPECT_STREQ("cat", it->value().c_str()); + + ASSERT_FALSE(++it == gen.end()); + EXPECT_STREQ("catdog", it->value().c_str()); + + EXPECT_TRUE(++it == gen.end()); +} + +class IntWrapper { + public: + explicit IntWrapper(int a_value) : value_(a_value) {} + IntWrapper(const IntWrapper& other) : value_(other.value_) {} + + IntWrapper operator=(const IntWrapper& other) { + value_ = other.value_; + return *this; + } + // operator+() adds a different type. 
+ IntWrapper operator+(int other) const { return IntWrapper(value_ + other); } + bool operator<(const IntWrapper& other) const { + return value_ < other.value_; + } + int value() const { return value_; } + + private: + int value_; +}; + +TEST(RangeTest, WorksWithACustomTypeWithDifferentIncrementType) { + const ParamGenerator<IntWrapper> gen = Range(IntWrapper(0), IntWrapper(2)); + ParamGenerator<IntWrapper>::iterator it = gen.begin(); + + ASSERT_FALSE(it == gen.end()); + EXPECT_EQ(0, it->value()); + + ASSERT_FALSE(++it == gen.end()); + EXPECT_EQ(1, it->value()); + + EXPECT_TRUE(++it == gen.end()); +} + +// Tests that ValuesIn() with an array parameter generates +// the expected sequence. +TEST(ValuesInTest, ValuesInArray) { + int array[] = {3, 5, 8}; + const ParamGenerator<int> gen = ValuesIn(array); + VerifyGenerator(gen, array); +} + +// Tests that ValuesIn() with a const array parameter generates +// the expected sequence. +TEST(ValuesInTest, ValuesInConstArray) { + const int array[] = {3, 5, 8}; + const ParamGenerator<int> gen = ValuesIn(array); + VerifyGenerator(gen, array); +} + +// Edge case. Tests that ValuesIn() with an array parameter containing a +// single element generates the single element sequence. +TEST(ValuesInTest, ValuesInSingleElementArray) { + int array[] = {42}; + const ParamGenerator<int> gen = ValuesIn(array); + VerifyGenerator(gen, array); +} + +// Tests that ValuesIn() generates the expected sequence for an STL +// container (vector). +TEST(ValuesInTest, ValuesInVector) { + typedef ::std::vector<int> ContainerType; + ContainerType values; + values.push_back(3); + values.push_back(5); + values.push_back(8); + const ParamGenerator<int> gen = ValuesIn(values); + + const int expected_values[] = {3, 5, 8}; + VerifyGenerator(gen, expected_values); +} + +// Tests that ValuesIn() generates the expected sequence. +TEST(ValuesInTest, ValuesInIteratorRange) { + typedef ::std::vector<int> ContainerType; + ContainerType values; + values.push_back(3); + values.push_back(5); + values.push_back(8); + const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end()); + + const int expected_values[] = {3, 5, 8}; + VerifyGenerator(gen, expected_values); +} + +// Edge case. Tests that ValuesIn() provided with an iterator range specifying a +// single value generates a single-element sequence. +TEST(ValuesInTest, ValuesInSingleElementIteratorRange) { + typedef ::std::vector<int> ContainerType; + ContainerType values; + values.push_back(42); + const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end()); + + const int expected_values[] = {42}; + VerifyGenerator(gen, expected_values); +} + +// Edge case. Tests that ValuesIn() provided with an empty iterator range +// generates an empty sequence. +TEST(ValuesInTest, ValuesInEmptyIteratorRange) { + typedef ::std::vector<int> ContainerType; + ContainerType values; + const ParamGenerator<int> gen = ValuesIn(values.begin(), values.end()); + + VerifyGeneratorIsEmpty(gen); +} + +// Tests that the Values() generates the expected sequence. +TEST(ValuesTest, ValuesWorks) { + const ParamGenerator<int> gen = Values(3, 5, 8); + + const int expected_values[] = {3, 5, 8}; + VerifyGenerator(gen, expected_values); +} + +// Tests that Values() generates the expected sequences from elements of +// different types convertible to ParamGenerator's parameter type. 
+TEST(ValuesTest, ValuesWorksForValuesOfCompatibleTypes) { + const ParamGenerator<double> gen = Values(3, 5.0f, 8.0); + + const double expected_values[] = {3.0, 5.0, 8.0}; + VerifyGenerator(gen, expected_values); +} + +TEST(ValuesTest, ValuesWorksForMaxLengthList) { + const ParamGenerator<int> gen = Values( + 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, + 110, 120, 130, 140, 150, 160, 170, 180, 190, 200, + 210, 220, 230, 240, 250, 260, 270, 280, 290, 300, + 310, 320, 330, 340, 350, 360, 370, 380, 390, 400, + 410, 420, 430, 440, 450, 460, 470, 480, 490, 500); + + const int expected_values[] = { + 10, 20, 30, 40, 50, 60, 70, 80, 90, 100, + 110, 120, 130, 140, 150, 160, 170, 180, 190, 200, + 210, 220, 230, 240, 250, 260, 270, 280, 290, 300, + 310, 320, 330, 340, 350, 360, 370, 380, 390, 400, + 410, 420, 430, 440, 450, 460, 470, 480, 490, 500}; + VerifyGenerator(gen, expected_values); +} + +// Edge case test. Tests that single-parameter Values() generates the sequence +// with the single value. +TEST(ValuesTest, ValuesWithSingleParameter) { + const ParamGenerator<int> gen = Values(42); + + const int expected_values[] = {42}; + VerifyGenerator(gen, expected_values); +} + +// Tests that Bool() generates sequence (false, true). +TEST(BoolTest, BoolWorks) { + const ParamGenerator<bool> gen = Bool(); + + const bool expected_values[] = {false, true}; + VerifyGenerator(gen, expected_values); +} + +// Tests that Combine() with two parameters generates the expected sequence. +TEST(CombineTest, CombineWithTwoParameters) { + const char* foo = "foo"; + const char* bar = "bar"; + const ParamGenerator<std::tuple<const char*, int> > gen = + Combine(Values(foo, bar), Values(3, 4)); + + std::tuple<const char*, int> expected_values[] = { + std::make_tuple(foo, 3), std::make_tuple(foo, 4), std::make_tuple(bar, 3), + std::make_tuple(bar, 4)}; + VerifyGenerator(gen, expected_values); +} + +// Tests that Combine() with three parameters generates the expected sequence. +TEST(CombineTest, CombineWithThreeParameters) { + const ParamGenerator<std::tuple<int, int, int> > gen = + Combine(Values(0, 1), Values(3, 4), Values(5, 6)); + std::tuple<int, int, int> expected_values[] = { + std::make_tuple(0, 3, 5), std::make_tuple(0, 3, 6), + std::make_tuple(0, 4, 5), std::make_tuple(0, 4, 6), + std::make_tuple(1, 3, 5), std::make_tuple(1, 3, 6), + std::make_tuple(1, 4, 5), std::make_tuple(1, 4, 6)}; + VerifyGenerator(gen, expected_values); +} + +// Tests that the Combine() with the first parameter generating a single value +// sequence generates a sequence with the number of elements equal to the +// number of elements in the sequence generated by the second parameter. +TEST(CombineTest, CombineWithFirstParameterSingleValue) { + const ParamGenerator<std::tuple<int, int> > gen = + Combine(Values(42), Values(0, 1)); + + std::tuple<int, int> expected_values[] = {std::make_tuple(42, 0), + std::make_tuple(42, 1)}; + VerifyGenerator(gen, expected_values); +} + +// Tests that the Combine() with the second parameter generating a single value +// sequence generates a sequence with the number of elements equal to the +// number of elements in the sequence generated by the first parameter. 
+TEST(CombineTest, CombineWithSecondParameterSingleValue) { + const ParamGenerator<std::tuple<int, int> > gen = + Combine(Values(0, 1), Values(42)); + + std::tuple<int, int> expected_values[] = {std::make_tuple(0, 42), + std::make_tuple(1, 42)}; + VerifyGenerator(gen, expected_values); +} + +// Tests that when the first parameter produces an empty sequence, +// Combine() produces an empty sequence, too. +TEST(CombineTest, CombineWithFirstParameterEmptyRange) { + const ParamGenerator<std::tuple<int, int> > gen = + Combine(Range(0, 0), Values(0, 1)); + VerifyGeneratorIsEmpty(gen); +} + +// Tests that when the second parameter produces an empty sequence, +// Combine() produces an empty sequence, too. +TEST(CombineTest, CombineWithSecondParameterEmptyRange) { + const ParamGenerator<std::tuple<int, int> > gen = + Combine(Values(0, 1), Range(1, 1)); + VerifyGeneratorIsEmpty(gen); +} + +// Edge case. Tests that combine works with the maximum number +// of parameters supported by Google Test (currently 10). +TEST(CombineTest, CombineWithMaxNumberOfParameters) { + const char* foo = "foo"; + const char* bar = "bar"; + const ParamGenerator< + std::tuple<const char*, int, int, int, int, int, int, int, int, int> > + gen = + Combine(Values(foo, bar), Values(1), Values(2), Values(3), Values(4), + Values(5), Values(6), Values(7), Values(8), Values(9)); + + std::tuple<const char*, int, int, int, int, int, int, int, int, int> + expected_values[] = {std::make_tuple(foo, 1, 2, 3, 4, 5, 6, 7, 8, 9), + std::make_tuple(bar, 1, 2, 3, 4, 5, 6, 7, 8, 9)}; + VerifyGenerator(gen, expected_values); +} + +class NonDefaultConstructAssignString { + public: + NonDefaultConstructAssignString(const std::string& s) : str_(s) {} + NonDefaultConstructAssignString() = delete; + NonDefaultConstructAssignString(const NonDefaultConstructAssignString&) = + default; + NonDefaultConstructAssignString& operator=( + const NonDefaultConstructAssignString&) = delete; + ~NonDefaultConstructAssignString() = default; + + const std::string& str() const { return str_; } + + private: + std::string str_; +}; + +TEST(CombineTest, NonDefaultConstructAssign) { + const ParamGenerator<std::tuple<int, NonDefaultConstructAssignString> > gen = + Combine(Values(0, 1), Values(NonDefaultConstructAssignString("A"), + NonDefaultConstructAssignString("B"))); + + ParamGenerator<std::tuple<int, NonDefaultConstructAssignString> >::iterator + it = gen.begin(); + + EXPECT_EQ(0, std::get<0>(*it)); + EXPECT_EQ("A", std::get<1>(*it).str()); + ++it; + + EXPECT_EQ(0, std::get<0>(*it)); + EXPECT_EQ("B", std::get<1>(*it).str()); + ++it; + + EXPECT_EQ(1, std::get<0>(*it)); + EXPECT_EQ("A", std::get<1>(*it).str()); + ++it; + + EXPECT_EQ(1, std::get<0>(*it)); + EXPECT_EQ("B", std::get<1>(*it).str()); + ++it; + + EXPECT_TRUE(it == gen.end()); +} + + +// Tests that an generator produces correct sequence after being +// assigned from another generator. +TEST(ParamGeneratorTest, AssignmentWorks) { + ParamGenerator<int> gen = Values(1, 2); + const ParamGenerator<int> gen2 = Values(3, 4); + gen = gen2; + + const int expected_values[] = {3, 4}; + VerifyGenerator(gen, expected_values); +} + +// This test verifies that the tests are expanded and run as specified: +// one test per element from the sequence produced by the generator +// specified in INSTANTIATE_TEST_SUITE_P. It also verifies that the test's +// fixture constructor, SetUp(), and TearDown() have run and have been +// supplied with the correct parameters. 
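// ---------------------------------------------------------------------------
// Editor's note: a minimal sketch (not part of the upstream file) of the
// expansion behavior that the machinery below verifies. For each element
// produced by the generator, Google Test registers one test named
// "<Instantiation>/<Fixture>.<Test>/<index>" and constructs a fresh fixture
// object for it, running SetUp()/TearDown() around each body. The names
// ExpansionSketchTest and SketchModule are hypothetical.
// ---------------------------------------------------------------------------
#include "gtest/gtest.h"

namespace editor_sketch {

class ExpansionSketchTest : public ::testing::TestWithParam<int> {
 protected:
  // A fresh fixture instance is constructed for every generated test, so
  // this counter is always 0 on entry to the test body.
  int calls_in_this_instance_ = 0;
};

TEST_P(ExpansionSketchTest, RunsOncePerParameter) {
  EXPECT_EQ(0, calls_in_this_instance_++);
  EXPECT_GE(GetParam(), 1);
}

// Registers three tests:
//   SketchModule/ExpansionSketchTest.RunsOncePerParameter/0  (param = 1)
//   SketchModule/ExpansionSketchTest.RunsOncePerParameter/1  (param = 2)
//   SketchModule/ExpansionSketchTest.RunsOncePerParameter/2  (param = 3)
INSTANTIATE_TEST_SUITE_P(SketchModule, ExpansionSketchTest,
                         ::testing::Values(1, 2, 3));

}  // namespace editor_sketch
// ---------- end of editor's sketch; the upstream comment continues ---------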
+ +// The use of environment object allows detection of the case where no test +// case functionality is run at all. In this case TearDownTestSuite will not +// be able to detect missing tests, naturally. +template <int kExpectedCalls> +class TestGenerationEnvironment : public ::testing::Environment { + public: + static TestGenerationEnvironment* Instance() { + static TestGenerationEnvironment* instance = new TestGenerationEnvironment; + return instance; + } + + void FixtureConstructorExecuted() { fixture_constructor_count_++; } + void SetUpExecuted() { set_up_count_++; } + void TearDownExecuted() { tear_down_count_++; } + void TestBodyExecuted() { test_body_count_++; } + + void TearDown() override { + // If all MultipleTestGenerationTest tests have been de-selected + // by the filter flag, the following checks make no sense. + bool perform_check = false; + + for (int i = 0; i < kExpectedCalls; ++i) { + Message msg; + msg << "TestsExpandedAndRun/" << i; + if (UnitTestOptions::FilterMatchesTest( + "TestExpansionModule/MultipleTestGenerationTest", + msg.GetString().c_str())) { + perform_check = true; + } + } + if (perform_check) { + EXPECT_EQ(kExpectedCalls, fixture_constructor_count_) + << "Fixture constructor of ParamTestGenerationTest test case " + << "has not been run as expected."; + EXPECT_EQ(kExpectedCalls, set_up_count_) + << "Fixture SetUp method of ParamTestGenerationTest test case " + << "has not been run as expected."; + EXPECT_EQ(kExpectedCalls, tear_down_count_) + << "Fixture TearDown method of ParamTestGenerationTest test case " + << "has not been run as expected."; + EXPECT_EQ(kExpectedCalls, test_body_count_) + << "Test in ParamTestGenerationTest test case " + << "has not been run as expected."; + } + } + + private: + TestGenerationEnvironment() : fixture_constructor_count_(0), set_up_count_(0), + tear_down_count_(0), test_body_count_(0) {} + + int fixture_constructor_count_; + int set_up_count_; + int tear_down_count_; + int test_body_count_; + + GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationEnvironment); +}; + +const int test_generation_params[] = {36, 42, 72}; + +class TestGenerationTest : public TestWithParam<int> { + public: + enum { + PARAMETER_COUNT = + sizeof(test_generation_params)/sizeof(test_generation_params[0]) + }; + + typedef TestGenerationEnvironment<PARAMETER_COUNT> Environment; + + TestGenerationTest() { + Environment::Instance()->FixtureConstructorExecuted(); + current_parameter_ = GetParam(); + } + void SetUp() override { + Environment::Instance()->SetUpExecuted(); + EXPECT_EQ(current_parameter_, GetParam()); + } + void TearDown() override { + Environment::Instance()->TearDownExecuted(); + EXPECT_EQ(current_parameter_, GetParam()); + } + + static void SetUpTestSuite() { + bool all_tests_in_test_case_selected = true; + + for (int i = 0; i < PARAMETER_COUNT; ++i) { + Message test_name; + test_name << "TestsExpandedAndRun/" << i; + if ( !UnitTestOptions::FilterMatchesTest( + "TestExpansionModule/MultipleTestGenerationTest", + test_name.GetString())) { + all_tests_in_test_case_selected = false; + } + } + EXPECT_TRUE(all_tests_in_test_case_selected) + << "When running the TestGenerationTest test case all of its tests\n" + << "must be selected by the filter flag for the test case to pass.\n" + << "If not all of them are enabled, we can't reliably conclude\n" + << "that the correct number of tests have been generated."; + + collected_parameters_.clear(); + } + + static void TearDownTestSuite() { + vector<int> expected_values(test_generation_params, + 
test_generation_params + PARAMETER_COUNT); + // Test execution order is not guaranteed by Google Test, + // so the order of values in collected_parameters_ can be + // different and we have to sort to compare. + sort(expected_values.begin(), expected_values.end()); + sort(collected_parameters_.begin(), collected_parameters_.end()); + + EXPECT_TRUE(collected_parameters_ == expected_values); + } + + protected: + int current_parameter_; + static vector<int> collected_parameters_; + + private: + GTEST_DISALLOW_COPY_AND_ASSIGN_(TestGenerationTest); +}; +vector<int> TestGenerationTest::collected_parameters_; + +TEST_P(TestGenerationTest, TestsExpandedAndRun) { + Environment::Instance()->TestBodyExecuted(); + EXPECT_EQ(current_parameter_, GetParam()); + collected_parameters_.push_back(GetParam()); +} +INSTANTIATE_TEST_SUITE_P(TestExpansionModule, TestGenerationTest, + ValuesIn(test_generation_params)); + +// This test verifies that the element sequence (third parameter of +// INSTANTIATE_TEST_SUITE_P) is evaluated in InitGoogleTest() and neither at +// the call site of INSTANTIATE_TEST_SUITE_P nor in RUN_ALL_TESTS(). For +// that, we declare param_value_ to be a static member of +// GeneratorEvaluationTest and initialize it to 0. We set it to 1 in +// main(), just before invocation of InitGoogleTest(). After calling +// InitGoogleTest(), we set the value to 2. If the sequence is evaluated +// before or after InitGoogleTest, INSTANTIATE_TEST_SUITE_P will create a +// test with parameter other than 1, and the test body will fail the +// assertion. +class GeneratorEvaluationTest : public TestWithParam<int> { + public: + static int param_value() { return param_value_; } + static void set_param_value(int param_value) { param_value_ = param_value; } + + private: + static int param_value_; +}; +int GeneratorEvaluationTest::param_value_ = 0; + +TEST_P(GeneratorEvaluationTest, GeneratorsEvaluatedInMain) { + EXPECT_EQ(1, GetParam()); +} +INSTANTIATE_TEST_SUITE_P(GenEvalModule, GeneratorEvaluationTest, + Values(GeneratorEvaluationTest::param_value())); + +// Tests that generators defined in a different translation unit are +// functional. Generator extern_gen is defined in gtest-param-test_test2.cc. +extern ParamGenerator<int> extern_gen; +class ExternalGeneratorTest : public TestWithParam<int> {}; +TEST_P(ExternalGeneratorTest, ExternalGenerator) { + // Sequence produced by extern_gen contains only a single value + // which we verify here. + EXPECT_EQ(GetParam(), 33); +} +INSTANTIATE_TEST_SUITE_P(ExternalGeneratorModule, ExternalGeneratorTest, + extern_gen); + +// Tests that a parameterized test case can be defined in one translation +// unit and instantiated in another. This test will be instantiated in +// gtest-param-test_test2.cc. ExternalInstantiationTest fixture class is +// defined in gtest-param-test_test.h. +TEST_P(ExternalInstantiationTest, IsMultipleOf33) { + EXPECT_EQ(0, GetParam() % 33); +} + +// Tests that a parameterized test case can be instantiated with multiple +// generators. +class MultipleInstantiationTest : public TestWithParam<int> {}; +TEST_P(MultipleInstantiationTest, AllowsMultipleInstances) { +} +INSTANTIATE_TEST_SUITE_P(Sequence1, MultipleInstantiationTest, Values(1, 2)); +INSTANTIATE_TEST_SUITE_P(Sequence2, MultipleInstantiationTest, Range(3, 5)); + +// Tests that a parameterized test case can be instantiated +// in multiple translation units. This test will be instantiated +// here and in gtest-param-test_test2.cc. 
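// ---------------------------------------------------------------------------
// Editor's note: a schematic sketch (not part of the upstream file) of the
// cross-translation-unit pattern exercised here and in the files named above.
// The fixture lives in a shared header, the test body is defined in exactly
// one translation unit, and any number of translation units may add their own
// instantiations as long as the instantiation names are distinct. The file
// and type names in this sketch (shared_fixture.h, tu_one.cc, tu_two.cc,
// CrossTuTest) are hypothetical.
//
//   // shared_fixture.h -- the fixture is visible to every translation unit.
//   class CrossTuTest : public ::testing::TestWithParam<int> {};
//
//   // tu_one.cc -- defines the test body once...
//   TEST_P(CrossTuTest, IsPositive) { EXPECT_GT(GetParam(), 0); }
//   // ...and may also instantiate it.
//   INSTANTIATE_TEST_SUITE_P(TuOne, CrossTuTest, ::testing::Values(1, 2));
//
//   // tu_two.cc -- a second instantiation; the instantiation name ("TuTwo")
//   // must differ from the one used in tu_one.cc.
//   INSTANTIATE_TEST_SUITE_P(TuTwo, CrossTuTest, ::testing::Values(3, 4));
// ---------- end of editor's sketch; the upstream comment continues ---------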
+// InstantiationInMultipleTranslationUnitsTest fixture class +// is defined in gtest-param-test_test.h. +TEST_P(InstantiationInMultipleTranslationUnitsTest, IsMultipleOf42) { + EXPECT_EQ(0, GetParam() % 42); +} +INSTANTIATE_TEST_SUITE_P(Sequence1, InstantiationInMultipleTranslationUnitsTest, + Values(42, 42 * 2)); + +// Tests that each iteration of parameterized test runs in a separate test +// object. +class SeparateInstanceTest : public TestWithParam<int> { + public: + SeparateInstanceTest() : count_(0) {} + + static void TearDownTestSuite() { + EXPECT_GE(global_count_, 2) + << "If some (but not all) SeparateInstanceTest tests have been " + << "filtered out this test will fail. Make sure that all " + << "GeneratorEvaluationTest are selected or de-selected together " + << "by the test filter."; + } + + protected: + int count_; + static int global_count_; +}; +int SeparateInstanceTest::global_count_ = 0; + +TEST_P(SeparateInstanceTest, TestsRunInSeparateInstances) { + EXPECT_EQ(0, count_++); + global_count_++; +} +INSTANTIATE_TEST_SUITE_P(FourElemSequence, SeparateInstanceTest, Range(1, 4)); + +// Tests that all instantiations of a test have named appropriately. Test +// defined with TEST_P(TestSuiteName, TestName) and instantiated with +// INSTANTIATE_TEST_SUITE_P(SequenceName, TestSuiteName, generator) must be +// named SequenceName/TestSuiteName.TestName/i, where i is the 0-based index of +// the sequence element used to instantiate the test. +class NamingTest : public TestWithParam<int> {}; + +TEST_P(NamingTest, TestsReportCorrectNamesAndParameters) { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + + EXPECT_STREQ("ZeroToFiveSequence/NamingTest", test_info->test_suite_name()); + + Message index_stream; + index_stream << "TestsReportCorrectNamesAndParameters/" << GetParam(); + EXPECT_STREQ(index_stream.GetString().c_str(), test_info->name()); + + EXPECT_EQ(::testing::PrintToString(GetParam()), test_info->value_param()); +} + +INSTANTIATE_TEST_SUITE_P(ZeroToFiveSequence, NamingTest, Range(0, 5)); + +// Tests that macros in test names are expanded correctly. +class MacroNamingTest : public TestWithParam<int> {}; + +#define PREFIX_WITH_FOO(test_name) Foo##test_name +#define PREFIX_WITH_MACRO(test_name) Macro##test_name + +TEST_P(PREFIX_WITH_MACRO(NamingTest), PREFIX_WITH_FOO(SomeTestName)) { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + + EXPECT_STREQ("FortyTwo/MacroNamingTest", test_info->test_suite_name()); + EXPECT_STREQ("FooSomeTestName/0", test_info->name()); +} + +INSTANTIATE_TEST_SUITE_P(FortyTwo, MacroNamingTest, Values(42)); + +// Tests the same thing for non-parametrized tests. 
+class MacroNamingTestNonParametrized : public ::testing::Test {}; + +TEST_F(PREFIX_WITH_MACRO(NamingTestNonParametrized), + PREFIX_WITH_FOO(SomeTestName)) { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + + EXPECT_STREQ("MacroNamingTestNonParametrized", test_info->test_suite_name()); + EXPECT_STREQ("FooSomeTestName", test_info->name()); +} + +TEST(MacroNameing, LookupNames) { + std::set<std::string> know_suite_names, know_test_names; + + auto ins = testing::UnitTest::GetInstance(); + int ts = 0; + while (const testing::TestSuite* suite = ins->GetTestSuite(ts++)) { + know_suite_names.insert(suite->name()); + + int ti = 0; + while (const testing::TestInfo* info = suite->GetTestInfo(ti++)) { + know_test_names.insert(std::string(suite->name()) + "." + info->name()); + } + } + + // Check that the expected form of the test suit name actually exists. + EXPECT_NE( // + know_suite_names.find("FortyTwo/MacroNamingTest"), + know_suite_names.end()); + EXPECT_NE( + know_suite_names.find("MacroNamingTestNonParametrized"), + know_suite_names.end()); + // Check that the expected form of the test name actually exists. + EXPECT_NE( // + know_test_names.find("FortyTwo/MacroNamingTest.FooSomeTestName/0"), + know_test_names.end()); + EXPECT_NE( + know_test_names.find("MacroNamingTestNonParametrized.FooSomeTestName"), + know_test_names.end()); +} + +// Tests that user supplied custom parameter names are working correctly. +// Runs the test with a builtin helper method which uses PrintToString, +// as well as a custom function and custom functor to ensure all possible +// uses work correctly. +class CustomFunctorNamingTest : public TestWithParam<std::string> {}; +TEST_P(CustomFunctorNamingTest, CustomTestNames) {} + +struct CustomParamNameFunctor { + std::string operator()(const ::testing::TestParamInfo<std::string>& inf) { + return inf.param; + } +}; + +INSTANTIATE_TEST_SUITE_P(CustomParamNameFunctor, CustomFunctorNamingTest, + Values(std::string("FunctorName")), + CustomParamNameFunctor()); + +INSTANTIATE_TEST_SUITE_P(AllAllowedCharacters, CustomFunctorNamingTest, + Values("abcdefghijklmnopqrstuvwxyz", + "ABCDEFGHIJKLMNOPQRSTUVWXYZ", "01234567890_"), + CustomParamNameFunctor()); + +inline std::string CustomParamNameFunction( + const ::testing::TestParamInfo<std::string>& inf) { + return inf.param; +} + +class CustomFunctionNamingTest : public TestWithParam<std::string> {}; +TEST_P(CustomFunctionNamingTest, CustomTestNames) {} + +INSTANTIATE_TEST_SUITE_P(CustomParamNameFunction, CustomFunctionNamingTest, + Values(std::string("FunctionName")), + CustomParamNameFunction); + +INSTANTIATE_TEST_SUITE_P(CustomParamNameFunctionP, CustomFunctionNamingTest, + Values(std::string("FunctionNameP")), + &CustomParamNameFunction); + +// Test custom naming with a lambda + +class CustomLambdaNamingTest : public TestWithParam<std::string> {}; +TEST_P(CustomLambdaNamingTest, CustomTestNames) {} + +INSTANTIATE_TEST_SUITE_P(CustomParamNameLambda, CustomLambdaNamingTest, + Values(std::string("LambdaName")), + [](const ::testing::TestParamInfo<std::string>& inf) { + return inf.param; + }); + +TEST(CustomNamingTest, CheckNameRegistry) { + ::testing::UnitTest* unit_test = ::testing::UnitTest::GetInstance(); + std::set<std::string> test_names; + for (int suite_num = 0; suite_num < unit_test->total_test_suite_count(); + ++suite_num) { + const ::testing::TestSuite* test_suite = unit_test->GetTestSuite(suite_num); + for (int test_num = 0; test_num < 
test_suite->total_test_count(); + ++test_num) { + const ::testing::TestInfo* test_info = test_suite->GetTestInfo(test_num); + test_names.insert(std::string(test_info->name())); + } + } + EXPECT_EQ(1u, test_names.count("CustomTestNames/FunctorName")); + EXPECT_EQ(1u, test_names.count("CustomTestNames/FunctionName")); + EXPECT_EQ(1u, test_names.count("CustomTestNames/FunctionNameP")); + EXPECT_EQ(1u, test_names.count("CustomTestNames/LambdaName")); +} + +// Test a numeric name to ensure PrintToStringParamName works correctly. + +class CustomIntegerNamingTest : public TestWithParam<int> {}; + +TEST_P(CustomIntegerNamingTest, TestsReportCorrectNames) { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + Message test_name_stream; + test_name_stream << "TestsReportCorrectNames/" << GetParam(); + EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name()); +} + +INSTANTIATE_TEST_SUITE_P(PrintToString, CustomIntegerNamingTest, Range(0, 5), + ::testing::PrintToStringParamName()); + +// Test a custom struct with PrintToString. + +struct CustomStruct { + explicit CustomStruct(int value) : x(value) {} + int x; +}; + +std::ostream& operator<<(std::ostream& stream, const CustomStruct& val) { + stream << val.x; + return stream; +} + +class CustomStructNamingTest : public TestWithParam<CustomStruct> {}; + +TEST_P(CustomStructNamingTest, TestsReportCorrectNames) { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + Message test_name_stream; + test_name_stream << "TestsReportCorrectNames/" << GetParam(); + EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name()); +} + +INSTANTIATE_TEST_SUITE_P(PrintToString, CustomStructNamingTest, + Values(CustomStruct(0), CustomStruct(1)), + ::testing::PrintToStringParamName()); + +// Test that using a stateful parameter naming function works as expected. + +struct StatefulNamingFunctor { + StatefulNamingFunctor() : sum(0) {} + std::string operator()(const ::testing::TestParamInfo<int>& info) { + int value = info.param + sum; + sum += info.param; + return ::testing::PrintToString(value); + } + int sum; +}; + +class StatefulNamingTest : public ::testing::TestWithParam<int> { + protected: + StatefulNamingTest() : sum_(0) {} + int sum_; +}; + +TEST_P(StatefulNamingTest, TestsReportCorrectNames) { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + sum_ += GetParam(); + Message test_name_stream; + test_name_stream << "TestsReportCorrectNames/" << sum_; + EXPECT_STREQ(test_name_stream.GetString().c_str(), test_info->name()); +} + +INSTANTIATE_TEST_SUITE_P(StatefulNamingFunctor, StatefulNamingTest, Range(0, 5), + StatefulNamingFunctor()); + +// Class that cannot be streamed into an ostream. It needs to be copyable +// (and, in case of MSVC, also assignable) in order to be a test parameter +// type. Its default copy constructor and assignment operator do exactly +// what we need. +class Unstreamable { + public: + explicit Unstreamable(int value) : value_(value) {} + // -Wunused-private-field: dummy accessor for `value_`. 
+ const int& dummy_value() const { return value_; } + + private: + int value_; +}; + +class CommentTest : public TestWithParam<Unstreamable> {}; + +TEST_P(CommentTest, TestsCorrectlyReportUnstreamableParams) { + const ::testing::TestInfo* const test_info = + ::testing::UnitTest::GetInstance()->current_test_info(); + + EXPECT_EQ(::testing::PrintToString(GetParam()), test_info->value_param()); +} + +INSTANTIATE_TEST_SUITE_P(InstantiationWithComments, CommentTest, + Values(Unstreamable(1))); + +// Verify that we can create a hierarchy of test fixtures, where the base +// class fixture is not parameterized and the derived class is. In this case +// ParameterizedDerivedTest inherits from NonParameterizedBaseTest. We +// perform simple tests on both. +class NonParameterizedBaseTest : public ::testing::Test { + public: + NonParameterizedBaseTest() : n_(17) { } + protected: + int n_; +}; + +class ParameterizedDerivedTest : public NonParameterizedBaseTest, + public ::testing::WithParamInterface<int> { + protected: + ParameterizedDerivedTest() : count_(0) { } + int count_; + static int global_count_; +}; + +int ParameterizedDerivedTest::global_count_ = 0; + +TEST_F(NonParameterizedBaseTest, FixtureIsInitialized) { + EXPECT_EQ(17, n_); +} + +TEST_P(ParameterizedDerivedTest, SeesSequence) { + EXPECT_EQ(17, n_); + EXPECT_EQ(0, count_++); + EXPECT_EQ(GetParam(), global_count_++); +} + +class ParameterizedDeathTest : public ::testing::TestWithParam<int> { }; + +TEST_F(ParameterizedDeathTest, GetParamDiesFromTestF) { + EXPECT_DEATH_IF_SUPPORTED(GetParam(), + ".* value-parameterized test .*"); +} + +INSTANTIATE_TEST_SUITE_P(RangeZeroToFive, ParameterizedDerivedTest, + Range(0, 5)); + +// Tests param generator working with Enums +enum MyEnums { + ENUM1 = 1, + ENUM2 = 3, + ENUM3 = 8, +}; + +class MyEnumTest : public testing::TestWithParam<MyEnums> {}; + +TEST_P(MyEnumTest, ChecksParamMoreThanZero) { EXPECT_GE(10, GetParam()); } +INSTANTIATE_TEST_SUITE_P(MyEnumTests, MyEnumTest, + ::testing::Values(ENUM1, ENUM2, 0)); + +namespace works_here { +// Never used not instantiated, this should work. +class NotUsedTest : public testing::TestWithParam<int> {}; + +/////// +// Never used not instantiated, this should work. +template <typename T> +class NotUsedTypeTest : public testing::Test {}; +TYPED_TEST_SUITE_P(NotUsedTypeTest); + +// Used but not instantiated, this would fail. but... +class NotInstantiatedTest : public testing::TestWithParam<int> {}; +// ... we mark is as allowed. +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(NotInstantiatedTest); + +TEST_P(NotInstantiatedTest, Used) { } + +using OtherName = NotInstantiatedTest; +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(OtherName); +TEST_P(OtherName, Used) { } + +// Used but not instantiated, this would fail. but... +template <typename T> +class NotInstantiatedTypeTest : public testing::Test {}; +TYPED_TEST_SUITE_P(NotInstantiatedTypeTest); +// ... we mark is as allowed. +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(NotInstantiatedTypeTest); + +TYPED_TEST_P(NotInstantiatedTypeTest, Used) { } +REGISTER_TYPED_TEST_SUITE_P(NotInstantiatedTypeTest, Used); +} // namespace works_here + +int main(int argc, char **argv) { + // Used in TestGenerationTest test suite. + AddGlobalTestEnvironment(TestGenerationTest::Environment::Instance()); + // Used in GeneratorEvaluationTest test suite. Tests that the updated value + // will be picked up for instantiating tests in GeneratorEvaluationTest. 
+ GeneratorEvaluationTest::set_param_value(1); + + ::testing::InitGoogleTest(&argc, argv); + + // Used in GeneratorEvaluationTest test suite. Tests that value updated + // here will NOT be used for instantiating tests in + // GeneratorEvaluationTest. + GeneratorEvaluationTest::set_param_value(2); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-param-test-test.h b/security/nss/gtests/google_test/gtest/test/googletest-param-test-test.h new file mode 100644 index 0000000000..891937538d --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-param-test-test.h @@ -0,0 +1,51 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// The Google C++ Testing and Mocking Framework (Google Test) +// +// This header file provides classes and functions used internally +// for testing Google Test itself. + +#ifndef GOOGLETEST_TEST_GOOGLETEST_PARAM_TEST_TEST_H_ +#define GOOGLETEST_TEST_GOOGLETEST_PARAM_TEST_TEST_H_ + +#include "gtest/gtest.h" + +// Test fixture for testing definition and instantiation of a test +// in separate translation units. +class ExternalInstantiationTest : public ::testing::TestWithParam<int> { +}; + +// Test fixture for testing instantiation of a test in multiple +// translation units. +class InstantiationInMultipleTranslationUnitsTest + : public ::testing::TestWithParam<int> { +}; + +#endif // GOOGLETEST_TEST_GOOGLETEST_PARAM_TEST_TEST_H_ diff --git a/security/nss/gtests/google_test/gtest/test/googletest-param-test2-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-param-test2-test.cc new file mode 100644 index 0000000000..2a29fb1d06 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-param-test2-test.cc @@ -0,0 +1,61 @@ +// Copyright 2008, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Tests for Google Test itself. This verifies that the basic constructs of +// Google Test work. + +#include "gtest/gtest.h" +#include "test/googletest-param-test-test.h" + +using ::testing::Values; +using ::testing::internal::ParamGenerator; + +// Tests that generators defined in a different translation unit +// are functional. The test using extern_gen is defined +// in googletest-param-test-test.cc. +ParamGenerator<int> extern_gen = Values(33); + +// Tests that a parameterized test case can be defined in one translation unit +// and instantiated in another. The test is defined in +// googletest-param-test-test.cc and ExternalInstantiationTest fixture class is +// defined in gtest-param-test_test.h. +INSTANTIATE_TEST_SUITE_P(MultiplesOf33, + ExternalInstantiationTest, + Values(33, 66)); + +// Tests that a parameterized test case can be instantiated +// in multiple translation units. Another instantiation is defined +// in googletest-param-test-test.cc and +// InstantiationInMultipleTranslationUnitsTest fixture is defined in +// gtest-param-test_test.h +INSTANTIATE_TEST_SUITE_P(Sequence2, + InstantiationInMultipleTranslationUnitsTest, + Values(42*3, 42*4, 42*5)); + diff --git a/security/nss/gtests/google_test/gtest/test/googletest-port-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-port-test.cc new file mode 100644 index 0000000000..1e0c861632 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-port-test.cc @@ -0,0 +1,1276 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// This file tests the internal cross-platform support utilities. +#include <stdio.h> + +#include "gtest/internal/gtest-port.h" + +#if GTEST_OS_MAC +# include <time.h> +#endif // GTEST_OS_MAC + +#include <list> +#include <memory> +#include <utility> // For std::pair and std::make_pair. +#include <vector> + +#include "gtest/gtest.h" +#include "gtest/gtest-spi.h" +#include "src/gtest-internal-inl.h" + +using std::make_pair; +using std::pair; + +namespace testing { +namespace internal { + +TEST(IsXDigitTest, WorksForNarrowAscii) { + EXPECT_TRUE(IsXDigit('0')); + EXPECT_TRUE(IsXDigit('9')); + EXPECT_TRUE(IsXDigit('A')); + EXPECT_TRUE(IsXDigit('F')); + EXPECT_TRUE(IsXDigit('a')); + EXPECT_TRUE(IsXDigit('f')); + + EXPECT_FALSE(IsXDigit('-')); + EXPECT_FALSE(IsXDigit('g')); + EXPECT_FALSE(IsXDigit('G')); +} + +TEST(IsXDigitTest, ReturnsFalseForNarrowNonAscii) { + EXPECT_FALSE(IsXDigit(static_cast<char>('\x80'))); + EXPECT_FALSE(IsXDigit(static_cast<char>('0' | '\x80'))); +} + +TEST(IsXDigitTest, WorksForWideAscii) { + EXPECT_TRUE(IsXDigit(L'0')); + EXPECT_TRUE(IsXDigit(L'9')); + EXPECT_TRUE(IsXDigit(L'A')); + EXPECT_TRUE(IsXDigit(L'F')); + EXPECT_TRUE(IsXDigit(L'a')); + EXPECT_TRUE(IsXDigit(L'f')); + + EXPECT_FALSE(IsXDigit(L'-')); + EXPECT_FALSE(IsXDigit(L'g')); + EXPECT_FALSE(IsXDigit(L'G')); +} + +TEST(IsXDigitTest, ReturnsFalseForWideNonAscii) { + EXPECT_FALSE(IsXDigit(static_cast<wchar_t>(0x80))); + EXPECT_FALSE(IsXDigit(static_cast<wchar_t>(L'0' | 0x80))); + EXPECT_FALSE(IsXDigit(static_cast<wchar_t>(L'0' | 0x100))); +} + +class Base { + public: + Base() : member_(0) {} + explicit Base(int n) : member_(n) {} + Base(const Base&) = default; + Base& operator=(const Base&) = default; + virtual ~Base() {} + int member() { return member_; } + + private: + int member_; +}; + +class Derived : public Base { + public: + explicit Derived(int n) : Base(n) {} +}; + +TEST(ImplicitCastTest, ConvertsPointers) { + Derived derived(0); + EXPECT_TRUE(&derived == ::testing::internal::ImplicitCast_<Base*>(&derived)); +} + +TEST(ImplicitCastTest, CanUseInheritance) { + Derived derived(1); + Base base = ::testing::internal::ImplicitCast_<Base>(derived); + EXPECT_EQ(derived.member(), base.member()); +} + +class Castable { + public: + explicit Castable(bool* converted) : converted_(converted) {} + operator Base() { + 
*converted_ = true; + return Base(); + } + + private: + bool* converted_; +}; + +TEST(ImplicitCastTest, CanUseNonConstCastOperator) { + bool converted = false; + Castable castable(&converted); + Base base = ::testing::internal::ImplicitCast_<Base>(castable); + EXPECT_TRUE(converted); +} + +class ConstCastable { + public: + explicit ConstCastable(bool* converted) : converted_(converted) {} + operator Base() const { + *converted_ = true; + return Base(); + } + + private: + bool* converted_; +}; + +TEST(ImplicitCastTest, CanUseConstCastOperatorOnConstValues) { + bool converted = false; + const ConstCastable const_castable(&converted); + Base base = ::testing::internal::ImplicitCast_<Base>(const_castable); + EXPECT_TRUE(converted); +} + +class ConstAndNonConstCastable { + public: + ConstAndNonConstCastable(bool* converted, bool* const_converted) + : converted_(converted), const_converted_(const_converted) {} + operator Base() { + *converted_ = true; + return Base(); + } + operator Base() const { + *const_converted_ = true; + return Base(); + } + + private: + bool* converted_; + bool* const_converted_; +}; + +TEST(ImplicitCastTest, CanSelectBetweenConstAndNonConstCasrAppropriately) { + bool converted = false; + bool const_converted = false; + ConstAndNonConstCastable castable(&converted, &const_converted); + Base base = ::testing::internal::ImplicitCast_<Base>(castable); + EXPECT_TRUE(converted); + EXPECT_FALSE(const_converted); + + converted = false; + const_converted = false; + const ConstAndNonConstCastable const_castable(&converted, &const_converted); + base = ::testing::internal::ImplicitCast_<Base>(const_castable); + EXPECT_FALSE(converted); + EXPECT_TRUE(const_converted); +} + +class To { + public: + To(bool* converted) { *converted = true; } // NOLINT +}; + +TEST(ImplicitCastTest, CanUseImplicitConstructor) { + bool converted = false; + To to = ::testing::internal::ImplicitCast_<To>(&converted); + (void)to; + EXPECT_TRUE(converted); +} + +// The following code intentionally tests a suboptimal syntax. +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdangling-else" +#pragma GCC diagnostic ignored "-Wempty-body" +#pragma GCC diagnostic ignored "-Wpragmas" +#endif +TEST(GtestCheckSyntaxTest, BehavesLikeASingleStatement) { + if (AlwaysFalse()) + GTEST_CHECK_(false) << "This should never be executed; " + "It's a compilation test only."; + + if (AlwaysTrue()) + GTEST_CHECK_(true); + else + ; // NOLINT + + if (AlwaysFalse()) + ; // NOLINT + else + GTEST_CHECK_(true) << ""; +} +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + +TEST(GtestCheckSyntaxTest, WorksWithSwitch) { + switch (0) { + case 1: + break; + default: + GTEST_CHECK_(true); + } + + switch (0) + case 0: + GTEST_CHECK_(true) << "Check failed in switch case"; +} + +// Verifies behavior of FormatFileLocation. 
+TEST(FormatFileLocationTest, FormatsFileLocation) { + EXPECT_PRED_FORMAT2(IsSubstring, "foo.cc", FormatFileLocation("foo.cc", 42)); + EXPECT_PRED_FORMAT2(IsSubstring, "42", FormatFileLocation("foo.cc", 42)); +} + +TEST(FormatFileLocationTest, FormatsUnknownFile) { + EXPECT_PRED_FORMAT2(IsSubstring, "unknown file", + FormatFileLocation(nullptr, 42)); + EXPECT_PRED_FORMAT2(IsSubstring, "42", FormatFileLocation(nullptr, 42)); +} + +TEST(FormatFileLocationTest, FormatsUknownLine) { + EXPECT_EQ("foo.cc:", FormatFileLocation("foo.cc", -1)); +} + +TEST(FormatFileLocationTest, FormatsUknownFileAndLine) { + EXPECT_EQ("unknown file:", FormatFileLocation(nullptr, -1)); +} + +// Verifies behavior of FormatCompilerIndependentFileLocation. +TEST(FormatCompilerIndependentFileLocationTest, FormatsFileLocation) { + EXPECT_EQ("foo.cc:42", FormatCompilerIndependentFileLocation("foo.cc", 42)); +} + +TEST(FormatCompilerIndependentFileLocationTest, FormatsUknownFile) { + EXPECT_EQ("unknown file:42", + FormatCompilerIndependentFileLocation(nullptr, 42)); +} + +TEST(FormatCompilerIndependentFileLocationTest, FormatsUknownLine) { + EXPECT_EQ("foo.cc", FormatCompilerIndependentFileLocation("foo.cc", -1)); +} + +TEST(FormatCompilerIndependentFileLocationTest, FormatsUknownFileAndLine) { + EXPECT_EQ("unknown file", FormatCompilerIndependentFileLocation(nullptr, -1)); +} + +#if GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_QNX || GTEST_OS_FUCHSIA || \ + GTEST_OS_DRAGONFLY || GTEST_OS_FREEBSD || GTEST_OS_GNU_KFREEBSD || \ + GTEST_OS_NETBSD || GTEST_OS_OPENBSD +void* ThreadFunc(void* data) { + internal::Mutex* mutex = static_cast<internal::Mutex*>(data); + mutex->Lock(); + mutex->Unlock(); + return nullptr; +} + +TEST(GetThreadCountTest, ReturnsCorrectValue) { + const size_t starting_count = GetThreadCount(); + pthread_t thread_id; + + internal::Mutex mutex; + { + internal::MutexLock lock(&mutex); + pthread_attr_t attr; + ASSERT_EQ(0, pthread_attr_init(&attr)); + ASSERT_EQ(0, pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE)); + + const int status = pthread_create(&thread_id, &attr, &ThreadFunc, &mutex); + ASSERT_EQ(0, pthread_attr_destroy(&attr)); + ASSERT_EQ(0, status); + EXPECT_EQ(starting_count + 1, GetThreadCount()); + } + + void* dummy; + ASSERT_EQ(0, pthread_join(thread_id, &dummy)); + + // The OS may not immediately report the updated thread count after + // joining a thread, causing flakiness in this test. To counter that, we + // wait for up to .5 seconds for the OS to report the correct value. 
+ for (int i = 0; i < 5; ++i) { + if (GetThreadCount() == starting_count) + break; + + SleepMilliseconds(100); + } + + EXPECT_EQ(starting_count, GetThreadCount()); +} +#else +TEST(GetThreadCountTest, ReturnsZeroWhenUnableToCountThreads) { + EXPECT_EQ(0U, GetThreadCount()); +} +#endif // GTEST_OS_LINUX || GTEST_OS_MAC || GTEST_OS_QNX || GTEST_OS_FUCHSIA + +TEST(GtestCheckDeathTest, DiesWithCorrectOutputOnFailure) { + const bool a_false_condition = false; + const char regex[] = +#ifdef _MSC_VER + "googletest-port-test\\.cc\\(\\d+\\):" +#elif GTEST_USES_POSIX_RE + "googletest-port-test\\.cc:[0-9]+" +#else + "googletest-port-test\\.cc:\\d+" +#endif // _MSC_VER + ".*a_false_condition.*Extra info.*"; + + EXPECT_DEATH_IF_SUPPORTED(GTEST_CHECK_(a_false_condition) << "Extra info", + regex); +} + +#if GTEST_HAS_DEATH_TEST + +TEST(GtestCheckDeathTest, LivesSilentlyOnSuccess) { + EXPECT_EXIT({ + GTEST_CHECK_(true) << "Extra info"; + ::std::cerr << "Success\n"; + exit(0); }, + ::testing::ExitedWithCode(0), "Success"); +} + +#endif // GTEST_HAS_DEATH_TEST + +// Verifies that Google Test choose regular expression engine appropriate to +// the platform. The test will produce compiler errors in case of failure. +// For simplicity, we only cover the most important platforms here. +TEST(RegexEngineSelectionTest, SelectsCorrectRegexEngine) { +#if !GTEST_USES_PCRE +# if GTEST_HAS_POSIX_RE + + EXPECT_TRUE(GTEST_USES_POSIX_RE); + +# else + + EXPECT_TRUE(GTEST_USES_SIMPLE_RE); + +# endif +#endif // !GTEST_USES_PCRE +} + +#if GTEST_USES_POSIX_RE + +template <typename Str> +class RETest : public ::testing::Test {}; + +// Defines StringTypes as the list of all string types that class RE +// supports. +typedef testing::Types< ::std::string, const char*> StringTypes; + +TYPED_TEST_SUITE(RETest, StringTypes); + +// Tests RE's implicit constructors. +TYPED_TEST(RETest, ImplicitConstructorWorks) { + const RE empty(TypeParam("")); + EXPECT_STREQ("", empty.pattern()); + + const RE simple(TypeParam("hello")); + EXPECT_STREQ("hello", simple.pattern()); + + const RE normal(TypeParam(".*(\\w+)")); + EXPECT_STREQ(".*(\\w+)", normal.pattern()); +} + +// Tests that RE's constructors reject invalid regular expressions. +TYPED_TEST(RETest, RejectsInvalidRegex) { + EXPECT_NONFATAL_FAILURE({ + const RE invalid(TypeParam("?")); + }, "\"?\" is not a valid POSIX Extended regular expression."); +} + +// Tests RE::FullMatch(). +TYPED_TEST(RETest, FullMatchWorks) { + const RE empty(TypeParam("")); + EXPECT_TRUE(RE::FullMatch(TypeParam(""), empty)); + EXPECT_FALSE(RE::FullMatch(TypeParam("a"), empty)); + + const RE re(TypeParam("a.*z")); + EXPECT_TRUE(RE::FullMatch(TypeParam("az"), re)); + EXPECT_TRUE(RE::FullMatch(TypeParam("axyz"), re)); + EXPECT_FALSE(RE::FullMatch(TypeParam("baz"), re)); + EXPECT_FALSE(RE::FullMatch(TypeParam("azy"), re)); +} + +// Tests RE::PartialMatch(). 
+TYPED_TEST(RETest, PartialMatchWorks) { + const RE empty(TypeParam("")); + EXPECT_TRUE(RE::PartialMatch(TypeParam(""), empty)); + EXPECT_TRUE(RE::PartialMatch(TypeParam("a"), empty)); + + const RE re(TypeParam("a.*z")); + EXPECT_TRUE(RE::PartialMatch(TypeParam("az"), re)); + EXPECT_TRUE(RE::PartialMatch(TypeParam("axyz"), re)); + EXPECT_TRUE(RE::PartialMatch(TypeParam("baz"), re)); + EXPECT_TRUE(RE::PartialMatch(TypeParam("azy"), re)); + EXPECT_FALSE(RE::PartialMatch(TypeParam("zza"), re)); +} + +#elif GTEST_USES_SIMPLE_RE + +TEST(IsInSetTest, NulCharIsNotInAnySet) { + EXPECT_FALSE(IsInSet('\0', "")); + EXPECT_FALSE(IsInSet('\0', "\0")); + EXPECT_FALSE(IsInSet('\0', "a")); +} + +TEST(IsInSetTest, WorksForNonNulChars) { + EXPECT_FALSE(IsInSet('a', "Ab")); + EXPECT_FALSE(IsInSet('c', "")); + + EXPECT_TRUE(IsInSet('b', "bcd")); + EXPECT_TRUE(IsInSet('b', "ab")); +} + +TEST(IsAsciiDigitTest, IsFalseForNonDigit) { + EXPECT_FALSE(IsAsciiDigit('\0')); + EXPECT_FALSE(IsAsciiDigit(' ')); + EXPECT_FALSE(IsAsciiDigit('+')); + EXPECT_FALSE(IsAsciiDigit('-')); + EXPECT_FALSE(IsAsciiDigit('.')); + EXPECT_FALSE(IsAsciiDigit('a')); +} + +TEST(IsAsciiDigitTest, IsTrueForDigit) { + EXPECT_TRUE(IsAsciiDigit('0')); + EXPECT_TRUE(IsAsciiDigit('1')); + EXPECT_TRUE(IsAsciiDigit('5')); + EXPECT_TRUE(IsAsciiDigit('9')); +} + +TEST(IsAsciiPunctTest, IsFalseForNonPunct) { + EXPECT_FALSE(IsAsciiPunct('\0')); + EXPECT_FALSE(IsAsciiPunct(' ')); + EXPECT_FALSE(IsAsciiPunct('\n')); + EXPECT_FALSE(IsAsciiPunct('a')); + EXPECT_FALSE(IsAsciiPunct('0')); +} + +TEST(IsAsciiPunctTest, IsTrueForPunct) { + for (const char* p = "^-!\"#$%&'()*+,./:;<=>?@[\\]_`{|}~"; *p; p++) { + EXPECT_PRED1(IsAsciiPunct, *p); + } +} + +TEST(IsRepeatTest, IsFalseForNonRepeatChar) { + EXPECT_FALSE(IsRepeat('\0')); + EXPECT_FALSE(IsRepeat(' ')); + EXPECT_FALSE(IsRepeat('a')); + EXPECT_FALSE(IsRepeat('1')); + EXPECT_FALSE(IsRepeat('-')); +} + +TEST(IsRepeatTest, IsTrueForRepeatChar) { + EXPECT_TRUE(IsRepeat('?')); + EXPECT_TRUE(IsRepeat('*')); + EXPECT_TRUE(IsRepeat('+')); +} + +TEST(IsAsciiWhiteSpaceTest, IsFalseForNonWhiteSpace) { + EXPECT_FALSE(IsAsciiWhiteSpace('\0')); + EXPECT_FALSE(IsAsciiWhiteSpace('a')); + EXPECT_FALSE(IsAsciiWhiteSpace('1')); + EXPECT_FALSE(IsAsciiWhiteSpace('+')); + EXPECT_FALSE(IsAsciiWhiteSpace('_')); +} + +TEST(IsAsciiWhiteSpaceTest, IsTrueForWhiteSpace) { + EXPECT_TRUE(IsAsciiWhiteSpace(' ')); + EXPECT_TRUE(IsAsciiWhiteSpace('\n')); + EXPECT_TRUE(IsAsciiWhiteSpace('\r')); + EXPECT_TRUE(IsAsciiWhiteSpace('\t')); + EXPECT_TRUE(IsAsciiWhiteSpace('\v')); + EXPECT_TRUE(IsAsciiWhiteSpace('\f')); +} + +TEST(IsAsciiWordCharTest, IsFalseForNonWordChar) { + EXPECT_FALSE(IsAsciiWordChar('\0')); + EXPECT_FALSE(IsAsciiWordChar('+')); + EXPECT_FALSE(IsAsciiWordChar('.')); + EXPECT_FALSE(IsAsciiWordChar(' ')); + EXPECT_FALSE(IsAsciiWordChar('\n')); +} + +TEST(IsAsciiWordCharTest, IsTrueForLetter) { + EXPECT_TRUE(IsAsciiWordChar('a')); + EXPECT_TRUE(IsAsciiWordChar('b')); + EXPECT_TRUE(IsAsciiWordChar('A')); + EXPECT_TRUE(IsAsciiWordChar('Z')); +} + +TEST(IsAsciiWordCharTest, IsTrueForDigit) { + EXPECT_TRUE(IsAsciiWordChar('0')); + EXPECT_TRUE(IsAsciiWordChar('1')); + EXPECT_TRUE(IsAsciiWordChar('7')); + EXPECT_TRUE(IsAsciiWordChar('9')); +} + +TEST(IsAsciiWordCharTest, IsTrueForUnderscore) { + EXPECT_TRUE(IsAsciiWordChar('_')); +} + +TEST(IsValidEscapeTest, IsFalseForNonPrintable) { + EXPECT_FALSE(IsValidEscape('\0')); + EXPECT_FALSE(IsValidEscape('\007')); +} + +TEST(IsValidEscapeTest, IsFalseForDigit) { + 
EXPECT_FALSE(IsValidEscape('0')); + EXPECT_FALSE(IsValidEscape('9')); +} + +TEST(IsValidEscapeTest, IsFalseForWhiteSpace) { + EXPECT_FALSE(IsValidEscape(' ')); + EXPECT_FALSE(IsValidEscape('\n')); +} + +TEST(IsValidEscapeTest, IsFalseForSomeLetter) { + EXPECT_FALSE(IsValidEscape('a')); + EXPECT_FALSE(IsValidEscape('Z')); +} + +TEST(IsValidEscapeTest, IsTrueForPunct) { + EXPECT_TRUE(IsValidEscape('.')); + EXPECT_TRUE(IsValidEscape('-')); + EXPECT_TRUE(IsValidEscape('^')); + EXPECT_TRUE(IsValidEscape('$')); + EXPECT_TRUE(IsValidEscape('(')); + EXPECT_TRUE(IsValidEscape(']')); + EXPECT_TRUE(IsValidEscape('{')); + EXPECT_TRUE(IsValidEscape('|')); +} + +TEST(IsValidEscapeTest, IsTrueForSomeLetter) { + EXPECT_TRUE(IsValidEscape('d')); + EXPECT_TRUE(IsValidEscape('D')); + EXPECT_TRUE(IsValidEscape('s')); + EXPECT_TRUE(IsValidEscape('S')); + EXPECT_TRUE(IsValidEscape('w')); + EXPECT_TRUE(IsValidEscape('W')); +} + +TEST(AtomMatchesCharTest, EscapedPunct) { + EXPECT_FALSE(AtomMatchesChar(true, '\\', '\0')); + EXPECT_FALSE(AtomMatchesChar(true, '\\', ' ')); + EXPECT_FALSE(AtomMatchesChar(true, '_', '.')); + EXPECT_FALSE(AtomMatchesChar(true, '.', 'a')); + + EXPECT_TRUE(AtomMatchesChar(true, '\\', '\\')); + EXPECT_TRUE(AtomMatchesChar(true, '_', '_')); + EXPECT_TRUE(AtomMatchesChar(true, '+', '+')); + EXPECT_TRUE(AtomMatchesChar(true, '.', '.')); +} + +TEST(AtomMatchesCharTest, Escaped_d) { + EXPECT_FALSE(AtomMatchesChar(true, 'd', '\0')); + EXPECT_FALSE(AtomMatchesChar(true, 'd', 'a')); + EXPECT_FALSE(AtomMatchesChar(true, 'd', '.')); + + EXPECT_TRUE(AtomMatchesChar(true, 'd', '0')); + EXPECT_TRUE(AtomMatchesChar(true, 'd', '9')); +} + +TEST(AtomMatchesCharTest, Escaped_D) { + EXPECT_FALSE(AtomMatchesChar(true, 'D', '0')); + EXPECT_FALSE(AtomMatchesChar(true, 'D', '9')); + + EXPECT_TRUE(AtomMatchesChar(true, 'D', '\0')); + EXPECT_TRUE(AtomMatchesChar(true, 'D', 'a')); + EXPECT_TRUE(AtomMatchesChar(true, 'D', '-')); +} + +TEST(AtomMatchesCharTest, Escaped_s) { + EXPECT_FALSE(AtomMatchesChar(true, 's', '\0')); + EXPECT_FALSE(AtomMatchesChar(true, 's', 'a')); + EXPECT_FALSE(AtomMatchesChar(true, 's', '.')); + EXPECT_FALSE(AtomMatchesChar(true, 's', '9')); + + EXPECT_TRUE(AtomMatchesChar(true, 's', ' ')); + EXPECT_TRUE(AtomMatchesChar(true, 's', '\n')); + EXPECT_TRUE(AtomMatchesChar(true, 's', '\t')); +} + +TEST(AtomMatchesCharTest, Escaped_S) { + EXPECT_FALSE(AtomMatchesChar(true, 'S', ' ')); + EXPECT_FALSE(AtomMatchesChar(true, 'S', '\r')); + + EXPECT_TRUE(AtomMatchesChar(true, 'S', '\0')); + EXPECT_TRUE(AtomMatchesChar(true, 'S', 'a')); + EXPECT_TRUE(AtomMatchesChar(true, 'S', '9')); +} + +TEST(AtomMatchesCharTest, Escaped_w) { + EXPECT_FALSE(AtomMatchesChar(true, 'w', '\0')); + EXPECT_FALSE(AtomMatchesChar(true, 'w', '+')); + EXPECT_FALSE(AtomMatchesChar(true, 'w', ' ')); + EXPECT_FALSE(AtomMatchesChar(true, 'w', '\n')); + + EXPECT_TRUE(AtomMatchesChar(true, 'w', '0')); + EXPECT_TRUE(AtomMatchesChar(true, 'w', 'b')); + EXPECT_TRUE(AtomMatchesChar(true, 'w', 'C')); + EXPECT_TRUE(AtomMatchesChar(true, 'w', '_')); +} + +TEST(AtomMatchesCharTest, Escaped_W) { + EXPECT_FALSE(AtomMatchesChar(true, 'W', 'A')); + EXPECT_FALSE(AtomMatchesChar(true, 'W', 'b')); + EXPECT_FALSE(AtomMatchesChar(true, 'W', '9')); + EXPECT_FALSE(AtomMatchesChar(true, 'W', '_')); + + EXPECT_TRUE(AtomMatchesChar(true, 'W', '\0')); + EXPECT_TRUE(AtomMatchesChar(true, 'W', '*')); + EXPECT_TRUE(AtomMatchesChar(true, 'W', '\n')); +} + +TEST(AtomMatchesCharTest, EscapedWhiteSpace) { + EXPECT_FALSE(AtomMatchesChar(true, 'f', '\0')); + 
EXPECT_FALSE(AtomMatchesChar(true, 'f', '\n')); + EXPECT_FALSE(AtomMatchesChar(true, 'n', '\0')); + EXPECT_FALSE(AtomMatchesChar(true, 'n', '\r')); + EXPECT_FALSE(AtomMatchesChar(true, 'r', '\0')); + EXPECT_FALSE(AtomMatchesChar(true, 'r', 'a')); + EXPECT_FALSE(AtomMatchesChar(true, 't', '\0')); + EXPECT_FALSE(AtomMatchesChar(true, 't', 't')); + EXPECT_FALSE(AtomMatchesChar(true, 'v', '\0')); + EXPECT_FALSE(AtomMatchesChar(true, 'v', '\f')); + + EXPECT_TRUE(AtomMatchesChar(true, 'f', '\f')); + EXPECT_TRUE(AtomMatchesChar(true, 'n', '\n')); + EXPECT_TRUE(AtomMatchesChar(true, 'r', '\r')); + EXPECT_TRUE(AtomMatchesChar(true, 't', '\t')); + EXPECT_TRUE(AtomMatchesChar(true, 'v', '\v')); +} + +TEST(AtomMatchesCharTest, UnescapedDot) { + EXPECT_FALSE(AtomMatchesChar(false, '.', '\n')); + + EXPECT_TRUE(AtomMatchesChar(false, '.', '\0')); + EXPECT_TRUE(AtomMatchesChar(false, '.', '.')); + EXPECT_TRUE(AtomMatchesChar(false, '.', 'a')); + EXPECT_TRUE(AtomMatchesChar(false, '.', ' ')); +} + +TEST(AtomMatchesCharTest, UnescapedChar) { + EXPECT_FALSE(AtomMatchesChar(false, 'a', '\0')); + EXPECT_FALSE(AtomMatchesChar(false, 'a', 'b')); + EXPECT_FALSE(AtomMatchesChar(false, '$', 'a')); + + EXPECT_TRUE(AtomMatchesChar(false, '$', '$')); + EXPECT_TRUE(AtomMatchesChar(false, '5', '5')); + EXPECT_TRUE(AtomMatchesChar(false, 'Z', 'Z')); +} + +TEST(ValidateRegexTest, GeneratesFailureAndReturnsFalseForInvalid) { + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(NULL)), + "NULL is not a valid simple regular expression"); + EXPECT_NONFATAL_FAILURE( + ASSERT_FALSE(ValidateRegex("a\\")), + "Syntax error at index 1 in simple regular expression \"a\\\": "); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("a\\")), + "'\\' cannot appear at the end"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("\\n\\")), + "'\\' cannot appear at the end"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("\\s\\hb")), + "invalid escape sequence \"\\h\""); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("^^")), + "'^' can only appear at the beginning"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex(".*^b")), + "'^' can only appear at the beginning"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("$$")), + "'$' can only appear at the end"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("^$a")), + "'$' can only appear at the end"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("a(b")), + "'(' is unsupported"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("ab)")), + "')' is unsupported"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("[ab")), + "'[' is unsupported"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("a{2")), + "'{' is unsupported"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("?")), + "'?' 
can only follow a repeatable token"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("^*")), + "'*' can only follow a repeatable token"); + EXPECT_NONFATAL_FAILURE(ASSERT_FALSE(ValidateRegex("5*+")), + "'+' can only follow a repeatable token"); +} + +TEST(ValidateRegexTest, ReturnsTrueForValid) { + EXPECT_TRUE(ValidateRegex("")); + EXPECT_TRUE(ValidateRegex("a")); + EXPECT_TRUE(ValidateRegex(".*")); + EXPECT_TRUE(ValidateRegex("^a_+")); + EXPECT_TRUE(ValidateRegex("^a\\t\\&?")); + EXPECT_TRUE(ValidateRegex("09*$")); + EXPECT_TRUE(ValidateRegex("^Z$")); + EXPECT_TRUE(ValidateRegex("a\\^Z\\$\\(\\)\\|\\[\\]\\{\\}")); +} + +TEST(MatchRepetitionAndRegexAtHeadTest, WorksForZeroOrOne) { + EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "a", "ba")); + // Repeating more than once. + EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "b", "aab")); + + // Repeating zero times. + EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "b", "ba")); + // Repeating once. + EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, 'a', '?', "b", "ab")); + EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '#', '?', ".", "##")); +} + +TEST(MatchRepetitionAndRegexAtHeadTest, WorksForZeroOrMany) { + EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '*', "a$", "baab")); + + // Repeating zero times. + EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '*', "b", "bc")); + // Repeating once. + EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '*', "b", "abc")); + // Repeating more than once. + EXPECT_TRUE(MatchRepetitionAndRegexAtHead(true, 'w', '*', "-", "ab_1-g")); +} + +TEST(MatchRepetitionAndRegexAtHeadTest, WorksForOneOrMany) { + EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '+', "a$", "baab")); + // Repeating zero times. + EXPECT_FALSE(MatchRepetitionAndRegexAtHead(false, '.', '+', "b", "bc")); + + // Repeating once. + EXPECT_TRUE(MatchRepetitionAndRegexAtHead(false, '.', '+', "b", "abc")); + // Repeating more than once. 
+ EXPECT_TRUE(MatchRepetitionAndRegexAtHead(true, 'w', '+', "-", "ab_1-g")); +} + +TEST(MatchRegexAtHeadTest, ReturnsTrueForEmptyRegex) { + EXPECT_TRUE(MatchRegexAtHead("", "")); + EXPECT_TRUE(MatchRegexAtHead("", "ab")); +} + +TEST(MatchRegexAtHeadTest, WorksWhenDollarIsInRegex) { + EXPECT_FALSE(MatchRegexAtHead("$", "a")); + + EXPECT_TRUE(MatchRegexAtHead("$", "")); + EXPECT_TRUE(MatchRegexAtHead("a$", "a")); +} + +TEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithEscapeSequence) { + EXPECT_FALSE(MatchRegexAtHead("\\w", "+")); + EXPECT_FALSE(MatchRegexAtHead("\\W", "ab")); + + EXPECT_TRUE(MatchRegexAtHead("\\sa", "\nab")); + EXPECT_TRUE(MatchRegexAtHead("\\d", "1a")); +} + +TEST(MatchRegexAtHeadTest, WorksWhenRegexStartsWithRepetition) { + EXPECT_FALSE(MatchRegexAtHead(".+a", "abc")); + EXPECT_FALSE(MatchRegexAtHead("a?b", "aab")); + + EXPECT_TRUE(MatchRegexAtHead(".*a", "bc12-ab")); + EXPECT_TRUE(MatchRegexAtHead("a?b", "b")); + EXPECT_TRUE(MatchRegexAtHead("a?b", "ab")); +} + +TEST(MatchRegexAtHeadTest, + WorksWhenRegexStartsWithRepetionOfEscapeSequence) { + EXPECT_FALSE(MatchRegexAtHead("\\.+a", "abc")); + EXPECT_FALSE(MatchRegexAtHead("\\s?b", " b")); + + EXPECT_TRUE(MatchRegexAtHead("\\(*a", "((((ab")); + EXPECT_TRUE(MatchRegexAtHead("\\^?b", "^b")); + EXPECT_TRUE(MatchRegexAtHead("\\\\?b", "b")); + EXPECT_TRUE(MatchRegexAtHead("\\\\?b", "\\b")); +} + +TEST(MatchRegexAtHeadTest, MatchesSequentially) { + EXPECT_FALSE(MatchRegexAtHead("ab.*c", "acabc")); + + EXPECT_TRUE(MatchRegexAtHead("ab.*c", "ab-fsc")); +} + +TEST(MatchRegexAnywhereTest, ReturnsFalseWhenStringIsNull) { + EXPECT_FALSE(MatchRegexAnywhere("", NULL)); +} + +TEST(MatchRegexAnywhereTest, WorksWhenRegexStartsWithCaret) { + EXPECT_FALSE(MatchRegexAnywhere("^a", "ba")); + EXPECT_FALSE(MatchRegexAnywhere("^$", "a")); + + EXPECT_TRUE(MatchRegexAnywhere("^a", "ab")); + EXPECT_TRUE(MatchRegexAnywhere("^", "ab")); + EXPECT_TRUE(MatchRegexAnywhere("^$", "")); +} + +TEST(MatchRegexAnywhereTest, ReturnsFalseWhenNoMatch) { + EXPECT_FALSE(MatchRegexAnywhere("a", "bcde123")); + EXPECT_FALSE(MatchRegexAnywhere("a.+a", "--aa88888888")); +} + +TEST(MatchRegexAnywhereTest, ReturnsTrueWhenMatchingPrefix) { + EXPECT_TRUE(MatchRegexAnywhere("\\w+", "ab1_ - 5")); + EXPECT_TRUE(MatchRegexAnywhere(".*=", "=")); + EXPECT_TRUE(MatchRegexAnywhere("x.*ab?.*bc", "xaaabc")); +} + +TEST(MatchRegexAnywhereTest, ReturnsTrueWhenMatchingNonPrefix) { + EXPECT_TRUE(MatchRegexAnywhere("\\w+", "$$$ ab1_ - 5")); + EXPECT_TRUE(MatchRegexAnywhere("\\.+=", "= ...=")); +} + +// Tests RE's implicit constructors. +TEST(RETest, ImplicitConstructorWorks) { + const RE empty(""); + EXPECT_STREQ("", empty.pattern()); + + const RE simple("hello"); + EXPECT_STREQ("hello", simple.pattern()); +} + +// Tests that RE's constructors reject invalid regular expressions. +TEST(RETest, RejectsInvalidRegex) { + EXPECT_NONFATAL_FAILURE({ + const RE normal(NULL); + }, "NULL is not a valid simple regular expression"); + + EXPECT_NONFATAL_FAILURE({ + const RE normal(".*(\\w+"); + }, "'(' is unsupported"); + + EXPECT_NONFATAL_FAILURE({ + const RE invalid("^?"); + }, "'?' can only follow a repeatable token"); +} + +// Tests RE::FullMatch(). 
+TEST(RETest, FullMatchWorks) { + const RE empty(""); + EXPECT_TRUE(RE::FullMatch("", empty)); + EXPECT_FALSE(RE::FullMatch("a", empty)); + + const RE re1("a"); + EXPECT_TRUE(RE::FullMatch("a", re1)); + + const RE re("a.*z"); + EXPECT_TRUE(RE::FullMatch("az", re)); + EXPECT_TRUE(RE::FullMatch("axyz", re)); + EXPECT_FALSE(RE::FullMatch("baz", re)); + EXPECT_FALSE(RE::FullMatch("azy", re)); +} + +// Tests RE::PartialMatch(). +TEST(RETest, PartialMatchWorks) { + const RE empty(""); + EXPECT_TRUE(RE::PartialMatch("", empty)); + EXPECT_TRUE(RE::PartialMatch("a", empty)); + + const RE re("a.*z"); + EXPECT_TRUE(RE::PartialMatch("az", re)); + EXPECT_TRUE(RE::PartialMatch("axyz", re)); + EXPECT_TRUE(RE::PartialMatch("baz", re)); + EXPECT_TRUE(RE::PartialMatch("azy", re)); + EXPECT_FALSE(RE::PartialMatch("zza", re)); +} + +#endif // GTEST_USES_POSIX_RE + +#if !GTEST_OS_WINDOWS_MOBILE + +TEST(CaptureTest, CapturesStdout) { + CaptureStdout(); + fprintf(stdout, "abc"); + EXPECT_STREQ("abc", GetCapturedStdout().c_str()); + + CaptureStdout(); + fprintf(stdout, "def%cghi", '\0'); + EXPECT_EQ(::std::string("def\0ghi", 7), ::std::string(GetCapturedStdout())); +} + +TEST(CaptureTest, CapturesStderr) { + CaptureStderr(); + fprintf(stderr, "jkl"); + EXPECT_STREQ("jkl", GetCapturedStderr().c_str()); + + CaptureStderr(); + fprintf(stderr, "jkl%cmno", '\0'); + EXPECT_EQ(::std::string("jkl\0mno", 7), ::std::string(GetCapturedStderr())); +} + +// Tests that stdout and stderr capture don't interfere with each other. +TEST(CaptureTest, CapturesStdoutAndStderr) { + CaptureStdout(); + CaptureStderr(); + fprintf(stdout, "pqr"); + fprintf(stderr, "stu"); + EXPECT_STREQ("pqr", GetCapturedStdout().c_str()); + EXPECT_STREQ("stu", GetCapturedStderr().c_str()); +} + +TEST(CaptureDeathTest, CannotReenterStdoutCapture) { + CaptureStdout(); + EXPECT_DEATH_IF_SUPPORTED(CaptureStdout(), + "Only one stdout capturer can exist at a time"); + GetCapturedStdout(); + + // We cannot test stderr capturing using death tests as they use it + // themselves. +} + +#endif // !GTEST_OS_WINDOWS_MOBILE + +TEST(ThreadLocalTest, DefaultConstructorInitializesToDefaultValues) { + ThreadLocal<int> t1; + EXPECT_EQ(0, t1.get()); + + ThreadLocal<void*> t2; + EXPECT_TRUE(t2.get() == nullptr); +} + +TEST(ThreadLocalTest, SingleParamConstructorInitializesToParam) { + ThreadLocal<int> t1(123); + EXPECT_EQ(123, t1.get()); + + int i = 0; + ThreadLocal<int*> t2(&i); + EXPECT_EQ(&i, t2.get()); +} + +class NoDefaultContructor { + public: + explicit NoDefaultContructor(const char*) {} + NoDefaultContructor(const NoDefaultContructor&) {} +}; + +TEST(ThreadLocalTest, ValueDefaultContructorIsNotRequiredForParamVersion) { + ThreadLocal<NoDefaultContructor> bar(NoDefaultContructor("foo")); + bar.pointer(); +} + +TEST(ThreadLocalTest, GetAndPointerReturnSameValue) { + ThreadLocal<std::string> thread_local_string; + + EXPECT_EQ(thread_local_string.pointer(), &(thread_local_string.get())); + + // Verifies the condition still holds after calling set. 
+ thread_local_string.set("foo"); + EXPECT_EQ(thread_local_string.pointer(), &(thread_local_string.get())); +} + +TEST(ThreadLocalTest, PointerAndConstPointerReturnSameValue) { + ThreadLocal<std::string> thread_local_string; + const ThreadLocal<std::string>& const_thread_local_string = + thread_local_string; + + EXPECT_EQ(thread_local_string.pointer(), const_thread_local_string.pointer()); + + thread_local_string.set("foo"); + EXPECT_EQ(thread_local_string.pointer(), const_thread_local_string.pointer()); +} + +#if GTEST_IS_THREADSAFE + +void AddTwo(int* param) { *param += 2; } + +TEST(ThreadWithParamTest, ConstructorExecutesThreadFunc) { + int i = 40; + ThreadWithParam<int*> thread(&AddTwo, &i, nullptr); + thread.Join(); + EXPECT_EQ(42, i); +} + +TEST(MutexDeathTest, AssertHeldShouldAssertWhenNotLocked) { + // AssertHeld() is flaky only in the presence of multiple threads accessing + // the lock. In this case, the test is robust. + EXPECT_DEATH_IF_SUPPORTED({ + Mutex m; + { MutexLock lock(&m); } + m.AssertHeld(); + }, + "thread .*hold"); +} + +TEST(MutexTest, AssertHeldShouldNotAssertWhenLocked) { + Mutex m; + MutexLock lock(&m); + m.AssertHeld(); +} + +class AtomicCounterWithMutex { + public: + explicit AtomicCounterWithMutex(Mutex* mutex) : + value_(0), mutex_(mutex), random_(42) {} + + void Increment() { + MutexLock lock(mutex_); + int temp = value_; + { + // We need to put up a memory barrier to prevent reads and writes to + // value_ rearranged with the call to SleepMilliseconds when observed + // from other threads. +#if GTEST_HAS_PTHREAD + // On POSIX, locking a mutex puts up a memory barrier. We cannot use + // Mutex and MutexLock here or rely on their memory barrier + // functionality as we are testing them here. + pthread_mutex_t memory_barrier_mutex; + GTEST_CHECK_POSIX_SUCCESS_( + pthread_mutex_init(&memory_barrier_mutex, nullptr)); + GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_lock(&memory_barrier_mutex)); + + SleepMilliseconds(static_cast<int>(random_.Generate(30))); + + GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_unlock(&memory_barrier_mutex)); + GTEST_CHECK_POSIX_SUCCESS_(pthread_mutex_destroy(&memory_barrier_mutex)); +#elif GTEST_OS_WINDOWS + // On Windows, performing an interlocked access puts up a memory barrier. + volatile LONG dummy = 0; + ::InterlockedIncrement(&dummy); + SleepMilliseconds(static_cast<int>(random_.Generate(30))); + ::InterlockedIncrement(&dummy); +#else +# error "Memory barrier not implemented on this platform." +#endif // GTEST_HAS_PTHREAD + } + value_ = temp + 1; + } + int value() const { return value_; } + + private: + volatile int value_; + Mutex* const mutex_; // Protects value_. + Random random_; +}; + +void CountingThreadFunc(pair<AtomicCounterWithMutex*, int> param) { + for (int i = 0; i < param.second; ++i) + param.first->Increment(); +} + +// Tests that the mutex only lets one thread at a time to lock it. +TEST(MutexTest, OnlyOneThreadCanLockAtATime) { + Mutex mutex; + AtomicCounterWithMutex locked_counter(&mutex); + + typedef ThreadWithParam<pair<AtomicCounterWithMutex*, int> > ThreadType; + const int kCycleCount = 20; + const int kThreadCount = 7; + std::unique_ptr<ThreadType> counting_threads[kThreadCount]; + Notification threads_can_start; + // Creates and runs kThreadCount threads that increment locked_counter + // kCycleCount times each. 
+  for (int i = 0; i < kThreadCount; ++i) {
+    counting_threads[i].reset(new ThreadType(&CountingThreadFunc,
+                                              make_pair(&locked_counter,
+                                                        kCycleCount),
+                                              &threads_can_start));
+  }
+  threads_can_start.Notify();
+  for (int i = 0; i < kThreadCount; ++i)
+    counting_threads[i]->Join();
+
+  // If the mutex lets more than one thread increment the counter at a
+  // time, they are likely to encounter a race condition and have some
+  // increments overwritten, resulting in a lower than expected counter
+  // value.
+  EXPECT_EQ(kCycleCount * kThreadCount, locked_counter.value());
+}
+
+template <typename T>
+void RunFromThread(void (func)(T), T param) {
+  ThreadWithParam<T> thread(func, param, nullptr);
+  thread.Join();
+}
+
+void RetrieveThreadLocalValue(
+    pair<ThreadLocal<std::string>*, std::string*> param) {
+  *param.second = param.first->get();
+}
+
+TEST(ThreadLocalTest, ParameterizedConstructorSetsDefault) {
+  ThreadLocal<std::string> thread_local_string("foo");
+  EXPECT_STREQ("foo", thread_local_string.get().c_str());
+
+  thread_local_string.set("bar");
+  EXPECT_STREQ("bar", thread_local_string.get().c_str());
+
+  std::string result;
+  RunFromThread(&RetrieveThreadLocalValue,
+                make_pair(&thread_local_string, &result));
+  EXPECT_STREQ("foo", result.c_str());
+}
+
+// Keeps track of whether a destructor has been called on an instance of
+// DestructorTracker. On Windows, waits for the destructor call reports.
+class DestructorCall {
+ public:
+  DestructorCall() {
+    invoked_ = false;
+#if GTEST_OS_WINDOWS
+    wait_event_.Reset(::CreateEvent(NULL, TRUE, FALSE, NULL));
+    GTEST_CHECK_(wait_event_.Get() != NULL);
+#endif
+  }
+
+  bool CheckDestroyed() const {
+#if GTEST_OS_WINDOWS
+    if (::WaitForSingleObject(wait_event_.Get(), 1000) != WAIT_OBJECT_0)
+      return false;
+#endif
+    return invoked_;
+  }
+
+  void ReportDestroyed() {
+    invoked_ = true;
+#if GTEST_OS_WINDOWS
+    ::SetEvent(wait_event_.Get());
+#endif
+  }
+
+  static std::vector<DestructorCall*>& List() { return *list_; }
+
+  static void ResetList() {
+    for (size_t i = 0; i < list_->size(); ++i) {
+      delete list_->at(i);
+    }
+    list_->clear();
+  }
+
+ private:
+  bool invoked_;
+#if GTEST_OS_WINDOWS
+  AutoHandle wait_event_;
+#endif
+  static std::vector<DestructorCall*>* const list_;
+
+  GTEST_DISALLOW_COPY_AND_ASSIGN_(DestructorCall);
+};
+
+std::vector<DestructorCall*>* const DestructorCall::list_ =
+    new std::vector<DestructorCall*>;
+
+// DestructorTracker keeps track of whether its instances have been
+// destroyed.
+class DestructorTracker {
+ public:
+  DestructorTracker() : index_(GetNewIndex()) {}
+  DestructorTracker(const DestructorTracker& /* rhs */)
+      : index_(GetNewIndex()) {}
+  ~DestructorTracker() {
+    // We never access DestructorCall::List() concurrently, so we don't need
+    // to protect this access with a mutex.
+    DestructorCall::List()[index_]->ReportDestroyed();
+  }
+
+ private:
+  static size_t GetNewIndex() {
+    DestructorCall::List().push_back(new DestructorCall);
+    return DestructorCall::List().size() - 1;
+  }
+  const size_t index_;
+};
+
+typedef ThreadLocal<DestructorTracker>* ThreadParam;
+
+void CallThreadLocalGet(ThreadParam thread_local_param) {
+  thread_local_param->get();
+}
+
+// Tests that when a ThreadLocal object dies in a thread, it destroys
+// the managed object for that thread.
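All of the ThreadLocal tests in this file lean on the same contract: each thread lazily receives its own copy of the managed value, seeded from the constructor argument, and that copy is destroyed together with the ThreadLocal object or with the thread. A compact sketch of that contract (assuming a thread-safe build, and using std::thread for brevity instead of the ThreadWithParam helper the tests themselves use):

#include <string>
#include <thread>

void ThreadLocalSketch() {
  testing::internal::ThreadLocal<std::string> tls("default");
  tls.set("main");  // Mutates only the calling thread's copy.

  std::string seen_by_worker;
  std::thread worker([&] { seen_by_worker = tls.get(); });
  worker.join();

  // seen_by_worker == "default" (the worker's freshly seeded copy), while
  // tls.get() still returns "main" on this thread.
}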
+TEST(ThreadLocalTest, DestroysManagedObjectForOwnThreadWhenDying) { + DestructorCall::ResetList(); + + { + ThreadLocal<DestructorTracker> thread_local_tracker; + ASSERT_EQ(0U, DestructorCall::List().size()); + + // This creates another DestructorTracker object for the main thread. + thread_local_tracker.get(); + ASSERT_EQ(1U, DestructorCall::List().size()); + ASSERT_FALSE(DestructorCall::List()[0]->CheckDestroyed()); + } + + // Now thread_local_tracker has died. + ASSERT_EQ(1U, DestructorCall::List().size()); + EXPECT_TRUE(DestructorCall::List()[0]->CheckDestroyed()); + + DestructorCall::ResetList(); +} + +// Tests that when a thread exits, the thread-local object for that +// thread is destroyed. +TEST(ThreadLocalTest, DestroysManagedObjectAtThreadExit) { + DestructorCall::ResetList(); + + { + ThreadLocal<DestructorTracker> thread_local_tracker; + ASSERT_EQ(0U, DestructorCall::List().size()); + + // This creates another DestructorTracker object in the new thread. + ThreadWithParam<ThreadParam> thread(&CallThreadLocalGet, + &thread_local_tracker, nullptr); + thread.Join(); + + // The thread has exited, and we should have a DestroyedTracker + // instance created for it. But it may not have been destroyed yet. + ASSERT_EQ(1U, DestructorCall::List().size()); + } + + // The thread has exited and thread_local_tracker has died. + ASSERT_EQ(1U, DestructorCall::List().size()); + EXPECT_TRUE(DestructorCall::List()[0]->CheckDestroyed()); + + DestructorCall::ResetList(); +} + +TEST(ThreadLocalTest, ThreadLocalMutationsAffectOnlyCurrentThread) { + ThreadLocal<std::string> thread_local_string; + thread_local_string.set("Foo"); + EXPECT_STREQ("Foo", thread_local_string.get().c_str()); + + std::string result; + RunFromThread(&RetrieveThreadLocalValue, + make_pair(&thread_local_string, &result)); + EXPECT_TRUE(result.empty()); +} + +#endif // GTEST_IS_THREADSAFE + +#if GTEST_OS_WINDOWS +TEST(WindowsTypesTest, HANDLEIsVoidStar) { + StaticAssertTypeEq<HANDLE, void*>(); +} + +#if GTEST_OS_WINDOWS_MINGW && !defined(__MINGW64_VERSION_MAJOR) +TEST(WindowsTypesTest, _CRITICAL_SECTIONIs_CRITICAL_SECTION) { + StaticAssertTypeEq<CRITICAL_SECTION, _CRITICAL_SECTION>(); +} +#else +TEST(WindowsTypesTest, CRITICAL_SECTIONIs_RTL_CRITICAL_SECTION) { + StaticAssertTypeEq<CRITICAL_SECTION, _RTL_CRITICAL_SECTION>(); +} +#endif + +#endif // GTEST_OS_WINDOWS + +} // namespace internal +} // namespace testing diff --git a/security/nss/gtests/google_test/gtest/test/googletest-printers-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-printers-test.cc new file mode 100644 index 0000000000..e1e8e1c7a0 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-printers-test.cc @@ -0,0 +1,1962 @@ +// Copyright 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Google Test - The Google C++ Testing and Mocking Framework +// +// This file tests the universal value printer. + +#include <algorithm> +#include <cctype> +#include <cstdint> +#include <cstring> +#include <deque> +#include <forward_list> +#include <limits> +#include <list> +#include <map> +#include <memory> +#include <set> +#include <sstream> +#include <string> +#include <unordered_map> +#include <unordered_set> +#include <utility> +#include <vector> + +#include "gtest/gtest-printers.h" +#include "gtest/gtest.h" + +// Some user-defined types for testing the universal value printer. + +// An anonymous enum type. +enum AnonymousEnum { + kAE1 = -1, + kAE2 = 1 +}; + +// An enum without a user-defined printer. +enum EnumWithoutPrinter { + kEWP1 = -2, + kEWP2 = 42 +}; + +// An enum with a << operator. +enum EnumWithStreaming { + kEWS1 = 10 +}; + +std::ostream& operator<<(std::ostream& os, EnumWithStreaming e) { + return os << (e == kEWS1 ? "kEWS1" : "invalid"); +} + +// An enum with a PrintTo() function. +enum EnumWithPrintTo { + kEWPT1 = 1 +}; + +void PrintTo(EnumWithPrintTo e, std::ostream* os) { + *os << (e == kEWPT1 ? "kEWPT1" : "invalid"); +} + +// A class implicitly convertible to BiggestInt. +class BiggestIntConvertible { + public: + operator ::testing::internal::BiggestInt() const { return 42; } +}; + +// A parent class with two child classes. The parent and one of the kids have +// stream operators. +class ParentClass {}; +class ChildClassWithStreamOperator : public ParentClass {}; +class ChildClassWithoutStreamOperator : public ParentClass {}; +static void operator<<(std::ostream& os, const ParentClass&) { + os << "ParentClass"; +} +static void operator<<(std::ostream& os, const ChildClassWithStreamOperator&) { + os << "ChildClassWithStreamOperator"; +} + +// A user-defined unprintable class template in the global namespace. +template <typename T> +class UnprintableTemplateInGlobal { + public: + UnprintableTemplateInGlobal() : value_() {} + private: + T value_; +}; + +// A user-defined streamable type in the global namespace. +class StreamableInGlobal { + public: + virtual ~StreamableInGlobal() {} +}; + +inline void operator<<(::std::ostream& os, const StreamableInGlobal& /* x */) { + os << "StreamableInGlobal"; +} + +void operator<<(::std::ostream& os, const StreamableInGlobal* /* x */) { + os << "StreamableInGlobal*"; +} + +namespace foo { + +// A user-defined unprintable type in a user namespace. +class UnprintableInFoo { + public: + UnprintableInFoo() : z_(0) { memcpy(xy_, "\xEF\x12\x0\x0\x34\xAB\x0\x0", 8); } + double z() const { return z_; } + private: + char xy_[8]; + double z_; +}; + +// A user-defined printable type in a user-chosen namespace. 
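The helper types declared above and below exist to pin down the printer's order of preference among its customization points, roughly as the gtest documentation describes it: a PrintTo() overload found by argument-dependent lookup wins over a streaming operator<<, and either one wins over the raw-byte fallback used when neither exists. The tests near the end of this file spell that out; for instance (expected strings quoted from those tests, abbreviated):

// Print(foo::PrintableViaPrintTo())  -> "PrintableViaPrintTo: 0"
// Print(StreamableInGlobal())        -> "StreamableInGlobal"
// Print(foo::UnprintableInFoo())     -> "16-byte object <EF-12 00-00 ...>"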
+struct PrintableViaPrintTo { + PrintableViaPrintTo() : value() {} + int value; +}; + +void PrintTo(const PrintableViaPrintTo& x, ::std::ostream* os) { + *os << "PrintableViaPrintTo: " << x.value; +} + +// A type with a user-defined << for printing its pointer. +struct PointerPrintable { +}; + +::std::ostream& operator<<(::std::ostream& os, + const PointerPrintable* /* x */) { + return os << "PointerPrintable*"; +} + +// A user-defined printable class template in a user-chosen namespace. +template <typename T> +class PrintableViaPrintToTemplate { + public: + explicit PrintableViaPrintToTemplate(const T& a_value) : value_(a_value) {} + + const T& value() const { return value_; } + private: + T value_; +}; + +template <typename T> +void PrintTo(const PrintableViaPrintToTemplate<T>& x, ::std::ostream* os) { + *os << "PrintableViaPrintToTemplate: " << x.value(); +} + +// A user-defined streamable class template in a user namespace. +template <typename T> +class StreamableTemplateInFoo { + public: + StreamableTemplateInFoo() : value_() {} + + const T& value() const { return value_; } + private: + T value_; +}; + +template <typename T> +inline ::std::ostream& operator<<(::std::ostream& os, + const StreamableTemplateInFoo<T>& x) { + return os << "StreamableTemplateInFoo: " << x.value(); +} + +// A user-defined streamable type in a user namespace whose operator<< is +// templated on the type of the output stream. +struct TemplatedStreamableInFoo {}; + +template <typename OutputStream> +OutputStream& operator<<(OutputStream& os, + const TemplatedStreamableInFoo& /*ts*/) { + os << "TemplatedStreamableInFoo"; + return os; +} + +// A user-defined streamable but recursively-defined container type in +// a user namespace, it mimics therefore std::filesystem::path or +// boost::filesystem::path. 
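The recursion is the point of the next type: the iterator it declares has PathLike itself as its value_type, just as a std::filesystem::path iterates over paths. If the printer naively treated such a type as a container, formatting would never terminate; the PathLikeInUserNamespace test later in the file checks that the user-supplied operator<< is used instead. A sketch of the failure mode being guarded against (hypothetical trace, not gtest code):

// Container-style printing of a PathLike p would recurse without bound:
//   Print(p) -> "{ " + Print(*p.begin()) + ...
// where *p.begin() is again a PathLike, so the expansion never reaches a
// base case.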
+class PathLike { + public: + struct iterator { + typedef PathLike value_type; + + iterator& operator++(); + PathLike& operator*(); + }; + + using value_type = char; + using const_iterator = iterator; + + PathLike() {} + + iterator begin() const { return iterator(); } + iterator end() const { return iterator(); } + + friend ::std::ostream& operator<<(::std::ostream& os, const PathLike&) { + return os << "Streamable-PathLike"; + } +}; + +} // namespace foo + +namespace testing { +namespace { +template <typename T> +class Wrapper { + public: + explicit Wrapper(T&& value) : value_(std::forward<T>(value)) {} + + const T& value() const { return value_; } + + private: + T value_; +}; + +} // namespace + +namespace internal { +template <typename T> +class UniversalPrinter<Wrapper<T>> { + public: + static void Print(const Wrapper<T>& w, ::std::ostream* os) { + *os << "Wrapper("; + UniversalPrint(w.value(), os); + *os << ')'; + } +}; +} // namespace internal + + +namespace gtest_printers_test { + +using ::std::deque; +using ::std::list; +using ::std::make_pair; +using ::std::map; +using ::std::multimap; +using ::std::multiset; +using ::std::pair; +using ::std::set; +using ::std::vector; +using ::testing::PrintToString; +using ::testing::internal::FormatForComparisonFailureMessage; +using ::testing::internal::ImplicitCast_; +using ::testing::internal::NativeArray; +using ::testing::internal::RelationToSourceReference; +using ::testing::internal::Strings; +using ::testing::internal::UniversalPrint; +using ::testing::internal::UniversalPrinter; +using ::testing::internal::UniversalTersePrint; +using ::testing::internal::UniversalTersePrintTupleFieldsToStrings; + +// Prints a value to a string using the universal value printer. This +// is a helper for testing UniversalPrinter<T>::Print() for various types. +template <typename T> +std::string Print(const T& value) { + ::std::stringstream ss; + UniversalPrinter<T>::Print(value, &ss); + return ss.str(); +} + +// Prints a value passed by reference to a string, using the universal +// value printer. This is a helper for testing +// UniversalPrinter<T&>::Print() for various types. +template <typename T> +std::string PrintByRef(const T& value) { + ::std::stringstream ss; + UniversalPrinter<T&>::Print(value, &ss); + return ss.str(); +} + +// Tests printing various enum types. + +TEST(PrintEnumTest, AnonymousEnum) { + EXPECT_EQ("-1", Print(kAE1)); + EXPECT_EQ("1", Print(kAE2)); +} + +TEST(PrintEnumTest, EnumWithoutPrinter) { + EXPECT_EQ("-2", Print(kEWP1)); + EXPECT_EQ("42", Print(kEWP2)); +} + +TEST(PrintEnumTest, EnumWithStreaming) { + EXPECT_EQ("kEWS1", Print(kEWS1)); + EXPECT_EQ("invalid", Print(static_cast<EnumWithStreaming>(0))); +} + +TEST(PrintEnumTest, EnumWithPrintTo) { + EXPECT_EQ("kEWPT1", Print(kEWPT1)); + EXPECT_EQ("invalid", Print(static_cast<EnumWithPrintTo>(0))); +} + +// Tests printing a class implicitly convertible to BiggestInt. + +TEST(PrintClassTest, BiggestIntConvertible) { + EXPECT_EQ("42", Print(BiggestIntConvertible())); +} + +// Tests printing various char types. + +// char. +TEST(PrintCharTest, PlainChar) { + EXPECT_EQ("'\\0'", Print('\0')); + EXPECT_EQ("'\\'' (39, 0x27)", Print('\'')); + EXPECT_EQ("'\"' (34, 0x22)", Print('"')); + EXPECT_EQ("'?' 
(63, 0x3F)", Print('?')); + EXPECT_EQ("'\\\\' (92, 0x5C)", Print('\\')); + EXPECT_EQ("'\\a' (7)", Print('\a')); + EXPECT_EQ("'\\b' (8)", Print('\b')); + EXPECT_EQ("'\\f' (12, 0xC)", Print('\f')); + EXPECT_EQ("'\\n' (10, 0xA)", Print('\n')); + EXPECT_EQ("'\\r' (13, 0xD)", Print('\r')); + EXPECT_EQ("'\\t' (9)", Print('\t')); + EXPECT_EQ("'\\v' (11, 0xB)", Print('\v')); + EXPECT_EQ("'\\x7F' (127)", Print('\x7F')); + EXPECT_EQ("'\\xFF' (255)", Print('\xFF')); + EXPECT_EQ("' ' (32, 0x20)", Print(' ')); + EXPECT_EQ("'a' (97, 0x61)", Print('a')); +} + +// signed char. +TEST(PrintCharTest, SignedChar) { + EXPECT_EQ("'\\0'", Print(static_cast<signed char>('\0'))); + EXPECT_EQ("'\\xCE' (-50)", + Print(static_cast<signed char>(-50))); +} + +// unsigned char. +TEST(PrintCharTest, UnsignedChar) { + EXPECT_EQ("'\\0'", Print(static_cast<unsigned char>('\0'))); + EXPECT_EQ("'b' (98, 0x62)", + Print(static_cast<unsigned char>('b'))); +} + +TEST(PrintCharTest, Char16) { + EXPECT_EQ("U+0041", Print(u'A')); +} + +TEST(PrintCharTest, Char32) { + EXPECT_EQ("U+0041", Print(U'A')); +} + +#ifdef __cpp_char8_t +TEST(PrintCharTest, Char8) { + EXPECT_EQ("U+0041", Print(u8'A')); +} +#endif + +// Tests printing other simple, built-in types. + +// bool. +TEST(PrintBuiltInTypeTest, Bool) { + EXPECT_EQ("false", Print(false)); + EXPECT_EQ("true", Print(true)); +} + +// wchar_t. +TEST(PrintBuiltInTypeTest, Wchar_t) { + EXPECT_EQ("L'\\0'", Print(L'\0')); + EXPECT_EQ("L'\\'' (39, 0x27)", Print(L'\'')); + EXPECT_EQ("L'\"' (34, 0x22)", Print(L'"')); + EXPECT_EQ("L'?' (63, 0x3F)", Print(L'?')); + EXPECT_EQ("L'\\\\' (92, 0x5C)", Print(L'\\')); + EXPECT_EQ("L'\\a' (7)", Print(L'\a')); + EXPECT_EQ("L'\\b' (8)", Print(L'\b')); + EXPECT_EQ("L'\\f' (12, 0xC)", Print(L'\f')); + EXPECT_EQ("L'\\n' (10, 0xA)", Print(L'\n')); + EXPECT_EQ("L'\\r' (13, 0xD)", Print(L'\r')); + EXPECT_EQ("L'\\t' (9)", Print(L'\t')); + EXPECT_EQ("L'\\v' (11, 0xB)", Print(L'\v')); + EXPECT_EQ("L'\\x7F' (127)", Print(L'\x7F')); + EXPECT_EQ("L'\\xFF' (255)", Print(L'\xFF')); + EXPECT_EQ("L' ' (32, 0x20)", Print(L' ')); + EXPECT_EQ("L'a' (97, 0x61)", Print(L'a')); + EXPECT_EQ("L'\\x576' (1398)", Print(static_cast<wchar_t>(0x576))); + EXPECT_EQ("L'\\xC74D' (51021)", Print(static_cast<wchar_t>(0xC74D))); +} + +// Test that int64_t provides more storage than wchar_t. +TEST(PrintTypeSizeTest, Wchar_t) { + EXPECT_LT(sizeof(wchar_t), sizeof(int64_t)); +} + +// Various integer types. 
+TEST(PrintBuiltInTypeTest, Integer) { + EXPECT_EQ("'\\xFF' (255)", Print(static_cast<unsigned char>(255))); // uint8 + EXPECT_EQ("'\\x80' (-128)", Print(static_cast<signed char>(-128))); // int8 + EXPECT_EQ("65535", Print(std::numeric_limits<uint16_t>::max())); // uint16 + EXPECT_EQ("-32768", Print(std::numeric_limits<int16_t>::min())); // int16 + EXPECT_EQ("4294967295", + Print(std::numeric_limits<uint32_t>::max())); // uint32 + EXPECT_EQ("-2147483648", + Print(std::numeric_limits<int32_t>::min())); // int32 + EXPECT_EQ("18446744073709551615", + Print(std::numeric_limits<uint64_t>::max())); // uint64 + EXPECT_EQ("-9223372036854775808", + Print(std::numeric_limits<int64_t>::min())); // int64 +#ifdef __cpp_char8_t + EXPECT_EQ("U+0000", + Print(std::numeric_limits<char8_t>::min())); // char8_t + EXPECT_EQ("U+00FF", + Print(std::numeric_limits<char8_t>::max())); // char8_t +#endif + EXPECT_EQ("U+0000", + Print(std::numeric_limits<char16_t>::min())); // char16_t + EXPECT_EQ("U+FFFF", + Print(std::numeric_limits<char16_t>::max())); // char16_t + EXPECT_EQ("U+0000", + Print(std::numeric_limits<char32_t>::min())); // char32_t + EXPECT_EQ("U+FFFFFFFF", + Print(std::numeric_limits<char32_t>::max())); // char32_t +} + +// Size types. +TEST(PrintBuiltInTypeTest, Size_t) { + EXPECT_EQ("1", Print(sizeof('a'))); // size_t. +#if !GTEST_OS_WINDOWS + // Windows has no ssize_t type. + EXPECT_EQ("-2", Print(static_cast<ssize_t>(-2))); // ssize_t. +#endif // !GTEST_OS_WINDOWS +} + +// Floating-points. +TEST(PrintBuiltInTypeTest, FloatingPoints) { + EXPECT_EQ("1.5", Print(1.5f)); // float + EXPECT_EQ("-2.5", Print(-2.5)); // double +} + +// Since ::std::stringstream::operator<<(const void *) formats the pointer +// output differently with different compilers, we have to create the expected +// output first and use it as our expectation. +static std::string PrintPointer(const void* p) { + ::std::stringstream expected_result_stream; + expected_result_stream << p; + return expected_result_stream.str(); +} + +// Tests printing C strings. + +// const char*. +TEST(PrintCStringTest, Const) { + const char* p = "World"; + EXPECT_EQ(PrintPointer(p) + " pointing to \"World\"", Print(p)); +} + +// char*. +TEST(PrintCStringTest, NonConst) { + char p[] = "Hi"; + EXPECT_EQ(PrintPointer(p) + " pointing to \"Hi\"", + Print(static_cast<char*>(p))); +} + +// NULL C string. +TEST(PrintCStringTest, Null) { + const char* p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// Tests that C strings are escaped properly. +TEST(PrintCStringTest, EscapesProperly) { + const char* p = "'\"?\\\a\b\f\n\r\t\v\x7F\xFF a"; + EXPECT_EQ(PrintPointer(p) + " pointing to \"'\\\"?\\\\\\a\\b\\f" + "\\n\\r\\t\\v\\x7F\\xFF a\"", + Print(p)); +} + +#ifdef __cpp_char8_t +// const char8_t*. +TEST(PrintU8StringTest, Const) { + const char8_t* p = u8"界"; + EXPECT_EQ(PrintPointer(p) + " pointing to u8\"\\xE7\\x95\\x8C\"", Print(p)); +} + +// char8_t*. +TEST(PrintU8StringTest, NonConst) { + char8_t p[] = u8"世"; + EXPECT_EQ(PrintPointer(p) + " pointing to u8\"\\xE4\\xB8\\x96\"", + Print(static_cast<char8_t*>(p))); +} + +// NULL u8 string. +TEST(PrintU8StringTest, Null) { + const char8_t* p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// Tests that u8 strings are escaped properly. 
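The expected strings in the escaping tests can be checked by hand: for narrow and u8 strings the printer escapes each non-ASCII byte of the UTF-8 encoding as \xNN (the char16_t/char32_t tests below instead escape whole code units, e.g. \x4E16). Worked out for 世 (U+4E16) with the three-byte UTF-8 pattern 1110xxxx 10xxxxxx 10xxxxxx:

// U+4E16 = 0100 1110 0001 0110
//        -> 1110'0100  10'111000  10'010110
//        -> 0xE4 0xB8 0x96, i.e. the "\xE4\xB8\x96" expected below;
// likewise U+754C (界) encodes to 0xE7 0x95 0x8C.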
+TEST(PrintU8StringTest, EscapesProperly) { + const char8_t* p = u8"'\"?\\\a\b\f\n\r\t\v\x7F\xFF hello 世界"; + EXPECT_EQ(PrintPointer(p) + + " pointing to u8\"'\\\"?\\\\\\a\\b\\f\\n\\r\\t\\v\\x7F\\xFF " + "hello \\xE4\\xB8\\x96\\xE7\\x95\\x8C\"", + Print(p)); +} +#endif + +// const char16_t*. +TEST(PrintU16StringTest, Const) { + const char16_t* p = u"界"; + EXPECT_EQ(PrintPointer(p) + " pointing to u\"\\x754C\"", Print(p)); +} + +// char16_t*. +TEST(PrintU16StringTest, NonConst) { + char16_t p[] = u"世"; + EXPECT_EQ(PrintPointer(p) + " pointing to u\"\\x4E16\"", + Print(static_cast<char16_t*>(p))); +} + +// NULL u16 string. +TEST(PrintU16StringTest, Null) { + const char16_t* p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// Tests that u16 strings are escaped properly. +TEST(PrintU16StringTest, EscapesProperly) { + const char16_t* p = u"'\"?\\\a\b\f\n\r\t\v\x7F\xFF hello 世界"; + EXPECT_EQ(PrintPointer(p) + + " pointing to u\"'\\\"?\\\\\\a\\b\\f\\n\\r\\t\\v\\x7F\\xFF " + "hello \\x4E16\\x754C\"", + Print(p)); +} + +// const char32_t*. +TEST(PrintU32StringTest, Const) { + const char32_t* p = U"🗺️"; + EXPECT_EQ(PrintPointer(p) + " pointing to U\"\\x1F5FA\\xFE0F\"", Print(p)); +} + +// char32_t*. +TEST(PrintU32StringTest, NonConst) { + char32_t p[] = U"🌌"; + EXPECT_EQ(PrintPointer(p) + " pointing to U\"\\x1F30C\"", + Print(static_cast<char32_t*>(p))); +} + +// NULL u32 string. +TEST(PrintU32StringTest, Null) { + const char32_t* p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// Tests that u32 strings are escaped properly. +TEST(PrintU32StringTest, EscapesProperly) { + const char32_t* p = U"'\"?\\\a\b\f\n\r\t\v\x7F\xFF hello 🗺️"; + EXPECT_EQ(PrintPointer(p) + + " pointing to U\"'\\\"?\\\\\\a\\b\\f\\n\\r\\t\\v\\x7F\\xFF " + "hello \\x1F5FA\\xFE0F\"", + Print(p)); +} + +// MSVC compiler can be configured to define whar_t as a typedef +// of unsigned short. Defining an overload for const wchar_t* in that case +// would cause pointers to unsigned shorts be printed as wide strings, +// possibly accessing more memory than intended and causing invalid +// memory accesses. MSVC defines _NATIVE_WCHAR_T_DEFINED symbol when +// wchar_t is implemented as a native type. +#if !defined(_MSC_VER) || defined(_NATIVE_WCHAR_T_DEFINED) + +// const wchar_t*. +TEST(PrintWideCStringTest, Const) { + const wchar_t* p = L"World"; + EXPECT_EQ(PrintPointer(p) + " pointing to L\"World\"", Print(p)); +} + +// wchar_t*. +TEST(PrintWideCStringTest, NonConst) { + wchar_t p[] = L"Hi"; + EXPECT_EQ(PrintPointer(p) + " pointing to L\"Hi\"", + Print(static_cast<wchar_t*>(p))); +} + +// NULL wide C string. +TEST(PrintWideCStringTest, Null) { + const wchar_t* p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// Tests that wide C strings are escaped properly. +TEST(PrintWideCStringTest, EscapesProperly) { + const wchar_t s[] = {'\'', '"', '?', '\\', '\a', '\b', '\f', '\n', '\r', + '\t', '\v', 0xD3, 0x576, 0x8D3, 0xC74D, ' ', 'a', '\0'}; + EXPECT_EQ(PrintPointer(s) + " pointing to L\"'\\\"?\\\\\\a\\b\\f" + "\\n\\r\\t\\v\\xD3\\x576\\x8D3\\xC74D a\"", + Print(static_cast<const wchar_t*>(s))); +} +#endif // native wchar_t + +// Tests printing pointers to other char types. + +// signed char*. +TEST(PrintCharPointerTest, SignedChar) { + signed char* p = reinterpret_cast<signed char*>(0x1234); + EXPECT_EQ(PrintPointer(p), Print(p)); + p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// const signed char*. 
+TEST(PrintCharPointerTest, ConstSignedChar) { + signed char* p = reinterpret_cast<signed char*>(0x1234); + EXPECT_EQ(PrintPointer(p), Print(p)); + p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// unsigned char*. +TEST(PrintCharPointerTest, UnsignedChar) { + unsigned char* p = reinterpret_cast<unsigned char*>(0x1234); + EXPECT_EQ(PrintPointer(p), Print(p)); + p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// const unsigned char*. +TEST(PrintCharPointerTest, ConstUnsignedChar) { + const unsigned char* p = reinterpret_cast<const unsigned char*>(0x1234); + EXPECT_EQ(PrintPointer(p), Print(p)); + p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// Tests printing pointers to simple, built-in types. + +// bool*. +TEST(PrintPointerToBuiltInTypeTest, Bool) { + bool* p = reinterpret_cast<bool*>(0xABCD); + EXPECT_EQ(PrintPointer(p), Print(p)); + p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// void*. +TEST(PrintPointerToBuiltInTypeTest, Void) { + void* p = reinterpret_cast<void*>(0xABCD); + EXPECT_EQ(PrintPointer(p), Print(p)); + p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// const void*. +TEST(PrintPointerToBuiltInTypeTest, ConstVoid) { + const void* p = reinterpret_cast<const void*>(0xABCD); + EXPECT_EQ(PrintPointer(p), Print(p)); + p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// Tests printing pointers to pointers. +TEST(PrintPointerToPointerTest, IntPointerPointer) { + int** p = reinterpret_cast<int**>(0xABCD); + EXPECT_EQ(PrintPointer(p), Print(p)); + p = nullptr; + EXPECT_EQ("NULL", Print(p)); +} + +// Tests printing (non-member) function pointers. + +void MyFunction(int /* n */) {} + +TEST(PrintPointerTest, NonMemberFunctionPointer) { + // We cannot directly cast &MyFunction to const void* because the + // standard disallows casting between pointers to functions and + // pointers to objects, and some compilers (e.g. GCC 3.4) enforce + // this limitation. + EXPECT_EQ( + PrintPointer(reinterpret_cast<const void*>( + reinterpret_cast<internal::BiggestInt>(&MyFunction))), + Print(&MyFunction)); + int (*p)(bool) = NULL; // NOLINT + EXPECT_EQ("NULL", Print(p)); +} + +// An assertion predicate determining whether a one string is a prefix for +// another. +template <typename StringType> +AssertionResult HasPrefix(const StringType& str, const StringType& prefix) { + if (str.find(prefix, 0) == 0) + return AssertionSuccess(); + + const bool is_wide_string = sizeof(prefix[0]) > 1; + const char* const begin_string_quote = is_wide_string ? "L\"" : "\""; + return AssertionFailure() + << begin_string_quote << prefix << "\" is not a prefix of " + << begin_string_quote << str << "\"\n"; +} + +// Tests printing member variable pointers. Although they are called +// pointers, they don't point to a location in the address space. +// Their representation is implementation-defined. Thus they will be +// printed as raw bytes. + +struct Foo { + public: + virtual ~Foo() {} + int MyMethod(char x) { return x + 1; } + virtual char MyVirtualMethod(int /* n */) { return 'a'; } + + int value; +}; + +TEST(PrintPointerTest, MemberVariablePointer) { + EXPECT_TRUE(HasPrefix(Print(&Foo::value), + Print(sizeof(&Foo::value)) + "-byte object ")); + int Foo::*p = NULL; // NOLINT + EXPECT_TRUE(HasPrefix(Print(p), + Print(sizeof(p)) + "-byte object ")); +} + +// Tests printing member function pointers. Although they are called +// pointers, they don't point to a location in the address space. +// Their representation is implementation-defined. Thus they will be +// printed as raw bytes. 
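Concretely, a pointer-to-member is an opaque aggregate whose size and layout are chosen by the ABI. Under the common 64-bit Itanium C++ ABI, for instance, a pointer-to-member-function is two words (a function address or vtable offset plus a this-adjustment), so the test below would typically see a 16-byte dump, while MSVC can use a different and sometimes smaller representation. That variability is why the expectations only check the "<sizeof>-byte object " prefix. The sizes quoted here are an assumption about the ABI, not something the tests require:

// Typical 64-bit Itanium-ABI build:
//   sizeof(void*)          == 8
//   sizeof(&Foo::MyMethod) == 16
//   Print(&Foo::MyMethod)  -> "16-byte object <.. raw bytes ..>"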
+TEST(PrintPointerTest, MemberFunctionPointer) { + EXPECT_TRUE(HasPrefix(Print(&Foo::MyMethod), + Print(sizeof(&Foo::MyMethod)) + "-byte object ")); + EXPECT_TRUE( + HasPrefix(Print(&Foo::MyVirtualMethod), + Print(sizeof((&Foo::MyVirtualMethod))) + "-byte object ")); + int (Foo::*p)(char) = NULL; // NOLINT + EXPECT_TRUE(HasPrefix(Print(p), + Print(sizeof(p)) + "-byte object ")); +} + +// Tests printing C arrays. + +// The difference between this and Print() is that it ensures that the +// argument is a reference to an array. +template <typename T, size_t N> +std::string PrintArrayHelper(T (&a)[N]) { + return Print(a); +} + +// One-dimensional array. +TEST(PrintArrayTest, OneDimensionalArray) { + int a[5] = { 1, 2, 3, 4, 5 }; + EXPECT_EQ("{ 1, 2, 3, 4, 5 }", PrintArrayHelper(a)); +} + +// Two-dimensional array. +TEST(PrintArrayTest, TwoDimensionalArray) { + int a[2][5] = { + { 1, 2, 3, 4, 5 }, + { 6, 7, 8, 9, 0 } + }; + EXPECT_EQ("{ { 1, 2, 3, 4, 5 }, { 6, 7, 8, 9, 0 } }", PrintArrayHelper(a)); +} + +// Array of const elements. +TEST(PrintArrayTest, ConstArray) { + const bool a[1] = { false }; + EXPECT_EQ("{ false }", PrintArrayHelper(a)); +} + +// char array without terminating NUL. +TEST(PrintArrayTest, CharArrayWithNoTerminatingNul) { + // Array a contains '\0' in the middle and doesn't end with '\0'. + char a[] = { 'H', '\0', 'i' }; + EXPECT_EQ("\"H\\0i\" (no terminating NUL)", PrintArrayHelper(a)); +} + +// char array with terminating NUL. +TEST(PrintArrayTest, CharArrayWithTerminatingNul) { + const char a[] = "\0Hi"; + EXPECT_EQ("\"\\0Hi\"", PrintArrayHelper(a)); +} + +#ifdef __cpp_char8_t +// char_t array without terminating NUL. +TEST(PrintArrayTest, Char8ArrayWithNoTerminatingNul) { + // Array a contains '\0' in the middle and doesn't end with '\0'. + const char8_t a[] = {u8'H', u8'\0', u8'i'}; + EXPECT_EQ("u8\"H\\0i\" (no terminating NUL)", PrintArrayHelper(a)); +} + +// char8_t array with terminating NUL. +TEST(PrintArrayTest, Char8ArrayWithTerminatingNul) { + const char8_t a[] = u8"\0世界"; + EXPECT_EQ( + "u8\"\\0\\xE4\\xB8\\x96\\xE7\\x95\\x8C\"", + PrintArrayHelper(a)); +} +#endif + +// const char16_t array without terminating NUL. +TEST(PrintArrayTest, Char16ArrayWithNoTerminatingNul) { + // Array a contains '\0' in the middle and doesn't end with '\0'. + const char16_t a[] = {u'こ', u'\0', u'ん', u'に', u'ち', u'は'}; + EXPECT_EQ("u\"\\x3053\\0\\x3093\\x306B\\x3061\\x306F\" (no terminating NUL)", + PrintArrayHelper(a)); +} + +// char16_t array with terminating NUL. +TEST(PrintArrayTest, Char16ArrayWithTerminatingNul) { + const char16_t a[] = u"\0こんにちは"; + EXPECT_EQ("u\"\\0\\x3053\\x3093\\x306B\\x3061\\x306F\"", PrintArrayHelper(a)); +} + +// char32_t array without terminating NUL. +TEST(PrintArrayTest, Char32ArrayWithNoTerminatingNul) { + // Array a contains '\0' in the middle and doesn't end with '\0'. + const char32_t a[] = {U'👋', U'\0', U'🌌'}; + EXPECT_EQ("U\"\\x1F44B\\0\\x1F30C\" (no terminating NUL)", + PrintArrayHelper(a)); +} + +// char32_t array with terminating NUL. +TEST(PrintArrayTest, Char32ArrayWithTerminatingNul) { + const char32_t a[] = U"\0👋🌌"; + EXPECT_EQ("U\"\\0\\x1F44B\\x1F30C\"", PrintArrayHelper(a)); +} + +// wchar_t array without terminating NUL. +TEST(PrintArrayTest, WCharArrayWithNoTerminatingNul) { + // Array a contains '\0' in the middle and doesn't end with '\0'. + const wchar_t a[] = {L'H', L'\0', L'i'}; + EXPECT_EQ("L\"H\\0i\" (no terminating NUL)", PrintArrayHelper(a)); +} + +// wchar_t array with terminating NUL. 
+TEST(PrintArrayTest, WCharArrayWithTerminatingNul) { + const wchar_t a[] = L"\0Hi"; + EXPECT_EQ("L\"\\0Hi\"", PrintArrayHelper(a)); +} + +// Array of objects. +TEST(PrintArrayTest, ObjectArray) { + std::string a[3] = {"Hi", "Hello", "Ni hao"}; + EXPECT_EQ("{ \"Hi\", \"Hello\", \"Ni hao\" }", PrintArrayHelper(a)); +} + +// Array with many elements. +TEST(PrintArrayTest, BigArray) { + int a[100] = { 1, 2, 3 }; + EXPECT_EQ("{ 1, 2, 3, 0, 0, 0, 0, 0, ..., 0, 0, 0, 0, 0, 0, 0, 0 }", + PrintArrayHelper(a)); +} + +// Tests printing ::string and ::std::string. + +// ::std::string. +TEST(PrintStringTest, StringInStdNamespace) { + const char s[] = "'\"?\\\a\b\f\n\0\r\t\v\x7F\xFF a"; + const ::std::string str(s, sizeof(s)); + EXPECT_EQ("\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v\\x7F\\xFF a\\0\"", + Print(str)); +} + +TEST(PrintStringTest, StringAmbiguousHex) { + // "\x6BANANA" is ambiguous, it can be interpreted as starting with either of: + // '\x6', '\x6B', or '\x6BA'. + + // a hex escaping sequence following by a decimal digit + EXPECT_EQ("\"0\\x12\" \"3\"", Print(::std::string("0\x12" "3"))); + // a hex escaping sequence following by a hex digit (lower-case) + EXPECT_EQ("\"mm\\x6\" \"bananas\"", Print(::std::string("mm\x6" "bananas"))); + // a hex escaping sequence following by a hex digit (upper-case) + EXPECT_EQ("\"NOM\\x6\" \"BANANA\"", Print(::std::string("NOM\x6" "BANANA"))); + // a hex escaping sequence following by a non-xdigit + EXPECT_EQ("\"!\\x5-!\"", Print(::std::string("!\x5-!"))); +} + +// Tests printing ::std::wstring. +#if GTEST_HAS_STD_WSTRING +// ::std::wstring. +TEST(PrintWideStringTest, StringInStdNamespace) { + const wchar_t s[] = L"'\"?\\\a\b\f\n\0\r\t\v\xD3\x576\x8D3\xC74D a"; + const ::std::wstring str(s, sizeof(s)/sizeof(wchar_t)); + EXPECT_EQ("L\"'\\\"?\\\\\\a\\b\\f\\n\\0\\r\\t\\v" + "\\xD3\\x576\\x8D3\\xC74D a\\0\"", + Print(str)); +} + +TEST(PrintWideStringTest, StringAmbiguousHex) { + // same for wide strings. + EXPECT_EQ("L\"0\\x12\" L\"3\"", Print(::std::wstring(L"0\x12" L"3"))); + EXPECT_EQ("L\"mm\\x6\" L\"bananas\"", + Print(::std::wstring(L"mm\x6" L"bananas"))); + EXPECT_EQ("L\"NOM\\x6\" L\"BANANA\"", + Print(::std::wstring(L"NOM\x6" L"BANANA"))); + EXPECT_EQ("L\"!\\x5-!\"", Print(::std::wstring(L"!\x5-!"))); +} +#endif // GTEST_HAS_STD_WSTRING + +#ifdef __cpp_char8_t +TEST(PrintStringTest, U8String) { + std::u8string str = u8"Hello, 世界"; + EXPECT_EQ(str, str); // Verify EXPECT_EQ compiles with this type. + EXPECT_EQ("u8\"Hello, \\xE4\\xB8\\x96\\xE7\\x95\\x8C\"", Print(str)); +} +#endif + +TEST(PrintStringTest, U16String) { + std::u16string str = u"Hello, 世界"; + EXPECT_EQ(str, str); // Verify EXPECT_EQ compiles with this type. + EXPECT_EQ("u\"Hello, \\x4E16\\x754C\"", Print(str)); +} + +TEST(PrintStringTest, U32String) { + std::u32string str = U"Hello, 🗺️"; + EXPECT_EQ(str, str); // Verify EXPECT_EQ compiles with this type + EXPECT_EQ("U\"Hello, \\x1F5FA\\xFE0F\"", Print(str)); +} + +// Tests printing types that support generic streaming (i.e. streaming +// to std::basic_ostream<Char, CharTraits> for any valid Char and +// CharTraits types). + +// Tests printing a non-template type that supports generic streaming. 
+ +class AllowsGenericStreaming {}; + +template <typename Char, typename CharTraits> +std::basic_ostream<Char, CharTraits>& operator<<( + std::basic_ostream<Char, CharTraits>& os, + const AllowsGenericStreaming& /* a */) { + return os << "AllowsGenericStreaming"; +} + +TEST(PrintTypeWithGenericStreamingTest, NonTemplateType) { + AllowsGenericStreaming a; + EXPECT_EQ("AllowsGenericStreaming", Print(a)); +} + +// Tests printing a template type that supports generic streaming. + +template <typename T> +class AllowsGenericStreamingTemplate {}; + +template <typename Char, typename CharTraits, typename T> +std::basic_ostream<Char, CharTraits>& operator<<( + std::basic_ostream<Char, CharTraits>& os, + const AllowsGenericStreamingTemplate<T>& /* a */) { + return os << "AllowsGenericStreamingTemplate"; +} + +TEST(PrintTypeWithGenericStreamingTest, TemplateType) { + AllowsGenericStreamingTemplate<int> a; + EXPECT_EQ("AllowsGenericStreamingTemplate", Print(a)); +} + +// Tests printing a type that supports generic streaming and can be +// implicitly converted to another printable type. + +template <typename T> +class AllowsGenericStreamingAndImplicitConversionTemplate { + public: + operator bool() const { return false; } +}; + +template <typename Char, typename CharTraits, typename T> +std::basic_ostream<Char, CharTraits>& operator<<( + std::basic_ostream<Char, CharTraits>& os, + const AllowsGenericStreamingAndImplicitConversionTemplate<T>& /* a */) { + return os << "AllowsGenericStreamingAndImplicitConversionTemplate"; +} + +TEST(PrintTypeWithGenericStreamingTest, TypeImplicitlyConvertible) { + AllowsGenericStreamingAndImplicitConversionTemplate<int> a; + EXPECT_EQ("AllowsGenericStreamingAndImplicitConversionTemplate", Print(a)); +} + +#if GTEST_INTERNAL_HAS_STRING_VIEW + +// Tests printing internal::StringView. + +TEST(PrintStringViewTest, SimpleStringView) { + const internal::StringView sp = "Hello"; + EXPECT_EQ("\"Hello\"", Print(sp)); +} + +TEST(PrintStringViewTest, UnprintableCharacters) { + const char str[] = "NUL (\0) and \r\t"; + const internal::StringView sp(str, sizeof(str) - 1); + EXPECT_EQ("\"NUL (\\0) and \\r\\t\"", Print(sp)); +} + +#endif // GTEST_INTERNAL_HAS_STRING_VIEW + +// Tests printing STL containers. + +TEST(PrintStlContainerTest, EmptyDeque) { + deque<char> empty; + EXPECT_EQ("{}", Print(empty)); +} + +TEST(PrintStlContainerTest, NonEmptyDeque) { + deque<int> non_empty; + non_empty.push_back(1); + non_empty.push_back(3); + EXPECT_EQ("{ 1, 3 }", Print(non_empty)); +} + + +TEST(PrintStlContainerTest, OneElementHashMap) { + ::std::unordered_map<int, char> map1; + map1[1] = 'a'; + EXPECT_EQ("{ (1, 'a' (97, 0x61)) }", Print(map1)); +} + +TEST(PrintStlContainerTest, HashMultiMap) { + ::std::unordered_multimap<int, bool> map1; + map1.insert(make_pair(5, true)); + map1.insert(make_pair(5, false)); + + // Elements of hash_multimap can be printed in any order. + const std::string result = Print(map1); + EXPECT_TRUE(result == "{ (5, true), (5, false) }" || + result == "{ (5, false), (5, true) }") + << " where Print(map1) returns \"" << result << "\"."; +} + + + +TEST(PrintStlContainerTest, HashSet) { + ::std::unordered_set<int> set1; + set1.insert(1); + EXPECT_EQ("{ 1 }", Print(set1)); +} + +TEST(PrintStlContainerTest, HashMultiSet) { + const int kSize = 5; + int a[kSize] = { 1, 1, 2, 5, 1 }; + ::std::unordered_multiset<int> set1(a, a + kSize); + + // Elements of hash_multiset can be printed in any order. 
+ const std::string result = Print(set1); + const std::string expected_pattern = "{ d, d, d, d, d }"; // d means a digit. + + // Verifies the result matches the expected pattern; also extracts + // the numbers in the result. + ASSERT_EQ(expected_pattern.length(), result.length()); + std::vector<int> numbers; + for (size_t i = 0; i != result.length(); i++) { + if (expected_pattern[i] == 'd') { + ASSERT_NE(isdigit(static_cast<unsigned char>(result[i])), 0); + numbers.push_back(result[i] - '0'); + } else { + EXPECT_EQ(expected_pattern[i], result[i]) << " where result is " + << result; + } + } + + // Makes sure the result contains the right numbers. + std::sort(numbers.begin(), numbers.end()); + std::sort(a, a + kSize); + EXPECT_TRUE(std::equal(a, a + kSize, numbers.begin())); +} + + +TEST(PrintStlContainerTest, List) { + const std::string a[] = {"hello", "world"}; + const list<std::string> strings(a, a + 2); + EXPECT_EQ("{ \"hello\", \"world\" }", Print(strings)); +} + +TEST(PrintStlContainerTest, Map) { + map<int, bool> map1; + map1[1] = true; + map1[5] = false; + map1[3] = true; + EXPECT_EQ("{ (1, true), (3, true), (5, false) }", Print(map1)); +} + +TEST(PrintStlContainerTest, MultiMap) { + multimap<bool, int> map1; + // The make_pair template function would deduce the type as + // pair<bool, int> here, and since the key part in a multimap has to + // be constant, without a templated ctor in the pair class (as in + // libCstd on Solaris), make_pair call would fail to compile as no + // implicit conversion is found. Thus explicit typename is used + // here instead. + map1.insert(pair<const bool, int>(true, 0)); + map1.insert(pair<const bool, int>(true, 1)); + map1.insert(pair<const bool, int>(false, 2)); + EXPECT_EQ("{ (false, 2), (true, 0), (true, 1) }", Print(map1)); +} + +TEST(PrintStlContainerTest, Set) { + const unsigned int a[] = { 3, 0, 5 }; + set<unsigned int> set1(a, a + 3); + EXPECT_EQ("{ 0, 3, 5 }", Print(set1)); +} + +TEST(PrintStlContainerTest, MultiSet) { + const int a[] = { 1, 1, 2, 5, 1 }; + multiset<int> set1(a, a + 5); + EXPECT_EQ("{ 1, 1, 1, 2, 5 }", Print(set1)); +} + + +TEST(PrintStlContainerTest, SinglyLinkedList) { + int a[] = { 9, 2, 8 }; + const std::forward_list<int> ints(a, a + 3); + EXPECT_EQ("{ 9, 2, 8 }", Print(ints)); +} + +TEST(PrintStlContainerTest, Pair) { + pair<const bool, int> p(true, 5); + EXPECT_EQ("(true, 5)", Print(p)); +} + +TEST(PrintStlContainerTest, Vector) { + vector<int> v; + v.push_back(1); + v.push_back(2); + EXPECT_EQ("{ 1, 2 }", Print(v)); +} + +TEST(PrintStlContainerTest, LongSequence) { + const int a[100] = { 1, 2, 3 }; + const vector<int> v(a, a + 100); + EXPECT_EQ("{ 1, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, " + "0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, ... 
}", Print(v)); +} + +TEST(PrintStlContainerTest, NestedContainer) { + const int a1[] = { 1, 2 }; + const int a2[] = { 3, 4, 5 }; + const list<int> l1(a1, a1 + 2); + const list<int> l2(a2, a2 + 3); + + vector<list<int> > v; + v.push_back(l1); + v.push_back(l2); + EXPECT_EQ("{ { 1, 2 }, { 3, 4, 5 } }", Print(v)); +} + +TEST(PrintStlContainerTest, OneDimensionalNativeArray) { + const int a[3] = { 1, 2, 3 }; + NativeArray<int> b(a, 3, RelationToSourceReference()); + EXPECT_EQ("{ 1, 2, 3 }", Print(b)); +} + +TEST(PrintStlContainerTest, TwoDimensionalNativeArray) { + const int a[2][3] = { { 1, 2, 3 }, { 4, 5, 6 } }; + NativeArray<int[3]> b(a, 2, RelationToSourceReference()); + EXPECT_EQ("{ { 1, 2, 3 }, { 4, 5, 6 } }", Print(b)); +} + +// Tests that a class named iterator isn't treated as a container. + +struct iterator { + char x; +}; + +TEST(PrintStlContainerTest, Iterator) { + iterator it = {}; + EXPECT_EQ("1-byte object <00>", Print(it)); +} + +// Tests that a class named const_iterator isn't treated as a container. + +struct const_iterator { + char x; +}; + +TEST(PrintStlContainerTest, ConstIterator) { + const_iterator it = {}; + EXPECT_EQ("1-byte object <00>", Print(it)); +} + +// Tests printing ::std::tuples. + +// Tuples of various arities. +TEST(PrintStdTupleTest, VariousSizes) { + ::std::tuple<> t0; + EXPECT_EQ("()", Print(t0)); + + ::std::tuple<int> t1(5); + EXPECT_EQ("(5)", Print(t1)); + + ::std::tuple<char, bool> t2('a', true); + EXPECT_EQ("('a' (97, 0x61), true)", Print(t2)); + + ::std::tuple<bool, int, int> t3(false, 2, 3); + EXPECT_EQ("(false, 2, 3)", Print(t3)); + + ::std::tuple<bool, int, int, int> t4(false, 2, 3, 4); + EXPECT_EQ("(false, 2, 3, 4)", Print(t4)); + + const char* const str = "8"; + ::std::tuple<bool, char, short, int32_t, int64_t, float, double, // NOLINT + const char*, void*, std::string> + t10(false, 'a', static_cast<short>(3), 4, 5, 1.5F, -2.5, str, // NOLINT + nullptr, "10"); + EXPECT_EQ("(false, 'a' (97, 0x61), 3, 4, 5, 1.5, -2.5, " + PrintPointer(str) + + " pointing to \"8\", NULL, \"10\")", + Print(t10)); +} + +// Nested tuples. +TEST(PrintStdTupleTest, NestedTuple) { + ::std::tuple< ::std::tuple<int, bool>, char> nested( + ::std::make_tuple(5, true), 'a'); + EXPECT_EQ("((5, true), 'a' (97, 0x61))", Print(nested)); +} + +TEST(PrintNullptrT, Basic) { + EXPECT_EQ("(nullptr)", Print(nullptr)); +} + +TEST(PrintReferenceWrapper, Printable) { + int x = 5; + EXPECT_EQ("@" + PrintPointer(&x) + " 5", Print(std::ref(x))); + EXPECT_EQ("@" + PrintPointer(&x) + " 5", Print(std::cref(x))); +} + +TEST(PrintReferenceWrapper, Unprintable) { + ::foo::UnprintableInFoo up; + EXPECT_EQ( + "@" + PrintPointer(&up) + + " 16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>", + Print(std::ref(up))); + EXPECT_EQ( + "@" + PrintPointer(&up) + + " 16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>", + Print(std::cref(up))); +} + +// Tests printing user-defined unprintable types. + +// Unprintable types in the global namespace. +TEST(PrintUnprintableTypeTest, InGlobalNamespace) { + EXPECT_EQ("1-byte object <00>", + Print(UnprintableTemplateInGlobal<char>())); +} + +// Unprintable types in a user namespace. +TEST(PrintUnprintableTypeTest, InUserNamespace) { + EXPECT_EQ("16-byte object <EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>", + Print(::foo::UnprintableInFoo())); +} + +// Unprintable types are that too big to be printed completely. 
+ +struct Big { + Big() { memset(array, 0, sizeof(array)); } + char array[257]; +}; + +TEST(PrintUnpritableTypeTest, BigObject) { + EXPECT_EQ("257-byte object <00-00 00-00 00-00 00-00 00-00 00-00 " + "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 " + "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 " + "00-00 00-00 00-00 00-00 00-00 00-00 ... 00-00 00-00 00-00 " + "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 " + "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 " + "00-00 00-00 00-00 00-00 00-00 00-00 00-00 00-00 00>", + Print(Big())); +} + +// Tests printing user-defined streamable types. + +// Streamable types in the global namespace. +TEST(PrintStreamableTypeTest, InGlobalNamespace) { + StreamableInGlobal x; + EXPECT_EQ("StreamableInGlobal", Print(x)); + EXPECT_EQ("StreamableInGlobal*", Print(&x)); +} + +// Printable template types in a user namespace. +TEST(PrintStreamableTypeTest, TemplateTypeInUserNamespace) { + EXPECT_EQ("StreamableTemplateInFoo: 0", + Print(::foo::StreamableTemplateInFoo<int>())); +} + +TEST(PrintStreamableTypeTest, TypeInUserNamespaceWithTemplatedStreamOperator) { + EXPECT_EQ("TemplatedStreamableInFoo", + Print(::foo::TemplatedStreamableInFoo())); +} + +TEST(PrintStreamableTypeTest, SubclassUsesSuperclassStreamOperator) { + ParentClass parent; + ChildClassWithStreamOperator child_stream; + ChildClassWithoutStreamOperator child_no_stream; + EXPECT_EQ("ParentClass", Print(parent)); + EXPECT_EQ("ChildClassWithStreamOperator", Print(child_stream)); + EXPECT_EQ("ParentClass", Print(child_no_stream)); +} + +// Tests printing a user-defined recursive container type that has a << +// operator. +TEST(PrintStreamableTypeTest, PathLikeInUserNamespace) { + ::foo::PathLike x; + EXPECT_EQ("Streamable-PathLike", Print(x)); + const ::foo::PathLike cx; + EXPECT_EQ("Streamable-PathLike", Print(cx)); +} + +// Tests printing user-defined types that have a PrintTo() function. +TEST(PrintPrintableTypeTest, InUserNamespace) { + EXPECT_EQ("PrintableViaPrintTo: 0", + Print(::foo::PrintableViaPrintTo())); +} + +// Tests printing a pointer to a user-defined type that has a << +// operator for its pointer. +TEST(PrintPrintableTypeTest, PointerInUserNamespace) { + ::foo::PointerPrintable x; + EXPECT_EQ("PointerPrintable*", Print(&x)); +} + +// Tests printing user-defined class template that have a PrintTo() function. +TEST(PrintPrintableTypeTest, TemplateInUserNamespace) { + EXPECT_EQ("PrintableViaPrintToTemplate: 5", + Print(::foo::PrintableViaPrintToTemplate<int>(5))); +} + +// Tests that the universal printer prints both the address and the +// value of a reference. +TEST(PrintReferenceTest, PrintsAddressAndValue) { + int n = 5; + EXPECT_EQ("@" + PrintPointer(&n) + " 5", PrintByRef(n)); + + int a[2][3] = { + { 0, 1, 2 }, + { 3, 4, 5 } + }; + EXPECT_EQ("@" + PrintPointer(a) + " { { 0, 1, 2 }, { 3, 4, 5 } }", + PrintByRef(a)); + + const ::foo::UnprintableInFoo x; + EXPECT_EQ("@" + PrintPointer(&x) + " 16-byte object " + "<EF-12 00-00 34-AB 00-00 00-00 00-00 00-00 00-00>", + PrintByRef(x)); +} + +// Tests that the universal printer prints a function pointer passed by +// reference. +TEST(PrintReferenceTest, HandlesFunctionPointer) { + void (*fp)(int n) = &MyFunction; + const std::string fp_pointer_string = + PrintPointer(reinterpret_cast<const void*>(&fp)); + // We cannot directly cast &MyFunction to const void* because the + // standard disallows casting between pointers to functions and + // pointers to objects, and some compilers (e.g. 
GCC 3.4) enforce + // this limitation. + const std::string fp_string = PrintPointer(reinterpret_cast<const void*>( + reinterpret_cast<internal::BiggestInt>(fp))); + EXPECT_EQ("@" + fp_pointer_string + " " + fp_string, + PrintByRef(fp)); +} + +// Tests that the universal printer prints a member function pointer +// passed by reference. +TEST(PrintReferenceTest, HandlesMemberFunctionPointer) { + int (Foo::*p)(char ch) = &Foo::MyMethod; + EXPECT_TRUE(HasPrefix( + PrintByRef(p), + "@" + PrintPointer(reinterpret_cast<const void*>(&p)) + " " + + Print(sizeof(p)) + "-byte object ")); + + char (Foo::*p2)(int n) = &Foo::MyVirtualMethod; + EXPECT_TRUE(HasPrefix( + PrintByRef(p2), + "@" + PrintPointer(reinterpret_cast<const void*>(&p2)) + " " + + Print(sizeof(p2)) + "-byte object ")); +} + +// Tests that the universal printer prints a member variable pointer +// passed by reference. +TEST(PrintReferenceTest, HandlesMemberVariablePointer) { + int Foo::*p = &Foo::value; // NOLINT + EXPECT_TRUE(HasPrefix( + PrintByRef(p), + "@" + PrintPointer(&p) + " " + Print(sizeof(p)) + "-byte object ")); +} + +// Tests that FormatForComparisonFailureMessage(), which is used to print +// an operand in a comparison assertion (e.g. ASSERT_EQ) when the assertion +// fails, formats the operand in the desired way. + +// scalar +TEST(FormatForComparisonFailureMessageTest, WorksForScalar) { + EXPECT_STREQ("123", + FormatForComparisonFailureMessage(123, 124).c_str()); +} + +// non-char pointer +TEST(FormatForComparisonFailureMessageTest, WorksForNonCharPointer) { + int n = 0; + EXPECT_EQ(PrintPointer(&n), + FormatForComparisonFailureMessage(&n, &n).c_str()); +} + +// non-char array +TEST(FormatForComparisonFailureMessageTest, FormatsNonCharArrayAsPointer) { + // In expression 'array == x', 'array' is compared by pointer. + // Therefore we want to print an array operand as a pointer. + int n[] = { 1, 2, 3 }; + EXPECT_EQ(PrintPointer(n), + FormatForComparisonFailureMessage(n, n).c_str()); +} + +// Tests formatting a char pointer when it's compared with another pointer. +// In this case we want to print it as a raw pointer, as the comparison is by +// pointer. + +// char pointer vs pointer +TEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsPointer) { + // In expression 'p == x', where 'p' and 'x' are (const or not) char + // pointers, the operands are compared by pointer. Therefore we + // want to print 'p' as a pointer instead of a C string (we don't + // even know if it's supposed to point to a valid C string). + + // const char* + const char* s = "hello"; + EXPECT_EQ(PrintPointer(s), + FormatForComparisonFailureMessage(s, s).c_str()); + + // char* + char ch = 'a'; + EXPECT_EQ(PrintPointer(&ch), + FormatForComparisonFailureMessage(&ch, &ch).c_str()); +} + +// wchar_t pointer vs pointer +TEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsPointer) { + // In expression 'p == x', where 'p' and 'x' are (const or not) char + // pointers, the operands are compared by pointer. Therefore we + // want to print 'p' as a pointer instead of a wide C string (we don't + // even know if it's supposed to point to a valid wide C string). + + // const wchar_t* + const wchar_t* s = L"hello"; + EXPECT_EQ(PrintPointer(s), + FormatForComparisonFailureMessage(s, s).c_str()); + + // wchar_t* + wchar_t ch = L'a'; + EXPECT_EQ(PrintPointer(&ch), + FormatForComparisonFailureMessage(&ch, &ch).c_str()); +} + +// Tests formatting a char pointer when it's compared to a string object. 
+// In this case we want to print the char pointer as a C string. + +// char pointer vs std::string +TEST(FormatForComparisonFailureMessageTest, WorksForCharPointerVsStdString) { + const char* s = "hello \"world"; + EXPECT_STREQ("\"hello \\\"world\"", // The string content should be escaped. + FormatForComparisonFailureMessage(s, ::std::string()).c_str()); + + // char* + char str[] = "hi\1"; + char* p = str; + EXPECT_STREQ("\"hi\\x1\"", // The string content should be escaped. + FormatForComparisonFailureMessage(p, ::std::string()).c_str()); +} + +#if GTEST_HAS_STD_WSTRING +// wchar_t pointer vs std::wstring +TEST(FormatForComparisonFailureMessageTest, WorksForWCharPointerVsStdWString) { + const wchar_t* s = L"hi \"world"; + EXPECT_STREQ("L\"hi \\\"world\"", // The string content should be escaped. + FormatForComparisonFailureMessage(s, ::std::wstring()).c_str()); + + // wchar_t* + wchar_t str[] = L"hi\1"; + wchar_t* p = str; + EXPECT_STREQ("L\"hi\\x1\"", // The string content should be escaped. + FormatForComparisonFailureMessage(p, ::std::wstring()).c_str()); +} +#endif + +// Tests formatting a char array when it's compared with a pointer or array. +// In this case we want to print the array as a row pointer, as the comparison +// is by pointer. + +// char array vs pointer +TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsPointer) { + char str[] = "hi \"world\""; + char* p = nullptr; + EXPECT_EQ(PrintPointer(str), + FormatForComparisonFailureMessage(str, p).c_str()); +} + +// char array vs char array +TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsCharArray) { + const char str[] = "hi \"world\""; + EXPECT_EQ(PrintPointer(str), + FormatForComparisonFailureMessage(str, str).c_str()); +} + +// wchar_t array vs pointer +TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsPointer) { + wchar_t str[] = L"hi \"world\""; + wchar_t* p = nullptr; + EXPECT_EQ(PrintPointer(str), + FormatForComparisonFailureMessage(str, p).c_str()); +} + +// wchar_t array vs wchar_t array +TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsWCharArray) { + const wchar_t str[] = L"hi \"world\""; + EXPECT_EQ(PrintPointer(str), + FormatForComparisonFailureMessage(str, str).c_str()); +} + +// Tests formatting a char array when it's compared with a string object. +// In this case we want to print the array as a C string. + +// char array vs std::string +TEST(FormatForComparisonFailureMessageTest, WorksForCharArrayVsStdString) { + const char str[] = "hi \"world\""; + EXPECT_STREQ("\"hi \\\"world\\\"\"", // The content should be escaped. + FormatForComparisonFailureMessage(str, ::std::string()).c_str()); +} + +#if GTEST_HAS_STD_WSTRING +// wchar_t array vs std::wstring +TEST(FormatForComparisonFailureMessageTest, WorksForWCharArrayVsStdWString) { + const wchar_t str[] = L"hi \"w\0rld\""; + EXPECT_STREQ( + "L\"hi \\\"w\"", // The content should be escaped. + // Embedded NUL terminates the string. + FormatForComparisonFailureMessage(str, ::std::wstring()).c_str()); +} +#endif + +// Useful for testing PrintToString(). We cannot use EXPECT_EQ() +// there as its implementation uses PrintToString(). The caller must +// ensure that 'value' has no side effect. 
+#define EXPECT_PRINT_TO_STRING_(value, expected_string) \ + EXPECT_TRUE(PrintToString(value) == (expected_string)) \ + << " where " #value " prints as " << (PrintToString(value)) + +TEST(PrintToStringTest, WorksForScalar) { + EXPECT_PRINT_TO_STRING_(123, "123"); +} + +TEST(PrintToStringTest, WorksForPointerToConstChar) { + const char* p = "hello"; + EXPECT_PRINT_TO_STRING_(p, "\"hello\""); +} + +TEST(PrintToStringTest, WorksForPointerToNonConstChar) { + char s[] = "hello"; + char* p = s; + EXPECT_PRINT_TO_STRING_(p, "\"hello\""); +} + +TEST(PrintToStringTest, EscapesForPointerToConstChar) { + const char* p = "hello\n"; + EXPECT_PRINT_TO_STRING_(p, "\"hello\\n\""); +} + +TEST(PrintToStringTest, EscapesForPointerToNonConstChar) { + char s[] = "hello\1"; + char* p = s; + EXPECT_PRINT_TO_STRING_(p, "\"hello\\x1\""); +} + +TEST(PrintToStringTest, WorksForArray) { + int n[3] = { 1, 2, 3 }; + EXPECT_PRINT_TO_STRING_(n, "{ 1, 2, 3 }"); +} + +TEST(PrintToStringTest, WorksForCharArray) { + char s[] = "hello"; + EXPECT_PRINT_TO_STRING_(s, "\"hello\""); +} + +TEST(PrintToStringTest, WorksForCharArrayWithEmbeddedNul) { + const char str_with_nul[] = "hello\0 world"; + EXPECT_PRINT_TO_STRING_(str_with_nul, "\"hello\\0 world\""); + + char mutable_str_with_nul[] = "hello\0 world"; + EXPECT_PRINT_TO_STRING_(mutable_str_with_nul, "\"hello\\0 world\""); +} + + TEST(PrintToStringTest, ContainsNonLatin) { + // Sanity test with valid UTF-8. Prints both in hex and as text. + std::string non_ascii_str = ::std::string("오전 4:30"); + EXPECT_PRINT_TO_STRING_(non_ascii_str, + "\"\\xEC\\x98\\xA4\\xEC\\xA0\\x84 4:30\"\n" + " As Text: \"오전 4:30\""); + non_ascii_str = ::std::string("From ä — ẑ"); + EXPECT_PRINT_TO_STRING_(non_ascii_str, + "\"From \\xC3\\xA4 \\xE2\\x80\\x94 \\xE1\\xBA\\x91\"" + "\n As Text: \"From ä — ẑ\""); +} + +TEST(IsValidUTF8Test, IllFormedUTF8) { + // The following test strings are ill-formed UTF-8 and are printed + // as hex only (or ASCII, in case of ASCII bytes) because IsValidUTF8() is + // expected to fail, thus output does not contain "As Text:". + + static const char *const kTestdata[][2] = { + // 2-byte lead byte followed by a single-byte character. + {"\xC3\x74", "\"\\xC3t\""}, + // Valid 2-byte character followed by an orphan trail byte. + {"\xC3\x84\xA4", "\"\\xC3\\x84\\xA4\""}, + // Lead byte without trail byte. + {"abc\xC3", "\"abc\\xC3\""}, + // 3-byte lead byte, single-byte character, orphan trail byte. + {"x\xE2\x70\x94", "\"x\\xE2p\\x94\""}, + // Truncated 3-byte character. + {"\xE2\x80", "\"\\xE2\\x80\""}, + // Truncated 3-byte character followed by valid 2-byte char. + {"\xE2\x80\xC3\x84", "\"\\xE2\\x80\\xC3\\x84\""}, + // Truncated 3-byte character followed by a single-byte character. + {"\xE2\x80\x7A", "\"\\xE2\\x80z\""}, + // 3-byte lead byte followed by valid 3-byte character. + {"\xE2\xE2\x80\x94", "\"\\xE2\\xE2\\x80\\x94\""}, + // 4-byte lead byte followed by valid 3-byte character. + {"\xF0\xE2\x80\x94", "\"\\xF0\\xE2\\x80\\x94\""}, + // Truncated 4-byte character. + {"\xF0\xE2\x80", "\"\\xF0\\xE2\\x80\""}, + // Invalid UTF-8 byte sequences embedded in other chars. + {"abc\xE2\x80\x94\xC3\x74xyc", "\"abc\\xE2\\x80\\x94\\xC3txyc\""}, + {"abc\xC3\x84\xE2\x80\xC3\x84xyz", + "\"abc\\xC3\\x84\\xE2\\x80\\xC3\\x84xyz\""}, + // Non-shortest UTF-8 byte sequences are also ill-formed. + // The classics: xC0, xC1 lead byte. + {"\xC0\x80", "\"\\xC0\\x80\""}, + {"\xC1\x81", "\"\\xC1\\x81\""}, + // Non-shortest sequences. 
+ {"\xE0\x80\x80", "\"\\xE0\\x80\\x80\""}, + {"\xf0\x80\x80\x80", "\"\\xF0\\x80\\x80\\x80\""}, + // Last valid code point before surrogate range, should be printed as text, + // too. + {"\xED\x9F\xBF", "\"\\xED\\x9F\\xBF\"\n As Text: \"\""}, + // Start of surrogate lead. Surrogates are not printed as text. + {"\xED\xA0\x80", "\"\\xED\\xA0\\x80\""}, + // Last non-private surrogate lead. + {"\xED\xAD\xBF", "\"\\xED\\xAD\\xBF\""}, + // First private-use surrogate lead. + {"\xED\xAE\x80", "\"\\xED\\xAE\\x80\""}, + // Last private-use surrogate lead. + {"\xED\xAF\xBF", "\"\\xED\\xAF\\xBF\""}, + // Mid-point of surrogate trail. + {"\xED\xB3\xBF", "\"\\xED\\xB3\\xBF\""}, + // First valid code point after surrogate range, should be printed as text, + // too. + {"\xEE\x80\x80", "\"\\xEE\\x80\\x80\"\n As Text: \"\""} + }; + + for (int i = 0; i < int(sizeof(kTestdata)/sizeof(kTestdata[0])); ++i) { + EXPECT_PRINT_TO_STRING_(kTestdata[i][0], kTestdata[i][1]); + } +} + +#undef EXPECT_PRINT_TO_STRING_ + +TEST(UniversalTersePrintTest, WorksForNonReference) { + ::std::stringstream ss; + UniversalTersePrint(123, &ss); + EXPECT_EQ("123", ss.str()); +} + +TEST(UniversalTersePrintTest, WorksForReference) { + const int& n = 123; + ::std::stringstream ss; + UniversalTersePrint(n, &ss); + EXPECT_EQ("123", ss.str()); +} + +TEST(UniversalTersePrintTest, WorksForCString) { + const char* s1 = "abc"; + ::std::stringstream ss1; + UniversalTersePrint(s1, &ss1); + EXPECT_EQ("\"abc\"", ss1.str()); + + char* s2 = const_cast<char*>(s1); + ::std::stringstream ss2; + UniversalTersePrint(s2, &ss2); + EXPECT_EQ("\"abc\"", ss2.str()); + + const char* s3 = nullptr; + ::std::stringstream ss3; + UniversalTersePrint(s3, &ss3); + EXPECT_EQ("NULL", ss3.str()); +} + +TEST(UniversalPrintTest, WorksForNonReference) { + ::std::stringstream ss; + UniversalPrint(123, &ss); + EXPECT_EQ("123", ss.str()); +} + +TEST(UniversalPrintTest, WorksForReference) { + const int& n = 123; + ::std::stringstream ss; + UniversalPrint(n, &ss); + EXPECT_EQ("123", ss.str()); +} + +TEST(UniversalPrintTest, WorksForPairWithConst) { + std::pair<const Wrapper<std::string>, int> p(Wrapper<std::string>("abc"), 1); + ::std::stringstream ss; + UniversalPrint(p, &ss); + EXPECT_EQ("(Wrapper(\"abc\"), 1)", ss.str()); +} + +TEST(UniversalPrintTest, WorksForCString) { + const char* s1 = "abc"; + ::std::stringstream ss1; + UniversalPrint(s1, &ss1); + EXPECT_EQ(PrintPointer(s1) + " pointing to \"abc\"", std::string(ss1.str())); + + char* s2 = const_cast<char*>(s1); + ::std::stringstream ss2; + UniversalPrint(s2, &ss2); + EXPECT_EQ(PrintPointer(s2) + " pointing to \"abc\"", std::string(ss2.str())); + + const char* s3 = nullptr; + ::std::stringstream ss3; + UniversalPrint(s3, &ss3); + EXPECT_EQ("NULL", ss3.str()); +} + +TEST(UniversalPrintTest, WorksForCharArray) { + const char str[] = "\"Line\0 1\"\nLine 2"; + ::std::stringstream ss1; + UniversalPrint(str, &ss1); + EXPECT_EQ("\"\\\"Line\\0 1\\\"\\nLine 2\"", ss1.str()); + + const char mutable_str[] = "\"Line\0 1\"\nLine 2"; + ::std::stringstream ss2; + UniversalPrint(mutable_str, &ss2); + EXPECT_EQ("\"\\\"Line\\0 1\\\"\\nLine 2\"", ss2.str()); +} + +TEST(UniversalPrintTest, IncompleteType) { + struct Incomplete; + char some_object = 0; + EXPECT_EQ("(incomplete type)", + PrintToString(reinterpret_cast<Incomplete&>(some_object))); +} + +TEST(UniversalPrintTest, SmartPointers) { + EXPECT_EQ("(nullptr)", PrintToString(std::unique_ptr<int>())); + std::unique_ptr<int> p(new int(17)); + EXPECT_EQ("(ptr = " + PrintPointer(p.get()) + 
", value = 17)", + PrintToString(p)); + std::unique_ptr<int[]> p2(new int[2]); + EXPECT_EQ("(" + PrintPointer(p2.get()) + ")", PrintToString(p2)); + + EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<int>())); + std::shared_ptr<int> p3(new int(1979)); + EXPECT_EQ("(ptr = " + PrintPointer(p3.get()) + ", value = 1979)", + PrintToString(p3)); +#if __cpp_lib_shared_ptr_arrays >= 201611L + std::shared_ptr<int[]> p4(new int[2]); + EXPECT_EQ("(" + PrintPointer(p4.get()) + ")", PrintToString(p4)); +#endif + + // modifiers + EXPECT_EQ("(nullptr)", PrintToString(std::unique_ptr<int>())); + EXPECT_EQ("(nullptr)", PrintToString(std::unique_ptr<const int>())); + EXPECT_EQ("(nullptr)", PrintToString(std::unique_ptr<volatile int>())); + EXPECT_EQ("(nullptr)", PrintToString(std::unique_ptr<volatile const int>())); + EXPECT_EQ("(nullptr)", PrintToString(std::unique_ptr<int[]>())); + EXPECT_EQ("(nullptr)", PrintToString(std::unique_ptr<const int[]>())); + EXPECT_EQ("(nullptr)", PrintToString(std::unique_ptr<volatile int[]>())); + EXPECT_EQ("(nullptr)", + PrintToString(std::unique_ptr<volatile const int[]>())); + EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<int>())); + EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<const int>())); + EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<volatile int>())); + EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<volatile const int>())); +#if __cpp_lib_shared_ptr_arrays >= 201611L + EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<int[]>())); + EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<const int[]>())); + EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<volatile int[]>())); + EXPECT_EQ("(nullptr)", + PrintToString(std::shared_ptr<volatile const int[]>())); +#endif + + // void + EXPECT_EQ("(nullptr)", PrintToString(std::unique_ptr<void, void (*)(void*)>( + nullptr, nullptr))); + EXPECT_EQ("(" + PrintPointer(p.get()) + ")", + PrintToString( + std::unique_ptr<void, void (*)(void*)>(p.get(), [](void*) {}))); + EXPECT_EQ("(nullptr)", PrintToString(std::shared_ptr<void>())); + EXPECT_EQ("(" + PrintPointer(p.get()) + ")", + PrintToString(std::shared_ptr<void>(p.get(), [](void*) {}))); +} + +TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsEmptyTuple) { + Strings result = UniversalTersePrintTupleFieldsToStrings(::std::make_tuple()); + EXPECT_EQ(0u, result.size()); +} + +TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsOneTuple) { + Strings result = UniversalTersePrintTupleFieldsToStrings( + ::std::make_tuple(1)); + ASSERT_EQ(1u, result.size()); + EXPECT_EQ("1", result[0]); +} + +TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsTwoTuple) { + Strings result = UniversalTersePrintTupleFieldsToStrings( + ::std::make_tuple(1, 'a')); + ASSERT_EQ(2u, result.size()); + EXPECT_EQ("1", result[0]); + EXPECT_EQ("'a' (97, 0x61)", result[1]); +} + +TEST(UniversalTersePrintTupleFieldsToStringsTestWithStd, PrintsTersely) { + const int n = 1; + Strings result = UniversalTersePrintTupleFieldsToStrings( + ::std::tuple<const int&, const char*>(n, "a")); + ASSERT_EQ(2u, result.size()); + EXPECT_EQ("1", result[0]); + EXPECT_EQ("\"a\"", result[1]); +} + +#if GTEST_INTERNAL_HAS_ANY +class PrintAnyTest : public ::testing::Test { + protected: + template <typename T> + static std::string ExpectedTypeName() { +#if GTEST_HAS_RTTI + return internal::GetTypeName<T>(); +#else + return "<unknown_type>"; +#endif // GTEST_HAS_RTTI + } +}; + +TEST_F(PrintAnyTest, Empty) { + internal::Any any; + EXPECT_EQ("no value", 
PrintToString(any)); +} + +TEST_F(PrintAnyTest, NonEmpty) { + internal::Any any; + constexpr int val1 = 10; + const std::string val2 = "content"; + + any = val1; + EXPECT_EQ("value of type " + ExpectedTypeName<int>(), PrintToString(any)); + + any = val2; + EXPECT_EQ("value of type " + ExpectedTypeName<std::string>(), + PrintToString(any)); +} +#endif // GTEST_INTERNAL_HAS_ANY + +#if GTEST_INTERNAL_HAS_OPTIONAL +TEST(PrintOptionalTest, Basic) { + internal::Optional<int> value; + EXPECT_EQ("(nullopt)", PrintToString(value)); + value = {7}; + EXPECT_EQ("(7)", PrintToString(value)); + EXPECT_EQ("(1.1)", PrintToString(internal::Optional<double>{1.1})); + EXPECT_EQ("(\"A\")", PrintToString(internal::Optional<std::string>{"A"})); +} +#endif // GTEST_INTERNAL_HAS_OPTIONAL + +#if GTEST_INTERNAL_HAS_VARIANT +struct NonPrintable { + unsigned char contents = 17; +}; + +TEST(PrintOneofTest, Basic) { + using Type = internal::Variant<int, StreamableInGlobal, NonPrintable>; + EXPECT_EQ("('int(index = 0)' with value 7)", PrintToString(Type(7))); + EXPECT_EQ("('StreamableInGlobal(index = 1)' with value StreamableInGlobal)", + PrintToString(Type(StreamableInGlobal{}))); + EXPECT_EQ( + "('testing::gtest_printers_test::NonPrintable(index = 2)' with value " + "1-byte object <11>)", + PrintToString(Type(NonPrintable{}))); +} +#endif // GTEST_INTERNAL_HAS_VARIANT +namespace { +class string_ref; + +/** + * This is a synthetic pointer to a fixed size string. + */ +class string_ptr { + public: + string_ptr(const char* data, size_t size) : data_(data), size_(size) {} + + string_ptr& operator++() noexcept { + data_ += size_; + return *this; + } + + string_ref operator*() const noexcept; + + private: + const char* data_; + size_t size_; +}; + +/** + * This is a synthetic reference of a fixed size string. + */ +class string_ref { + public: + string_ref(const char* data, size_t size) : data_(data), size_(size) {} + + string_ptr operator&() const noexcept { return {data_, size_}; } // NOLINT + + bool operator==(const char* s) const noexcept { + if (size_ > 0 && data_[size_ - 1] != 0) { + return std::string(data_, size_) == std::string(s); + } else { + return std::string(data_) == std::string(s); + } + } + + private: + const char* data_; + size_t size_; +}; + +string_ref string_ptr::operator*() const noexcept { return {data_, size_}; } + +TEST(string_ref, compare) { + const char* s = "alex\0davidjohn\0"; + string_ptr ptr(s, 5); + EXPECT_EQ(*ptr, "alex"); + EXPECT_TRUE(*ptr == "alex"); + ++ptr; + EXPECT_EQ(*ptr, "david"); + EXPECT_TRUE(*ptr == "david"); + ++ptr; + EXPECT_EQ(*ptr, "john"); +} + +} // namespace + +} // namespace gtest_printers_test +} // namespace testing diff --git a/security/nss/gtests/google_test/gtest/test/googletest-setuptestsuite-test.py b/security/nss/gtests/google_test/gtest/test/googletest-setuptestsuite-test.py new file mode 100755 index 0000000000..c82162fc99 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-setuptestsuite-test.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# +# Copyright 2019, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Verifies that SetUpTestSuite and TearDownTestSuite errors are noticed.""" + +import gtest_test_utils + +COMMAND = gtest_test_utils.GetTestExecutablePath( + 'googletest-setuptestsuite-test_') + + +class GTestSetUpTestSuiteTest(gtest_test_utils.TestCase): + + def testSetupErrorAndTearDownError(self): + p = gtest_test_utils.Subprocess(COMMAND) + self.assertNotEqual(p.exit_code, 0, msg=p.output) + + self.assertIn( + '[ FAILED ] SetupFailTest: SetUpTestSuite or TearDownTestSuite\n' + '[ FAILED ] TearDownFailTest: SetUpTestSuite or TearDownTestSuite\n' + '\n' + ' 2 FAILED TEST SUITES\n', + p.output) + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-setuptestsuite-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-setuptestsuite-test_.cc new file mode 100644 index 0000000000..a4bc4ef441 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-setuptestsuite-test_.cc @@ -0,0 +1,49 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +#include "gtest/gtest.h" + +class SetupFailTest : public ::testing::Test { + protected: + static void SetUpTestSuite() { + ASSERT_EQ("", "SET_UP_FAIL"); + } +}; + +TEST_F(SetupFailTest, NoopPassingTest) {} + +class TearDownFailTest : public ::testing::Test { + protected: + static void TearDownTestSuite() { + ASSERT_EQ("", "TEAR_DOWN_FAIL"); + } +}; + +TEST_F(TearDownFailTest, NoopPassingTest) {} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-shuffle-test.py b/security/nss/gtests/google_test/gtest/test/googletest-shuffle-test.py new file mode 100755 index 0000000000..573cc5eca3 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-shuffle-test.py @@ -0,0 +1,323 @@ +#!/usr/bin/env python +# +# Copyright 2009 Google Inc. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Verifies that test shuffling works.""" + +import os +import gtest_test_utils + +# Command to run the googletest-shuffle-test_ program. +COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-shuffle-test_') + +# The environment variables for test sharding. 
+TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS' +SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX' + +TEST_FILTER = 'A*.A:A*.B:C*' + +ALL_TESTS = [] +ACTIVE_TESTS = [] +FILTERED_TESTS = [] +SHARDED_TESTS = [] + +SHUFFLED_ALL_TESTS = [] +SHUFFLED_ACTIVE_TESTS = [] +SHUFFLED_FILTERED_TESTS = [] +SHUFFLED_SHARDED_TESTS = [] + + +def AlsoRunDisabledTestsFlag(): + return '--gtest_also_run_disabled_tests' + + +def FilterFlag(test_filter): + return '--gtest_filter=%s' % (test_filter,) + + +def RepeatFlag(n): + return '--gtest_repeat=%s' % (n,) + + +def ShuffleFlag(): + return '--gtest_shuffle' + + +def RandomSeedFlag(n): + return '--gtest_random_seed=%s' % (n,) + + +def RunAndReturnOutput(extra_env, args): + """Runs the test program and returns its output.""" + + environ_copy = os.environ.copy() + environ_copy.update(extra_env) + + return gtest_test_utils.Subprocess([COMMAND] + args, env=environ_copy).output + + +def GetTestsForAllIterations(extra_env, args): + """Runs the test program and returns a list of test lists. + + Args: + extra_env: a map from environment variables to their values + args: command line flags to pass to googletest-shuffle-test_ + + Returns: + A list where the i-th element is the list of tests run in the i-th + test iteration. + """ + + test_iterations = [] + for line in RunAndReturnOutput(extra_env, args).split('\n'): + if line.startswith('----'): + tests = [] + test_iterations.append(tests) + elif line.strip(): + tests.append(line.strip()) # 'TestCaseName.TestName' + + return test_iterations + + +def GetTestCases(tests): + """Returns a list of test cases in the given full test names. + + Args: + tests: a list of full test names + + Returns: + A list of test cases from 'tests', in their original order. + Consecutive duplicates are removed. + """ + + test_cases = [] + for test in tests: + test_case = test.split('.')[0] + if not test_case in test_cases: + test_cases.append(test_case) + + return test_cases + + +def CalculateTestLists(): + """Calculates the list of tests run under different flags.""" + + if not ALL_TESTS: + ALL_TESTS.extend( + GetTestsForAllIterations({}, [AlsoRunDisabledTestsFlag()])[0]) + + if not ACTIVE_TESTS: + ACTIVE_TESTS.extend(GetTestsForAllIterations({}, [])[0]) + + if not FILTERED_TESTS: + FILTERED_TESTS.extend( + GetTestsForAllIterations({}, [FilterFlag(TEST_FILTER)])[0]) + + if not SHARDED_TESTS: + SHARDED_TESTS.extend( + GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3', + SHARD_INDEX_ENV_VAR: '1'}, + [])[0]) + + if not SHUFFLED_ALL_TESTS: + SHUFFLED_ALL_TESTS.extend(GetTestsForAllIterations( + {}, [AlsoRunDisabledTestsFlag(), ShuffleFlag(), RandomSeedFlag(1)])[0]) + + if not SHUFFLED_ACTIVE_TESTS: + SHUFFLED_ACTIVE_TESTS.extend(GetTestsForAllIterations( + {}, [ShuffleFlag(), RandomSeedFlag(1)])[0]) + + if not SHUFFLED_FILTERED_TESTS: + SHUFFLED_FILTERED_TESTS.extend(GetTestsForAllIterations( + {}, [ShuffleFlag(), RandomSeedFlag(1), FilterFlag(TEST_FILTER)])[0]) + + if not SHUFFLED_SHARDED_TESTS: + SHUFFLED_SHARDED_TESTS.extend( + GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3', + SHARD_INDEX_ENV_VAR: '1'}, + [ShuffleFlag(), RandomSeedFlag(1)])[0]) + + +class GTestShuffleUnitTest(gtest_test_utils.TestCase): + """Tests test shuffling.""" + + def setUp(self): + CalculateTestLists() + + def testShufflePreservesNumberOfTests(self): + self.assertEqual(len(ALL_TESTS), len(SHUFFLED_ALL_TESTS)) + self.assertEqual(len(ACTIVE_TESTS), len(SHUFFLED_ACTIVE_TESTS)) + self.assertEqual(len(FILTERED_TESTS), len(SHUFFLED_FILTERED_TESTS)) + 
self.assertEqual(len(SHARDED_TESTS), len(SHUFFLED_SHARDED_TESTS)) + + def testShuffleChangesTestOrder(self): + self.assert_(SHUFFLED_ALL_TESTS != ALL_TESTS, SHUFFLED_ALL_TESTS) + self.assert_(SHUFFLED_ACTIVE_TESTS != ACTIVE_TESTS, SHUFFLED_ACTIVE_TESTS) + self.assert_(SHUFFLED_FILTERED_TESTS != FILTERED_TESTS, + SHUFFLED_FILTERED_TESTS) + self.assert_(SHUFFLED_SHARDED_TESTS != SHARDED_TESTS, + SHUFFLED_SHARDED_TESTS) + + def testShuffleChangesTestCaseOrder(self): + self.assert_(GetTestCases(SHUFFLED_ALL_TESTS) != GetTestCases(ALL_TESTS), + GetTestCases(SHUFFLED_ALL_TESTS)) + self.assert_( + GetTestCases(SHUFFLED_ACTIVE_TESTS) != GetTestCases(ACTIVE_TESTS), + GetTestCases(SHUFFLED_ACTIVE_TESTS)) + self.assert_( + GetTestCases(SHUFFLED_FILTERED_TESTS) != GetTestCases(FILTERED_TESTS), + GetTestCases(SHUFFLED_FILTERED_TESTS)) + self.assert_( + GetTestCases(SHUFFLED_SHARDED_TESTS) != GetTestCases(SHARDED_TESTS), + GetTestCases(SHUFFLED_SHARDED_TESTS)) + + def testShuffleDoesNotRepeatTest(self): + for test in SHUFFLED_ALL_TESTS: + self.assertEqual(1, SHUFFLED_ALL_TESTS.count(test), + '%s appears more than once' % (test,)) + for test in SHUFFLED_ACTIVE_TESTS: + self.assertEqual(1, SHUFFLED_ACTIVE_TESTS.count(test), + '%s appears more than once' % (test,)) + for test in SHUFFLED_FILTERED_TESTS: + self.assertEqual(1, SHUFFLED_FILTERED_TESTS.count(test), + '%s appears more than once' % (test,)) + for test in SHUFFLED_SHARDED_TESTS: + self.assertEqual(1, SHUFFLED_SHARDED_TESTS.count(test), + '%s appears more than once' % (test,)) + + def testShuffleDoesNotCreateNewTest(self): + for test in SHUFFLED_ALL_TESTS: + self.assert_(test in ALL_TESTS, '%s is an invalid test' % (test,)) + for test in SHUFFLED_ACTIVE_TESTS: + self.assert_(test in ACTIVE_TESTS, '%s is an invalid test' % (test,)) + for test in SHUFFLED_FILTERED_TESTS: + self.assert_(test in FILTERED_TESTS, '%s is an invalid test' % (test,)) + for test in SHUFFLED_SHARDED_TESTS: + self.assert_(test in SHARDED_TESTS, '%s is an invalid test' % (test,)) + + def testShuffleIncludesAllTests(self): + for test in ALL_TESTS: + self.assert_(test in SHUFFLED_ALL_TESTS, '%s is missing' % (test,)) + for test in ACTIVE_TESTS: + self.assert_(test in SHUFFLED_ACTIVE_TESTS, '%s is missing' % (test,)) + for test in FILTERED_TESTS: + self.assert_(test in SHUFFLED_FILTERED_TESTS, '%s is missing' % (test,)) + for test in SHARDED_TESTS: + self.assert_(test in SHUFFLED_SHARDED_TESTS, '%s is missing' % (test,)) + + def testShuffleLeavesDeathTestsAtFront(self): + non_death_test_found = False + for test in SHUFFLED_ACTIVE_TESTS: + if 'DeathTest.' in test: + self.assert_(not non_death_test_found, + '%s appears after a non-death test' % (test,)) + else: + non_death_test_found = True + + def _VerifyTestCasesDoNotInterleave(self, tests): + test_cases = [] + for test in tests: + [test_case, _] = test.split('.') + if test_cases and test_cases[-1] != test_case: + test_cases.append(test_case) + self.assertEqual(1, test_cases.count(test_case), + 'Test case %s is not grouped together in %s' % + (test_case, tests)) + + def testShuffleDoesNotInterleaveTestCases(self): + self._VerifyTestCasesDoNotInterleave(SHUFFLED_ALL_TESTS) + self._VerifyTestCasesDoNotInterleave(SHUFFLED_ACTIVE_TESTS) + self._VerifyTestCasesDoNotInterleave(SHUFFLED_FILTERED_TESTS) + self._VerifyTestCasesDoNotInterleave(SHUFFLED_SHARDED_TESTS) + + def testShuffleRestoresOrderAfterEachIteration(self): + # Get the test lists in all 3 iterations, using random seed 1, 2, + # and 3 respectively. 
Google Test picks a different seed in each + # iteration, and this test depends on the current implementation + # picking successive numbers. This dependency is not ideal, but + # makes the test much easier to write. + [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = ( + GetTestsForAllIterations( + {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)])) + + # Make sure running the tests with random seed 1 gets the same + # order as in iteration 1 above. + [tests_with_seed1] = GetTestsForAllIterations( + {}, [ShuffleFlag(), RandomSeedFlag(1)]) + self.assertEqual(tests_in_iteration1, tests_with_seed1) + + # Make sure running the tests with random seed 2 gets the same + # order as in iteration 2 above. Success means that Google Test + # correctly restores the test order before re-shuffling at the + # beginning of iteration 2. + [tests_with_seed2] = GetTestsForAllIterations( + {}, [ShuffleFlag(), RandomSeedFlag(2)]) + self.assertEqual(tests_in_iteration2, tests_with_seed2) + + # Make sure running the tests with random seed 3 gets the same + # order as in iteration 3 above. Success means that Google Test + # correctly restores the test order before re-shuffling at the + # beginning of iteration 3. + [tests_with_seed3] = GetTestsForAllIterations( + {}, [ShuffleFlag(), RandomSeedFlag(3)]) + self.assertEqual(tests_in_iteration3, tests_with_seed3) + + def testShuffleGeneratesNewOrderInEachIteration(self): + [tests_in_iteration1, tests_in_iteration2, tests_in_iteration3] = ( + GetTestsForAllIterations( + {}, [ShuffleFlag(), RandomSeedFlag(1), RepeatFlag(3)])) + + self.assert_(tests_in_iteration1 != tests_in_iteration2, + tests_in_iteration1) + self.assert_(tests_in_iteration1 != tests_in_iteration3, + tests_in_iteration1) + self.assert_(tests_in_iteration2 != tests_in_iteration3, + tests_in_iteration2) + + def testShuffleShardedTestsPreservesPartition(self): + # If we run M tests on N shards, the same M tests should be run in + # total, regardless of the random seeds used by the shards. + [tests1] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3', + SHARD_INDEX_ENV_VAR: '0'}, + [ShuffleFlag(), RandomSeedFlag(1)]) + [tests2] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3', + SHARD_INDEX_ENV_VAR: '1'}, + [ShuffleFlag(), RandomSeedFlag(20)]) + [tests3] = GetTestsForAllIterations({TOTAL_SHARDS_ENV_VAR: '3', + SHARD_INDEX_ENV_VAR: '2'}, + [ShuffleFlag(), RandomSeedFlag(25)]) + sorted_sharded_tests = tests1 + tests2 + tests3 + sorted_sharded_tests.sort() + sorted_active_tests = [] + sorted_active_tests.extend(ACTIVE_TESTS) + sorted_active_tests.sort() + self.assertEqual(sorted_active_tests, sorted_sharded_tests) + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-shuffle-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-shuffle-test_.cc new file mode 100644 index 0000000000..4505663ae4 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-shuffle-test_.cc @@ -0,0 +1,101 @@ +// Copyright 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Verifies that test shuffling works. + +#include "gtest/gtest.h" + +namespace { + +using ::testing::EmptyTestEventListener; +using ::testing::InitGoogleTest; +using ::testing::Message; +using ::testing::Test; +using ::testing::TestEventListeners; +using ::testing::TestInfo; +using ::testing::UnitTest; + +// The test methods are empty, as the sole purpose of this program is +// to print the test names before/after shuffling. + +class A : public Test {}; +TEST_F(A, A) {} +TEST_F(A, B) {} + +TEST(ADeathTest, A) {} +TEST(ADeathTest, B) {} +TEST(ADeathTest, C) {} + +TEST(B, A) {} +TEST(B, B) {} +TEST(B, C) {} +TEST(B, DISABLED_D) {} +TEST(B, DISABLED_E) {} + +TEST(BDeathTest, A) {} +TEST(BDeathTest, B) {} + +TEST(C, A) {} +TEST(C, B) {} +TEST(C, C) {} +TEST(C, DISABLED_D) {} + +TEST(CDeathTest, A) {} + +TEST(DISABLED_D, A) {} +TEST(DISABLED_D, DISABLED_B) {} + +// This printer prints the full test names only, starting each test +// iteration with a "----" marker. +class TestNamePrinter : public EmptyTestEventListener { + public: + void OnTestIterationStart(const UnitTest& /* unit_test */, + int /* iteration */) override { + printf("----\n"); + } + + void OnTestStart(const TestInfo& test_info) override { + printf("%s.%s\n", test_info.test_suite_name(), test_info.name()); + } +}; + +} // namespace + +int main(int argc, char **argv) { + InitGoogleTest(&argc, argv); + + // Replaces the default printer with TestNamePrinter, which prints + // the test name only. + TestEventListeners& listeners = UnitTest::GetInstance()->listeners(); + delete listeners.Release(listeners.default_result_printer()); + listeners.Append(new TestNamePrinter); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-test-part-test.cc b/security/nss/gtests/google_test/gtest/test/googletest-test-part-test.cc new file mode 100644 index 0000000000..44cf7ca044 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-test-part-test.cc @@ -0,0 +1,230 @@ +// Copyright 2008 Google Inc. +// All Rights Reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#include "gtest/gtest-test-part.h" + +#include "gtest/gtest.h" + +using testing::Message; +using testing::Test; +using testing::TestPartResult; +using testing::TestPartResultArray; + +namespace { + +// Tests the TestPartResult class. + +// The test fixture for testing TestPartResult. 
+class TestPartResultTest : public Test { + protected: + TestPartResultTest() + : r1_(TestPartResult::kSuccess, "foo/bar.cc", 10, "Success!"), + r2_(TestPartResult::kNonFatalFailure, "foo/bar.cc", -1, "Failure!"), + r3_(TestPartResult::kFatalFailure, nullptr, -1, "Failure!"), + r4_(TestPartResult::kSkip, "foo/bar.cc", 2, "Skipped!") {} + + TestPartResult r1_, r2_, r3_, r4_; +}; + + +TEST_F(TestPartResultTest, ConstructorWorks) { + Message message; + message << "something is terribly wrong"; + message << static_cast<const char*>(testing::internal::kStackTraceMarker); + message << "some unimportant stack trace"; + + const TestPartResult result(TestPartResult::kNonFatalFailure, + "some_file.cc", + 42, + message.GetString().c_str()); + + EXPECT_EQ(TestPartResult::kNonFatalFailure, result.type()); + EXPECT_STREQ("some_file.cc", result.file_name()); + EXPECT_EQ(42, result.line_number()); + EXPECT_STREQ(message.GetString().c_str(), result.message()); + EXPECT_STREQ("something is terribly wrong", result.summary()); +} + +TEST_F(TestPartResultTest, ResultAccessorsWork) { + const TestPartResult success(TestPartResult::kSuccess, + "file.cc", + 42, + "message"); + EXPECT_TRUE(success.passed()); + EXPECT_FALSE(success.failed()); + EXPECT_FALSE(success.nonfatally_failed()); + EXPECT_FALSE(success.fatally_failed()); + EXPECT_FALSE(success.skipped()); + + const TestPartResult nonfatal_failure(TestPartResult::kNonFatalFailure, + "file.cc", + 42, + "message"); + EXPECT_FALSE(nonfatal_failure.passed()); + EXPECT_TRUE(nonfatal_failure.failed()); + EXPECT_TRUE(nonfatal_failure.nonfatally_failed()); + EXPECT_FALSE(nonfatal_failure.fatally_failed()); + EXPECT_FALSE(nonfatal_failure.skipped()); + + const TestPartResult fatal_failure(TestPartResult::kFatalFailure, + "file.cc", + 42, + "message"); + EXPECT_FALSE(fatal_failure.passed()); + EXPECT_TRUE(fatal_failure.failed()); + EXPECT_FALSE(fatal_failure.nonfatally_failed()); + EXPECT_TRUE(fatal_failure.fatally_failed()); + EXPECT_FALSE(fatal_failure.skipped()); + + const TestPartResult skip(TestPartResult::kSkip, "file.cc", 42, "message"); + EXPECT_FALSE(skip.passed()); + EXPECT_FALSE(skip.failed()); + EXPECT_FALSE(skip.nonfatally_failed()); + EXPECT_FALSE(skip.fatally_failed()); + EXPECT_TRUE(skip.skipped()); +} + +// Tests TestPartResult::type(). +TEST_F(TestPartResultTest, type) { + EXPECT_EQ(TestPartResult::kSuccess, r1_.type()); + EXPECT_EQ(TestPartResult::kNonFatalFailure, r2_.type()); + EXPECT_EQ(TestPartResult::kFatalFailure, r3_.type()); + EXPECT_EQ(TestPartResult::kSkip, r4_.type()); +} + +// Tests TestPartResult::file_name(). +TEST_F(TestPartResultTest, file_name) { + EXPECT_STREQ("foo/bar.cc", r1_.file_name()); + EXPECT_STREQ(nullptr, r3_.file_name()); + EXPECT_STREQ("foo/bar.cc", r4_.file_name()); +} + +// Tests TestPartResult::line_number(). +TEST_F(TestPartResultTest, line_number) { + EXPECT_EQ(10, r1_.line_number()); + EXPECT_EQ(-1, r2_.line_number()); + EXPECT_EQ(2, r4_.line_number()); +} + +// Tests TestPartResult::message(). +TEST_F(TestPartResultTest, message) { + EXPECT_STREQ("Success!", r1_.message()); + EXPECT_STREQ("Skipped!", r4_.message()); +} + +// Tests TestPartResult::passed(). +TEST_F(TestPartResultTest, Passed) { + EXPECT_TRUE(r1_.passed()); + EXPECT_FALSE(r2_.passed()); + EXPECT_FALSE(r3_.passed()); + EXPECT_FALSE(r4_.passed()); +} + +// Tests TestPartResult::failed(). 
+TEST_F(TestPartResultTest, Failed) { + EXPECT_FALSE(r1_.failed()); + EXPECT_TRUE(r2_.failed()); + EXPECT_TRUE(r3_.failed()); + EXPECT_FALSE(r4_.failed()); +} + +// Tests TestPartResult::failed(). +TEST_F(TestPartResultTest, Skipped) { + EXPECT_FALSE(r1_.skipped()); + EXPECT_FALSE(r2_.skipped()); + EXPECT_FALSE(r3_.skipped()); + EXPECT_TRUE(r4_.skipped()); +} + +// Tests TestPartResult::fatally_failed(). +TEST_F(TestPartResultTest, FatallyFailed) { + EXPECT_FALSE(r1_.fatally_failed()); + EXPECT_FALSE(r2_.fatally_failed()); + EXPECT_TRUE(r3_.fatally_failed()); + EXPECT_FALSE(r4_.fatally_failed()); +} + +// Tests TestPartResult::nonfatally_failed(). +TEST_F(TestPartResultTest, NonfatallyFailed) { + EXPECT_FALSE(r1_.nonfatally_failed()); + EXPECT_TRUE(r2_.nonfatally_failed()); + EXPECT_FALSE(r3_.nonfatally_failed()); + EXPECT_FALSE(r4_.nonfatally_failed()); +} + +// Tests the TestPartResultArray class. + +class TestPartResultArrayTest : public Test { + protected: + TestPartResultArrayTest() + : r1_(TestPartResult::kNonFatalFailure, "foo/bar.cc", -1, "Failure 1"), + r2_(TestPartResult::kFatalFailure, "foo/bar.cc", -1, "Failure 2") {} + + const TestPartResult r1_, r2_; +}; + +// Tests that TestPartResultArray initially has size 0. +TEST_F(TestPartResultArrayTest, InitialSizeIsZero) { + TestPartResultArray results; + EXPECT_EQ(0, results.size()); +} + +// Tests that TestPartResultArray contains the given TestPartResult +// after one Append() operation. +TEST_F(TestPartResultArrayTest, ContainsGivenResultAfterAppend) { + TestPartResultArray results; + results.Append(r1_); + EXPECT_EQ(1, results.size()); + EXPECT_STREQ("Failure 1", results.GetTestPartResult(0).message()); +} + +// Tests that TestPartResultArray contains the given TestPartResults +// after two Append() operations. +TEST_F(TestPartResultArrayTest, ContainsGivenResultsAfterTwoAppends) { + TestPartResultArray results; + results.Append(r1_); + results.Append(r2_); + EXPECT_EQ(2, results.size()); + EXPECT_STREQ("Failure 1", results.GetTestPartResult(0).message()); + EXPECT_STREQ("Failure 2", results.GetTestPartResult(1).message()); +} + +typedef TestPartResultArrayTest TestPartResultArrayDeathTest; + +// Tests that the program dies when GetTestPartResult() is called with +// an invalid index. +TEST_F(TestPartResultArrayDeathTest, DiesWhenIndexIsOutOfBound) { + TestPartResultArray results; + results.Append(r1_); + + EXPECT_DEATH_IF_SUPPORTED(results.GetTestPartResult(-1), ""); + EXPECT_DEATH_IF_SUPPORTED(results.GetTestPartResult(1), ""); +} + +} // namespace diff --git a/security/nss/gtests/google_test/gtest/test/googletest-throw-on-failure-test.py b/security/nss/gtests/google_test/gtest/test/googletest-throw-on-failure-test.py new file mode 100755 index 0000000000..ea627c479d --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-throw-on-failure-test.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python +# +# Copyright 2009, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. 
nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Tests Google Test's throw-on-failure mode with exceptions disabled. + +This script invokes googletest-throw-on-failure-test_ (a program written with +Google Test) with different environments and command line flags. +""" + +import os +import gtest_test_utils + + +# Constants. + +# The command line flag for enabling/disabling the throw-on-failure mode. +THROW_ON_FAILURE = 'gtest_throw_on_failure' + +# Path to the googletest-throw-on-failure-test_ program, compiled with +# exceptions disabled. +EXE_PATH = gtest_test_utils.GetTestExecutablePath( + 'googletest-throw-on-failure-test_') + + +# Utilities. + + +def SetEnvVar(env_var, value): + """Sets an environment variable to a given value; unsets it when the + given value is None. + """ + + env_var = env_var.upper() + if value is not None: + os.environ[env_var] = value + elif env_var in os.environ: + del os.environ[env_var] + + +def Run(command): + """Runs a command; returns True/False if its exit code is/isn't 0.""" + + print('Running "%s". . .' % ' '.join(command)) + p = gtest_test_utils.Subprocess(command) + return p.exited and p.exit_code == 0 + + +# The tests. +class ThrowOnFailureTest(gtest_test_utils.TestCase): + """Tests the throw-on-failure mode.""" + + def RunAndVerify(self, env_var_value, flag_value, should_fail): + """Runs googletest-throw-on-failure-test_ and verifies that it does + (or does not) exit with a non-zero code. + + Args: + env_var_value: value of the GTEST_BREAK_ON_FAILURE environment + variable; None if the variable should be unset. + flag_value: value of the --gtest_break_on_failure flag; + None if the flag should not be present. + should_fail: True if and only if the program is expected to fail. + """ + + SetEnvVar(THROW_ON_FAILURE, env_var_value) + + if env_var_value is None: + env_var_value_msg = ' is not set' + else: + env_var_value_msg = '=' + env_var_value + + if flag_value is None: + flag = '' + elif flag_value == '0': + flag = '--%s=0' % THROW_ON_FAILURE + else: + flag = '--%s' % THROW_ON_FAILURE + + command = [EXE_PATH] + if flag: + command.append(flag) + + if should_fail: + should_or_not = 'should' + else: + should_or_not = 'should not' + + failed = not Run(command) + + SetEnvVar(THROW_ON_FAILURE, None) + + msg = ('when %s%s, an assertion failure in "%s" %s cause a non-zero ' + 'exit code.' 
% + (THROW_ON_FAILURE, env_var_value_msg, ' '.join(command), + should_or_not)) + self.assert_(failed == should_fail, msg) + + def testDefaultBehavior(self): + """Tests the behavior of the default mode.""" + + self.RunAndVerify(env_var_value=None, flag_value=None, should_fail=False) + + def testThrowOnFailureEnvVar(self): + """Tests using the GTEST_THROW_ON_FAILURE environment variable.""" + + self.RunAndVerify(env_var_value='0', + flag_value=None, + should_fail=False) + self.RunAndVerify(env_var_value='1', + flag_value=None, + should_fail=True) + + def testThrowOnFailureFlag(self): + """Tests using the --gtest_throw_on_failure flag.""" + + self.RunAndVerify(env_var_value=None, + flag_value='0', + should_fail=False) + self.RunAndVerify(env_var_value=None, + flag_value='1', + should_fail=True) + + def testThrowOnFailureFlagOverridesEnvVar(self): + """Tests that --gtest_throw_on_failure overrides GTEST_THROW_ON_FAILURE.""" + + self.RunAndVerify(env_var_value='0', + flag_value='0', + should_fail=False) + self.RunAndVerify(env_var_value='0', + flag_value='1', + should_fail=True) + self.RunAndVerify(env_var_value='1', + flag_value='0', + should_fail=False) + self.RunAndVerify(env_var_value='1', + flag_value='1', + should_fail=True) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-throw-on-failure-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-throw-on-failure-test_.cc new file mode 100644 index 0000000000..83bb914c7e --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-throw-on-failure-test_.cc @@ -0,0 +1,71 @@ +// Copyright 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Tests Google Test's throw-on-failure mode with exceptions disabled. +// +// This program must be compiled with exceptions disabled. It will be +// invoked by googletest-throw-on-failure-test.py, and is expected to exit +// with non-zero in the throw-on-failure mode or 0 otherwise. 
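The mode exercised by this script, and by the program described in the comment just above, can also be enabled programmatically. A minimal sketch, not part of this patch, of a host framework that embeds Google Test assertions with throw-on-failure turned on (all names here are illustrative):

#include "gtest/gtest.h"

int main(int argc, char** argv) {
  // Same effect as passing --gtest_throw_on_failure on the command line.
  testing::GTEST_FLAG(throw_on_failure) = true;
  testing::InitGoogleTest(&argc, argv);

  // With exceptions enabled a failing assertion now throws; with exceptions
  // disabled, as in googletest-throw-on-failure-test_.cc, it exits non-zero.
  EXPECT_EQ(2, 2);  // passes, so execution simply continues
  return 0;
}

(The implementation of googletest-throw-on-failure-test_.cc continues below.)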
+ +#include "gtest/gtest.h" + +#include <stdio.h> // for fflush, fprintf, NULL, etc. +#include <stdlib.h> // for exit +#include <exception> // for set_terminate + +// This terminate handler aborts the program using exit() rather than abort(). +// This avoids showing pop-ups on Windows systems and core dumps on Unix-like +// ones. +void TerminateHandler() { + fprintf(stderr, "%s\n", "Unhandled C++ exception terminating the program."); + fflush(nullptr); + exit(1); +} + +int main(int argc, char** argv) { +#if GTEST_HAS_EXCEPTIONS + std::set_terminate(&TerminateHandler); +#endif + testing::InitGoogleTest(&argc, argv); + + // We want to ensure that people can use Google Test assertions in + // other testing frameworks, as long as they initialize Google Test + // properly and set the throw-on-failure mode. Therefore, we don't + // use Google Test's constructs for defining and running tests + // (e.g. TEST and RUN_ALL_TESTS) here. + + // In the throw-on-failure mode with exceptions disabled, this + // assertion will cause the program to exit with a non-zero code. + EXPECT_EQ(2, 3); + + // When not in the throw-on-failure mode, the control will reach + // here. + return 0; +} diff --git a/security/nss/gtests/google_test/gtest/test/googletest-uninitialized-test.py b/security/nss/gtests/google_test/gtest/test/googletest-uninitialized-test.py new file mode 100755 index 0000000000..69595a0dde --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-uninitialized-test.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python +# +# Copyright 2008, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +"""Verifies that Google Test warns the user when not initialized properly.""" + +import gtest_test_utils + +COMMAND = gtest_test_utils.GetTestExecutablePath('googletest-uninitialized-test_') + + +def Assert(condition): + if not condition: + raise AssertionError + + +def AssertEq(expected, actual): + if expected != actual: + print('Expected: %s' % (expected,)) + print(' Actual: %s' % (actual,)) + raise AssertionError + + +def TestExitCodeAndOutput(command): + """Runs the given command and verifies its exit code and output.""" + + # Verifies that 'command' exits with code 1. + p = gtest_test_utils.Subprocess(command) + if p.exited and p.exit_code == 0: + Assert('IMPORTANT NOTICE' in p.output); + Assert('InitGoogleTest' in p.output) + + +class GTestUninitializedTest(gtest_test_utils.TestCase): + def testExitCodeAndOutput(self): + TestExitCodeAndOutput(COMMAND) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/googletest-uninitialized-test_.cc b/security/nss/gtests/google_test/gtest/test/googletest-uninitialized-test_.cc new file mode 100644 index 0000000000..b4434d51ee --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/googletest-uninitialized-test_.cc @@ -0,0 +1,42 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +#include "gtest/gtest.h" + +TEST(DummyTest, Dummy) { + // This test doesn't verify anything. We just need it to create a + // realistic stage for testing the behavior of Google Test when + // RUN_ALL_TESTS() is called without + // testing::InitGoogleTest() being called first. +} + +int main() { + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest-typed-test2_test.cc b/security/nss/gtests/google_test/gtest/test/gtest-typed-test2_test.cc new file mode 100644 index 0000000000..e83ca2e11b --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest-typed-test2_test.cc @@ -0,0 +1,40 @@ +// Copyright 2008 Google Inc. +// All Rights Reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +#include <vector> + +#include "test/gtest-typed-test_test.h" +#include "gtest/gtest.h" + +// Tests that the same type-parameterized test case can be +// instantiated in different translation units linked together. +// (ContainerTest is also instantiated in gtest-typed-test_test.cc.) +INSTANTIATE_TYPED_TEST_SUITE_P(Vector, ContainerTest, + testing::Types<std::vector<int> >); diff --git a/security/nss/gtests/google_test/gtest/test/gtest-typed-test_test.cc b/security/nss/gtests/google_test/gtest/test/gtest-typed-test_test.cc new file mode 100644 index 0000000000..5fc678cb0d --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest-typed-test_test.cc @@ -0,0 +1,437 @@ +// Copyright 2008 Google Inc. +// All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +#include "test/gtest-typed-test_test.h" + +#include <set> +#include <type_traits> +#include <vector> + +#include "gtest/gtest.h" + +#if _MSC_VER +GTEST_DISABLE_MSC_WARNINGS_PUSH_(4127 /* conditional expression is constant */) +#endif // _MSC_VER + +using testing::Test; + +// Used for testing that SetUpTestSuite()/TearDownTestSuite(), fixture +// ctor/dtor, and SetUp()/TearDown() work correctly in typed tests and +// type-parameterized test. +template <typename T> +class CommonTest : public Test { + // For some technical reason, SetUpTestSuite() and TearDownTestSuite() + // must be public. + public: + static void SetUpTestSuite() { + shared_ = new T(5); + } + + static void TearDownTestSuite() { + delete shared_; + shared_ = nullptr; + } + + // This 'protected:' is optional. There's no harm in making all + // members of this fixture class template public. + protected: + // We used to use std::list here, but switched to std::vector since + // MSVC's <list> doesn't compile cleanly with /W4. + typedef std::vector<T> Vector; + typedef std::set<int> IntSet; + + CommonTest() : value_(1) {} + + ~CommonTest() override { EXPECT_EQ(3, value_); } + + void SetUp() override { + EXPECT_EQ(1, value_); + value_++; + } + + void TearDown() override { + EXPECT_EQ(2, value_); + value_++; + } + + T value_; + static T* shared_; +}; + +template <typename T> +T* CommonTest<T>::shared_ = nullptr; + +using testing::Types; + +// Tests that SetUpTestSuite()/TearDownTestSuite(), fixture ctor/dtor, +// and SetUp()/TearDown() work correctly in typed tests + +typedef Types<char, int> TwoTypes; +TYPED_TEST_SUITE(CommonTest, TwoTypes); + +TYPED_TEST(CommonTest, ValuesAreCorrect) { + // Static members of the fixture class template can be visited via + // the TestFixture:: prefix. + EXPECT_EQ(5, *TestFixture::shared_); + + // Typedefs in the fixture class template can be visited via the + // "typename TestFixture::" prefix. + typename TestFixture::Vector empty; + EXPECT_EQ(0U, empty.size()); + + typename TestFixture::IntSet empty2; + EXPECT_EQ(0U, empty2.size()); + + // Non-static members of the fixture class must be visited via + // 'this', as required by C++ for class templates. + EXPECT_EQ(2, this->value_); +} + +// The second test makes sure shared_ is not deleted after the first +// test. +TYPED_TEST(CommonTest, ValuesAreStillCorrect) { + // Static members of the fixture class template can also be visited + // via 'this'. + ASSERT_TRUE(this->shared_ != nullptr); + EXPECT_EQ(5, *this->shared_); + + // TypeParam can be used to refer to the type parameter. + EXPECT_EQ(static_cast<TypeParam>(2), this->value_); +} + +// Tests that multiple TYPED_TEST_SUITE's can be defined in the same +// translation unit. + +template <typename T> +class TypedTest1 : public Test { +}; + +// Verifies that the second argument of TYPED_TEST_SUITE can be a +// single type. 
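(The TYPED_TEST_SUITE(TypedTest1, int) declaration that the comment above introduces follows below.) As a condensed aside, the member-access rules demonstrated by CommonTest can be summarized in one small typed test; this is an illustrative sketch with made-up names, not part of the patch:

#include <vector>
#include "gtest/gtest.h"

template <typename T>
class SketchTest : public testing::Test {
 protected:
  typedef std::vector<T> Vector;
  T value_ = T(1);
  static T* shared_;
};
template <typename T>
T* SketchTest<T>::shared_ = nullptr;

typedef testing::Types<char, int> SketchTypes;
TYPED_TEST_SUITE(SketchTest, SketchTypes);

TYPED_TEST(SketchTest, AccessPatterns) {
  typename TestFixture::Vector v;                // typedefs need "typename TestFixture::"
  EXPECT_EQ(0U, v.size());
  EXPECT_TRUE(TestFixture::shared_ == nullptr);  // statics via TestFixture::
  EXPECT_EQ(TypeParam(1), this->value_);         // non-statics via this->; TypeParam names T
}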
+TYPED_TEST_SUITE(TypedTest1, int); +TYPED_TEST(TypedTest1, A) {} + +template <typename T> +class TypedTest2 : public Test { +}; + +// Verifies that the second argument of TYPED_TEST_SUITE can be a +// Types<...> type list. +TYPED_TEST_SUITE(TypedTest2, Types<int>); + +// This also verifies that tests from different typed test cases can +// share the same name. +TYPED_TEST(TypedTest2, A) {} + +// Tests that a typed test case can be defined in a namespace. + +namespace library1 { + +template <typename T> +class NumericTest : public Test { +}; + +typedef Types<int, long> NumericTypes; +TYPED_TEST_SUITE(NumericTest, NumericTypes); + +TYPED_TEST(NumericTest, DefaultIsZero) { + EXPECT_EQ(0, TypeParam()); +} + +} // namespace library1 + +// Tests that custom names work. +template <typename T> +class TypedTestWithNames : public Test {}; + +class TypedTestNames { + public: + template <typename T> + static std::string GetName(int i) { + if (std::is_same<T, char>::value) { + return std::string("char") + ::testing::PrintToString(i); + } + if (std::is_same<T, int>::value) { + return std::string("int") + ::testing::PrintToString(i); + } + } +}; + +TYPED_TEST_SUITE(TypedTestWithNames, TwoTypes, TypedTestNames); + +TYPED_TEST(TypedTestWithNames, TestSuiteName) { + if (std::is_same<TypeParam, char>::value) { + EXPECT_STREQ(::testing::UnitTest::GetInstance() + ->current_test_info() + ->test_suite_name(), + "TypedTestWithNames/char0"); + } + if (std::is_same<TypeParam, int>::value) { + EXPECT_STREQ(::testing::UnitTest::GetInstance() + ->current_test_info() + ->test_suite_name(), + "TypedTestWithNames/int1"); + } +} + +using testing::Types; +using testing::internal::TypedTestSuitePState; + +// Tests TypedTestSuitePState. + +class TypedTestSuitePStateTest : public Test { + protected: + void SetUp() override { + state_.AddTestName("foo.cc", 0, "FooTest", "A"); + state_.AddTestName("foo.cc", 0, "FooTest", "B"); + state_.AddTestName("foo.cc", 0, "FooTest", "C"); + } + + TypedTestSuitePState state_; +}; + +TEST_F(TypedTestSuitePStateTest, SucceedsForMatchingList) { + const char* tests = "A, B, C"; + EXPECT_EQ(tests, + state_.VerifyRegisteredTestNames("Suite", "foo.cc", 1, tests)); +} + +// Makes sure that the order of the tests and spaces around the names +// don't matter. +TEST_F(TypedTestSuitePStateTest, IgnoresOrderAndSpaces) { + const char* tests = "A,C, B"; + EXPECT_EQ(tests, + state_.VerifyRegisteredTestNames("Suite", "foo.cc", 1, tests)); +} + +using TypedTestSuitePStateDeathTest = TypedTestSuitePStateTest; + +TEST_F(TypedTestSuitePStateDeathTest, DetectsDuplicates) { + EXPECT_DEATH_IF_SUPPORTED( + state_.VerifyRegisteredTestNames("Suite", "foo.cc", 1, "A, B, A, C"), + "foo\\.cc.1.?: Test A is listed more than once\\."); +} + +TEST_F(TypedTestSuitePStateDeathTest, DetectsExtraTest) { + EXPECT_DEATH_IF_SUPPORTED( + state_.VerifyRegisteredTestNames("Suite", "foo.cc", 1, "A, B, C, D"), + "foo\\.cc.1.?: No test named D can be found in this test suite\\."); +} + +TEST_F(TypedTestSuitePStateDeathTest, DetectsMissedTest) { + EXPECT_DEATH_IF_SUPPORTED( + state_.VerifyRegisteredTestNames("Suite", "foo.cc", 1, "A, C"), + "foo\\.cc.1.?: You forgot to list test B\\."); +} + +// Tests that defining a test for a parameterized test case generates +// a run-time error if the test case has been registered. 
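(The death test that the comment above introduces, DetectsTestAfterRegistration, follows below.) The optional third argument to TYPED_TEST_SUITE, shown with TypedTestNames above, controls how instantiations are named; a minimal sketch with illustrative names, not part of the patch:

#include <string>
#include <type_traits>
#include "gtest/gtest.h"

template <typename T>
class NamedSketchTest : public testing::Test {};

struct SketchNames {
  template <typename T>
  static std::string GetName(int index) {
    if (std::is_same<T, char>::value) return "char" + std::to_string(index);
    if (std::is_same<T, int>::value) return "int" + std::to_string(index);
    return std::to_string(index);  // keep the default numbering for anything else
  }
};

typedef testing::Types<char, int> NamedSketchTypes;
TYPED_TEST_SUITE(NamedSketchTest, NamedSketchTypes, SketchNames);

// Reported as NamedSketchTest/char0.Runs and NamedSketchTest/int1.Runs
// instead of the default NamedSketchTest/0.Runs and NamedSketchTest/1.Runs.
TYPED_TEST(NamedSketchTest, Runs) {}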
+TEST_F(TypedTestSuitePStateDeathTest, DetectsTestAfterRegistration) { + state_.VerifyRegisteredTestNames("Suite", "foo.cc", 1, "A, B, C"); + EXPECT_DEATH_IF_SUPPORTED( + state_.AddTestName("foo.cc", 2, "FooTest", "D"), + "foo\\.cc.2.?: Test D must be defined before REGISTER_TYPED_TEST_SUITE_P" + "\\(FooTest, \\.\\.\\.\\)\\."); +} + +// Tests that SetUpTestSuite()/TearDownTestSuite(), fixture ctor/dtor, +// and SetUp()/TearDown() work correctly in type-parameterized tests. + +template <typename T> +class DerivedTest : public CommonTest<T> { +}; + +TYPED_TEST_SUITE_P(DerivedTest); + +TYPED_TEST_P(DerivedTest, ValuesAreCorrect) { + // Static members of the fixture class template can be visited via + // the TestFixture:: prefix. + EXPECT_EQ(5, *TestFixture::shared_); + + // Non-static members of the fixture class must be visited via + // 'this', as required by C++ for class templates. + EXPECT_EQ(2, this->value_); +} + +// The second test makes sure shared_ is not deleted after the first +// test. +TYPED_TEST_P(DerivedTest, ValuesAreStillCorrect) { + // Static members of the fixture class template can also be visited + // via 'this'. + ASSERT_TRUE(this->shared_ != nullptr); + EXPECT_EQ(5, *this->shared_); + EXPECT_EQ(2, this->value_); +} + +REGISTER_TYPED_TEST_SUITE_P(DerivedTest, + ValuesAreCorrect, ValuesAreStillCorrect); + +typedef Types<short, long> MyTwoTypes; +INSTANTIATE_TYPED_TEST_SUITE_P(My, DerivedTest, MyTwoTypes); + +// Tests that custom names work with type parametrized tests. We reuse the +// TwoTypes from above here. +template <typename T> +class TypeParametrizedTestWithNames : public Test {}; + +TYPED_TEST_SUITE_P(TypeParametrizedTestWithNames); + +TYPED_TEST_P(TypeParametrizedTestWithNames, TestSuiteName) { + if (std::is_same<TypeParam, char>::value) { + EXPECT_STREQ(::testing::UnitTest::GetInstance() + ->current_test_info() + ->test_suite_name(), + "CustomName/TypeParametrizedTestWithNames/parChar0"); + } + if (std::is_same<TypeParam, int>::value) { + EXPECT_STREQ(::testing::UnitTest::GetInstance() + ->current_test_info() + ->test_suite_name(), + "CustomName/TypeParametrizedTestWithNames/parInt1"); + } +} + +REGISTER_TYPED_TEST_SUITE_P(TypeParametrizedTestWithNames, TestSuiteName); + +class TypeParametrizedTestNames { + public: + template <typename T> + static std::string GetName(int i) { + if (std::is_same<T, char>::value) { + return std::string("parChar") + ::testing::PrintToString(i); + } + if (std::is_same<T, int>::value) { + return std::string("parInt") + ::testing::PrintToString(i); + } + } +}; + +INSTANTIATE_TYPED_TEST_SUITE_P(CustomName, TypeParametrizedTestWithNames, + TwoTypes, TypeParametrizedTestNames); + +// Tests that multiple TYPED_TEST_SUITE_P's can be defined in the same +// translation unit. + +template <typename T> +class TypedTestP1 : public Test { +}; + +TYPED_TEST_SUITE_P(TypedTestP1); + +// For testing that the code between TYPED_TEST_SUITE_P() and +// TYPED_TEST_P() is not enclosed in a namespace. +using IntAfterTypedTestSuiteP = int; + +TYPED_TEST_P(TypedTestP1, A) {} +TYPED_TEST_P(TypedTestP1, B) {} + +// For testing that the code between TYPED_TEST_P() and +// REGISTER_TYPED_TEST_SUITE_P() is not enclosed in a namespace. +using IntBeforeRegisterTypedTestSuiteP = int; + +REGISTER_TYPED_TEST_SUITE_P(TypedTestP1, A, B); + +template <typename T> +class TypedTestP2 : public Test { +}; + +TYPED_TEST_SUITE_P(TypedTestP2); + +// This also verifies that tests from different type-parameterized +// test cases can share the same name. 
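(The TYPED_TEST_P(TypedTestP2, A) that the comment above refers to follows below.) DerivedTest above walks through the full type-parameterized workflow; condensed into one place, the four required steps look like this, a sketch with illustrative names rather than part of the patch:

#include "gtest/gtest.h"

// 1. Declare the suite as type-parameterized.
template <typename T>
class WorkflowSketchTest : public testing::Test {};
TYPED_TEST_SUITE_P(WorkflowSketchTest);

// 2. Define the tests with TYPED_TEST_P.
TYPED_TEST_P(WorkflowSketchTest, DefaultConstructsToZero) {
  EXPECT_EQ(TypeParam(0), TypeParam());
}

// 3. Register every test of the suite exactly once.
REGISTER_TYPED_TEST_SUITE_P(WorkflowSketchTest, DefaultConstructsToZero);

// 4. Instantiate with a concrete type list; this step may live in a different
//    translation unit, as gtest-typed-test2_test.cc demonstrates.
typedef testing::Types<int, long> WorkflowSketchTypes;
INSTANTIATE_TYPED_TEST_SUITE_P(Sketch, WorkflowSketchTest, WorkflowSketchTypes);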
+TYPED_TEST_P(TypedTestP2, A) {} + +REGISTER_TYPED_TEST_SUITE_P(TypedTestP2, A); + +// Verifies that the code between TYPED_TEST_SUITE_P() and +// REGISTER_TYPED_TEST_SUITE_P() is not enclosed in a namespace. +IntAfterTypedTestSuiteP after = 0; +IntBeforeRegisterTypedTestSuiteP before = 0; + +// Verifies that the last argument of INSTANTIATE_TYPED_TEST_SUITE_P() +// can be either a single type or a Types<...> type list. +INSTANTIATE_TYPED_TEST_SUITE_P(Int, TypedTestP1, int); +INSTANTIATE_TYPED_TEST_SUITE_P(Int, TypedTestP2, Types<int>); + +// Tests that the same type-parameterized test case can be +// instantiated more than once in the same translation unit. +INSTANTIATE_TYPED_TEST_SUITE_P(Double, TypedTestP2, Types<double>); + +// Tests that the same type-parameterized test case can be +// instantiated in different translation units linked together. +// (ContainerTest is also instantiated in gtest-typed-test_test.cc.) +typedef Types<std::vector<double>, std::set<char> > MyContainers; +INSTANTIATE_TYPED_TEST_SUITE_P(My, ContainerTest, MyContainers); + +// Tests that a type-parameterized test case can be defined and +// instantiated in a namespace. + +namespace library2 { + +template <typename T> +class NumericTest : public Test { +}; + +TYPED_TEST_SUITE_P(NumericTest); + +TYPED_TEST_P(NumericTest, DefaultIsZero) { + EXPECT_EQ(0, TypeParam()); +} + +TYPED_TEST_P(NumericTest, ZeroIsLessThanOne) { + EXPECT_LT(TypeParam(0), TypeParam(1)); +} + +REGISTER_TYPED_TEST_SUITE_P(NumericTest, + DefaultIsZero, ZeroIsLessThanOne); +typedef Types<int, double> NumericTypes; +INSTANTIATE_TYPED_TEST_SUITE_P(My, NumericTest, NumericTypes); + +static const char* GetTestName() { + return testing::UnitTest::GetInstance()->current_test_info()->name(); +} +// Test the stripping of space from test names +template <typename T> class TrimmedTest : public Test { }; +TYPED_TEST_SUITE_P(TrimmedTest); +TYPED_TEST_P(TrimmedTest, Test1) { EXPECT_STREQ("Test1", GetTestName()); } +TYPED_TEST_P(TrimmedTest, Test2) { EXPECT_STREQ("Test2", GetTestName()); } +TYPED_TEST_P(TrimmedTest, Test3) { EXPECT_STREQ("Test3", GetTestName()); } +TYPED_TEST_P(TrimmedTest, Test4) { EXPECT_STREQ("Test4", GetTestName()); } +TYPED_TEST_P(TrimmedTest, Test5) { EXPECT_STREQ("Test5", GetTestName()); } +REGISTER_TYPED_TEST_SUITE_P( + TrimmedTest, + Test1, Test2,Test3 , Test4 ,Test5 ); // NOLINT +template <typename T1, typename T2> struct MyPair {}; +// Be sure to try a type with a comma in its name just in case it matters. +typedef Types<int, double, MyPair<int, int> > TrimTypes; +INSTANTIATE_TYPED_TEST_SUITE_P(My, TrimmedTest, TrimTypes); + +} // namespace library2 + diff --git a/security/nss/gtests/google_test/gtest/test/gtest-typed-test_test.h b/security/nss/gtests/google_test/gtest/test/gtest-typed-test_test.h new file mode 100644 index 0000000000..8ce559c99f --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest-typed-test_test.h @@ -0,0 +1,60 @@ +// Copyright 2008 Google Inc. +// All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. 
+// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +#ifndef GOOGLETEST_TEST_GTEST_TYPED_TEST_TEST_H_ +#define GOOGLETEST_TEST_GTEST_TYPED_TEST_TEST_H_ + +#include "gtest/gtest.h" + +using testing::Test; + +// For testing that the same type-parameterized test case can be +// instantiated in different translation units linked together. +// ContainerTest will be instantiated in both gtest-typed-test_test.cc +// and gtest-typed-test2_test.cc. + +template <typename T> +class ContainerTest : public Test { +}; + +TYPED_TEST_SUITE_P(ContainerTest); + +TYPED_TEST_P(ContainerTest, CanBeDefaultConstructed) { + TypeParam container; +} + +TYPED_TEST_P(ContainerTest, InitialSizeIsZero) { + TypeParam container; + EXPECT_EQ(0U, container.size()); +} + +REGISTER_TYPED_TEST_SUITE_P(ContainerTest, + CanBeDefaultConstructed, InitialSizeIsZero); + +#endif // GOOGLETEST_TEST_GTEST_TYPED_TEST_TEST_H_ diff --git a/security/nss/gtests/google_test/gtest/test/gtest-unittest-api_test.cc b/security/nss/gtests/google_test/gtest/test/gtest-unittest-api_test.cc new file mode 100644 index 0000000000..8ef505838c --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest-unittest-api_test.cc @@ -0,0 +1,328 @@ +// Copyright 2009 Google Inc. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// The Google C++ Testing and Mocking Framework (Google Test) +// +// This file contains tests verifying correctness of data provided via +// UnitTest's public methods. + +#include "gtest/gtest.h" + +#include <string.h> // For strcmp. +#include <algorithm> + +using ::testing::InitGoogleTest; + +namespace testing { +namespace internal { + +template <typename T> +struct LessByName { + bool operator()(const T* a, const T* b) { + return strcmp(a->name(), b->name()) < 0; + } +}; + +class UnitTestHelper { + public: + // Returns the array of pointers to all test suites sorted by the test suite + // name. The caller is responsible for deleting the array. + static TestSuite const** GetSortedTestSuites() { + UnitTest& unit_test = *UnitTest::GetInstance(); + auto const** const test_suites = new const TestSuite*[static_cast<size_t>( + unit_test.total_test_suite_count())]; + + for (int i = 0; i < unit_test.total_test_suite_count(); ++i) + test_suites[i] = unit_test.GetTestSuite(i); + + std::sort(test_suites, + test_suites + unit_test.total_test_suite_count(), + LessByName<TestSuite>()); + return test_suites; + } + + // Returns the test suite by its name. The caller doesn't own the returned + // pointer. + static const TestSuite* FindTestSuite(const char* name) { + UnitTest& unit_test = *UnitTest::GetInstance(); + for (int i = 0; i < unit_test.total_test_suite_count(); ++i) { + const TestSuite* test_suite = unit_test.GetTestSuite(i); + if (0 == strcmp(test_suite->name(), name)) + return test_suite; + } + return nullptr; + } + + // Returns the array of pointers to all tests in a particular test suite + // sorted by the test name. The caller is responsible for deleting the + // array. + static TestInfo const** GetSortedTests(const TestSuite* test_suite) { + TestInfo const** const tests = new const TestInfo*[static_cast<size_t>( + test_suite->total_test_count())]; + + for (int i = 0; i < test_suite->total_test_count(); ++i) + tests[i] = test_suite->GetTestInfo(i); + + std::sort(tests, tests + test_suite->total_test_count(), + LessByName<TestInfo>()); + return tests; + } +}; + +template <typename T> class TestSuiteWithCommentTest : public Test {}; +TYPED_TEST_SUITE(TestSuiteWithCommentTest, Types<int>); +TYPED_TEST(TestSuiteWithCommentTest, Dummy) {} + +const int kTypedTestSuites = 1; +const int kTypedTests = 1; + +// We can only test the accessors that do not change value while tests run. +// Since tests can be run in any order, the values the accessors that track +// test execution (such as failed_test_count) can not be predicted. 
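(The TEST(ApiTest, ...) cases that the note above refers to follow below.) The UnitTestHelper above drives Google Test's public reflection API; a minimal sketch of walking every registered test the same way, illustrative and not part of the patch:

#include <cstdio>
#include "gtest/gtest.h"

// Prints "suite.test" for every registered test. Safe to call once
// InitGoogleTest() has run; counters that track execution, such as
// failed_test_count(), are only meaningful after the tests have run.
void PrintRegisteredTests() {
  const testing::UnitTest& unit_test = *testing::UnitTest::GetInstance();
  for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
    const testing::TestSuite* suite = unit_test.GetTestSuite(i);
    for (int j = 0; j < suite->total_test_count(); ++j) {
      std::printf("%s.%s\n", suite->name(), suite->GetTestInfo(j)->name());
    }
  }
}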
+TEST(ApiTest, UnitTestImmutableAccessorsWork) { + UnitTest* unit_test = UnitTest::GetInstance(); + + ASSERT_EQ(2 + kTypedTestSuites, unit_test->total_test_suite_count()); + EXPECT_EQ(1 + kTypedTestSuites, unit_test->test_suite_to_run_count()); + EXPECT_EQ(2, unit_test->disabled_test_count()); + EXPECT_EQ(5 + kTypedTests, unit_test->total_test_count()); + EXPECT_EQ(3 + kTypedTests, unit_test->test_to_run_count()); + + const TestSuite** const test_suites = UnitTestHelper::GetSortedTestSuites(); + + EXPECT_STREQ("ApiTest", test_suites[0]->name()); + EXPECT_STREQ("DISABLED_Test", test_suites[1]->name()); + EXPECT_STREQ("TestSuiteWithCommentTest/0", test_suites[2]->name()); + + delete[] test_suites; + + // The following lines initiate actions to verify certain methods in + // FinalSuccessChecker::TearDown. + + // Records a test property to verify TestResult::GetTestProperty(). + RecordProperty("key", "value"); +} + +AssertionResult IsNull(const char* str) { + if (str != nullptr) { + return testing::AssertionFailure() << "argument is " << str; + } + return AssertionSuccess(); +} + +TEST(ApiTest, TestSuiteImmutableAccessorsWork) { + const TestSuite* test_suite = UnitTestHelper::FindTestSuite("ApiTest"); + ASSERT_TRUE(test_suite != nullptr); + + EXPECT_STREQ("ApiTest", test_suite->name()); + EXPECT_TRUE(IsNull(test_suite->type_param())); + EXPECT_TRUE(test_suite->should_run()); + EXPECT_EQ(1, test_suite->disabled_test_count()); + EXPECT_EQ(3, test_suite->test_to_run_count()); + ASSERT_EQ(4, test_suite->total_test_count()); + + const TestInfo** tests = UnitTestHelper::GetSortedTests(test_suite); + + EXPECT_STREQ("DISABLED_Dummy1", tests[0]->name()); + EXPECT_STREQ("ApiTest", tests[0]->test_suite_name()); + EXPECT_TRUE(IsNull(tests[0]->value_param())); + EXPECT_TRUE(IsNull(tests[0]->type_param())); + EXPECT_FALSE(tests[0]->should_run()); + + EXPECT_STREQ("TestSuiteDisabledAccessorsWork", tests[1]->name()); + EXPECT_STREQ("ApiTest", tests[1]->test_suite_name()); + EXPECT_TRUE(IsNull(tests[1]->value_param())); + EXPECT_TRUE(IsNull(tests[1]->type_param())); + EXPECT_TRUE(tests[1]->should_run()); + + EXPECT_STREQ("TestSuiteImmutableAccessorsWork", tests[2]->name()); + EXPECT_STREQ("ApiTest", tests[2]->test_suite_name()); + EXPECT_TRUE(IsNull(tests[2]->value_param())); + EXPECT_TRUE(IsNull(tests[2]->type_param())); + EXPECT_TRUE(tests[2]->should_run()); + + EXPECT_STREQ("UnitTestImmutableAccessorsWork", tests[3]->name()); + EXPECT_STREQ("ApiTest", tests[3]->test_suite_name()); + EXPECT_TRUE(IsNull(tests[3]->value_param())); + EXPECT_TRUE(IsNull(tests[3]->type_param())); + EXPECT_TRUE(tests[3]->should_run()); + + delete[] tests; + tests = nullptr; + + test_suite = UnitTestHelper::FindTestSuite("TestSuiteWithCommentTest/0"); + ASSERT_TRUE(test_suite != nullptr); + + EXPECT_STREQ("TestSuiteWithCommentTest/0", test_suite->name()); + EXPECT_STREQ(GetTypeName<Types<int>>().c_str(), test_suite->type_param()); + EXPECT_TRUE(test_suite->should_run()); + EXPECT_EQ(0, test_suite->disabled_test_count()); + EXPECT_EQ(1, test_suite->test_to_run_count()); + ASSERT_EQ(1, test_suite->total_test_count()); + + tests = UnitTestHelper::GetSortedTests(test_suite); + + EXPECT_STREQ("Dummy", tests[0]->name()); + EXPECT_STREQ("TestSuiteWithCommentTest/0", tests[0]->test_suite_name()); + EXPECT_TRUE(IsNull(tests[0]->value_param())); + EXPECT_STREQ(GetTypeName<Types<int>>().c_str(), tests[0]->type_param()); + EXPECT_TRUE(tests[0]->should_run()); + + delete[] tests; +} + +TEST(ApiTest, TestSuiteDisabledAccessorsWork) { + const 
TestSuite* test_suite = UnitTestHelper::FindTestSuite("DISABLED_Test"); + ASSERT_TRUE(test_suite != nullptr); + + EXPECT_STREQ("DISABLED_Test", test_suite->name()); + EXPECT_TRUE(IsNull(test_suite->type_param())); + EXPECT_FALSE(test_suite->should_run()); + EXPECT_EQ(1, test_suite->disabled_test_count()); + EXPECT_EQ(0, test_suite->test_to_run_count()); + ASSERT_EQ(1, test_suite->total_test_count()); + + const TestInfo* const test_info = test_suite->GetTestInfo(0); + EXPECT_STREQ("Dummy2", test_info->name()); + EXPECT_STREQ("DISABLED_Test", test_info->test_suite_name()); + EXPECT_TRUE(IsNull(test_info->value_param())); + EXPECT_TRUE(IsNull(test_info->type_param())); + EXPECT_FALSE(test_info->should_run()); +} + +// These two tests are here to provide support for testing +// test_suite_to_run_count, disabled_test_count, and test_to_run_count. +TEST(ApiTest, DISABLED_Dummy1) {} +TEST(DISABLED_Test, Dummy2) {} + +class FinalSuccessChecker : public Environment { + protected: + void TearDown() override { + UnitTest* unit_test = UnitTest::GetInstance(); + + EXPECT_EQ(1 + kTypedTestSuites, unit_test->successful_test_suite_count()); + EXPECT_EQ(3 + kTypedTests, unit_test->successful_test_count()); + EXPECT_EQ(0, unit_test->failed_test_suite_count()); + EXPECT_EQ(0, unit_test->failed_test_count()); + EXPECT_TRUE(unit_test->Passed()); + EXPECT_FALSE(unit_test->Failed()); + ASSERT_EQ(2 + kTypedTestSuites, unit_test->total_test_suite_count()); + + const TestSuite** const test_suites = UnitTestHelper::GetSortedTestSuites(); + + EXPECT_STREQ("ApiTest", test_suites[0]->name()); + EXPECT_TRUE(IsNull(test_suites[0]->type_param())); + EXPECT_TRUE(test_suites[0]->should_run()); + EXPECT_EQ(1, test_suites[0]->disabled_test_count()); + ASSERT_EQ(4, test_suites[0]->total_test_count()); + EXPECT_EQ(3, test_suites[0]->successful_test_count()); + EXPECT_EQ(0, test_suites[0]->failed_test_count()); + EXPECT_TRUE(test_suites[0]->Passed()); + EXPECT_FALSE(test_suites[0]->Failed()); + + EXPECT_STREQ("DISABLED_Test", test_suites[1]->name()); + EXPECT_TRUE(IsNull(test_suites[1]->type_param())); + EXPECT_FALSE(test_suites[1]->should_run()); + EXPECT_EQ(1, test_suites[1]->disabled_test_count()); + ASSERT_EQ(1, test_suites[1]->total_test_count()); + EXPECT_EQ(0, test_suites[1]->successful_test_count()); + EXPECT_EQ(0, test_suites[1]->failed_test_count()); + + EXPECT_STREQ("TestSuiteWithCommentTest/0", test_suites[2]->name()); + EXPECT_STREQ(GetTypeName<Types<int>>().c_str(), + test_suites[2]->type_param()); + EXPECT_TRUE(test_suites[2]->should_run()); + EXPECT_EQ(0, test_suites[2]->disabled_test_count()); + ASSERT_EQ(1, test_suites[2]->total_test_count()); + EXPECT_EQ(1, test_suites[2]->successful_test_count()); + EXPECT_EQ(0, test_suites[2]->failed_test_count()); + EXPECT_TRUE(test_suites[2]->Passed()); + EXPECT_FALSE(test_suites[2]->Failed()); + + const TestSuite* test_suite = UnitTestHelper::FindTestSuite("ApiTest"); + const TestInfo** tests = UnitTestHelper::GetSortedTests(test_suite); + EXPECT_STREQ("DISABLED_Dummy1", tests[0]->name()); + EXPECT_STREQ("ApiTest", tests[0]->test_suite_name()); + EXPECT_FALSE(tests[0]->should_run()); + + EXPECT_STREQ("TestSuiteDisabledAccessorsWork", tests[1]->name()); + EXPECT_STREQ("ApiTest", tests[1]->test_suite_name()); + EXPECT_TRUE(IsNull(tests[1]->value_param())); + EXPECT_TRUE(IsNull(tests[1]->type_param())); + EXPECT_TRUE(tests[1]->should_run()); + EXPECT_TRUE(tests[1]->result()->Passed()); + EXPECT_EQ(0, tests[1]->result()->test_property_count()); + + 
EXPECT_STREQ("TestSuiteImmutableAccessorsWork", tests[2]->name()); + EXPECT_STREQ("ApiTest", tests[2]->test_suite_name()); + EXPECT_TRUE(IsNull(tests[2]->value_param())); + EXPECT_TRUE(IsNull(tests[2]->type_param())); + EXPECT_TRUE(tests[2]->should_run()); + EXPECT_TRUE(tests[2]->result()->Passed()); + EXPECT_EQ(0, tests[2]->result()->test_property_count()); + + EXPECT_STREQ("UnitTestImmutableAccessorsWork", tests[3]->name()); + EXPECT_STREQ("ApiTest", tests[3]->test_suite_name()); + EXPECT_TRUE(IsNull(tests[3]->value_param())); + EXPECT_TRUE(IsNull(tests[3]->type_param())); + EXPECT_TRUE(tests[3]->should_run()); + EXPECT_TRUE(tests[3]->result()->Passed()); + EXPECT_EQ(1, tests[3]->result()->test_property_count()); + const TestProperty& property = tests[3]->result()->GetTestProperty(0); + EXPECT_STREQ("key", property.key()); + EXPECT_STREQ("value", property.value()); + + delete[] tests; + + test_suite = UnitTestHelper::FindTestSuite("TestSuiteWithCommentTest/0"); + tests = UnitTestHelper::GetSortedTests(test_suite); + + EXPECT_STREQ("Dummy", tests[0]->name()); + EXPECT_STREQ("TestSuiteWithCommentTest/0", tests[0]->test_suite_name()); + EXPECT_TRUE(IsNull(tests[0]->value_param())); + EXPECT_STREQ(GetTypeName<Types<int>>().c_str(), tests[0]->type_param()); + EXPECT_TRUE(tests[0]->should_run()); + EXPECT_TRUE(tests[0]->result()->Passed()); + EXPECT_EQ(0, tests[0]->result()->test_property_count()); + + delete[] tests; + delete[] test_suites; + } +}; + +} // namespace internal +} // namespace testing + +int main(int argc, char **argv) { + InitGoogleTest(&argc, argv); + + AddGlobalTestEnvironment(new testing::internal::FinalSuccessChecker()); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_all_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_all_test.cc new file mode 100644 index 0000000000..615b29b706 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_all_test.cc @@ -0,0 +1,46 @@ +// Copyright 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +// +// Tests for Google C++ Testing and Mocking Framework (Google Test) +// +// Sometimes it's desirable to build most of Google Test's own tests +// by compiling a single file. This file serves this purpose. +#include "test/googletest-filepath-test.cc" +#include "test/googletest-message-test.cc" +#include "test/googletest-options-test.cc" +#include "test/googletest-port-test.cc" +#include "test/googletest-test-part-test.cc" +#include "test/gtest-typed-test2_test.cc" +#include "test/gtest-typed-test_test.cc" +#include "test/gtest_pred_impl_unittest.cc" +#include "test/gtest_prod_test.cc" +#include "test/gtest_skip_test.cc" +#include "test/gtest_unittest.cc" +#include "test/production.cc" diff --git a/security/nss/gtests/google_test/gtest/test/gtest_assert_by_exception_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_assert_by_exception_test.cc new file mode 100644 index 0000000000..ada4cb30ef --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_assert_by_exception_test.cc @@ -0,0 +1,116 @@ +// Copyright 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Tests Google Test's assert-by-exception mode with exceptions enabled. + +#include "gtest/gtest.h" + +#include <stdlib.h> +#include <stdio.h> +#include <string.h> +#include <stdexcept> + +class ThrowListener : public testing::EmptyTestEventListener { + void OnTestPartResult(const testing::TestPartResult& result) override { + if (result.type() == testing::TestPartResult::kFatalFailure) { + throw testing::AssertionException(result); + } + } +}; + +// Prints the given failure message and exits the program with +// non-zero. We use this instead of a Google Test assertion to +// indicate a failure, as the latter is been tested and cannot be +// relied on. +void Fail(const char* msg) { + printf("FAILURE: %s\n", msg); + fflush(stdout); + exit(1); +} + +static void AssertFalse() { + ASSERT_EQ(2, 3) << "Expected failure"; +} + +// Tests that an assertion failure throws a subclass of +// std::runtime_error. 
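(TEST(Test, Test), which the comment above introduces, follows below.) The ThrowListener defined above is the usual recipe for letting a host framework see fatal Google Test failures as exceptions; a condensed sketch of wiring it up, with illustrative names and not part of the patch:

#include "gtest/gtest.h"

// Turns every fatal assertion failure into a thrown
// testing::AssertionException; requires exceptions to be enabled.
class ThrowOnFatalFailureListener : public testing::EmptyTestEventListener {
  void OnTestPartResult(const testing::TestPartResult& result) override {
    if (result.type() == testing::TestPartResult::kFatalFailure)
      throw testing::AssertionException(result);
  }
};

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  // Google Test takes ownership of the listener.
  testing::UnitTest::GetInstance()->listeners().Append(
      new ThrowOnFatalFailureListener);
  // From here on a failing ASSERT_* throws, and a host framework can catch
  // testing::AssertionException around the code it runs; e.what() carries
  // the failure message.
  return RUN_ALL_TESTS();
}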
+TEST(Test, Test) { + // A successful assertion shouldn't throw. + try { + EXPECT_EQ(3, 3); + } catch(...) { + Fail("A successful assertion wrongfully threw."); + } + + // A successful assertion shouldn't throw. + try { + EXPECT_EQ(3, 4); + } catch(...) { + Fail("A failed non-fatal assertion wrongfully threw."); + } + + // A failed assertion should throw. + try { + AssertFalse(); + } catch(const testing::AssertionException& e) { + if (strstr(e.what(), "Expected failure") != nullptr) throw; + + printf("%s", + "A failed assertion did throw an exception of the right type, " + "but the message is incorrect. Instead of containing \"Expected " + "failure\", it is:\n"); + Fail(e.what()); + } catch(...) { + Fail("A failed assertion threw the wrong type of exception."); + } + Fail("A failed assertion should've thrown but didn't."); +} + +int kTestForContinuingTest = 0; + +TEST(Test, Test2) { + kTestForContinuingTest = 1; +} + +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + testing::UnitTest::GetInstance()->listeners().Append(new ThrowListener); + + int result = RUN_ALL_TESTS(); + if (result == 0) { + printf("RUN_ALL_TESTS returned %d\n", result); + Fail("Expected failure instead."); + } + + if (kTestForContinuingTest == 0) { + Fail("Should have continued with other tests, but did not."); + } + return 0; +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_environment_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_environment_test.cc new file mode 100644 index 0000000000..064bfc50b9 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_environment_test.cc @@ -0,0 +1,188 @@ +// Copyright 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Tests using global test environments. 
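(The includes of gtest_environment_test.cc, whose header comment appears above, follow below.) Before the failure-injection machinery in this file, it helps to see the minimal way a global environment is hooked into a test binary; a sketch with illustrative names, not part of the patch:

#include <cstdio>
#include "gtest/gtest.h"

// SetUp() runs once before the first test, TearDown() once after the last.
class SketchEnvironment : public testing::Environment {
 public:
  void SetUp() override { std::printf("global set-up\n"); }
  void TearDown() override { std::printf("global tear-down\n"); }
};

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  // Google Test takes ownership and returns the pointer it was given.
  testing::AddGlobalTestEnvironment(new SketchEnvironment);
  return RUN_ALL_TESTS();
}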
+ +#include <stdlib.h> +#include <stdio.h> +#include "gtest/gtest.h" +#include "src/gtest-internal-inl.h" + +namespace testing { +GTEST_DECLARE_string_(filter); +} + +namespace { + +enum FailureType { + NO_FAILURE, NON_FATAL_FAILURE, FATAL_FAILURE +}; + +// For testing using global test environments. +class MyEnvironment : public testing::Environment { + public: + MyEnvironment() { Reset(); } + + // Depending on the value of failure_in_set_up_, SetUp() will + // generate a non-fatal failure, generate a fatal failure, or + // succeed. + void SetUp() override { + set_up_was_run_ = true; + + switch (failure_in_set_up_) { + case NON_FATAL_FAILURE: + ADD_FAILURE() << "Expected non-fatal failure in global set-up."; + break; + case FATAL_FAILURE: + FAIL() << "Expected fatal failure in global set-up."; + break; + default: + break; + } + } + + // Generates a non-fatal failure. + void TearDown() override { + tear_down_was_run_ = true; + ADD_FAILURE() << "Expected non-fatal failure in global tear-down."; + } + + // Resets the state of the environment s.t. it can be reused. + void Reset() { + failure_in_set_up_ = NO_FAILURE; + set_up_was_run_ = false; + tear_down_was_run_ = false; + } + + // We call this function to set the type of failure SetUp() should + // generate. + void set_failure_in_set_up(FailureType type) { + failure_in_set_up_ = type; + } + + // Was SetUp() run? + bool set_up_was_run() const { return set_up_was_run_; } + + // Was TearDown() run? + bool tear_down_was_run() const { return tear_down_was_run_; } + + private: + FailureType failure_in_set_up_; + bool set_up_was_run_; + bool tear_down_was_run_; +}; + +// Was the TEST run? +bool test_was_run; + +// The sole purpose of this TEST is to enable us to check whether it +// was run. +TEST(FooTest, Bar) { + test_was_run = true; +} + +// Prints the message and aborts the program if condition is false. +void Check(bool condition, const char* msg) { + if (!condition) { + printf("FAILED: %s\n", msg); + testing::internal::posix::Abort(); + } +} + +// Runs the tests. Return true if and only if successful. +// +// The 'failure' parameter specifies the type of failure that should +// be generated by the global set-up. +int RunAllTests(MyEnvironment* env, FailureType failure) { + env->Reset(); + env->set_failure_in_set_up(failure); + test_was_run = false; + testing::internal::GetUnitTestImpl()->ClearAdHocTestResult(); + return RUN_ALL_TESTS(); +} + +} // namespace + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + + // Registers a global test environment, and verifies that the + // registration function returns its argument. + MyEnvironment* const env = new MyEnvironment; + Check(testing::AddGlobalTestEnvironment(env) == env, + "AddGlobalTestEnvironment() should return its argument."); + + // Verifies that RUN_ALL_TESTS() runs the tests when the global + // set-up is successful. + Check(RunAllTests(env, NO_FAILURE) != 0, + "RUN_ALL_TESTS() should return non-zero, as the global tear-down " + "should generate a failure."); + Check(test_was_run, + "The tests should run, as the global set-up should generate no " + "failure"); + Check(env->tear_down_was_run(), + "The global tear-down should run, as the global set-up was run."); + + // Verifies that RUN_ALL_TESTS() runs the tests when the global + // set-up generates no fatal failure. 
+ Check(RunAllTests(env, NON_FATAL_FAILURE) != 0, + "RUN_ALL_TESTS() should return non-zero, as both the global set-up " + "and the global tear-down should generate a non-fatal failure."); + Check(test_was_run, + "The tests should run, as the global set-up should generate no " + "fatal failure."); + Check(env->tear_down_was_run(), + "The global tear-down should run, as the global set-up was run."); + + // Verifies that RUN_ALL_TESTS() runs no test when the global set-up + // generates a fatal failure. + Check(RunAllTests(env, FATAL_FAILURE) != 0, + "RUN_ALL_TESTS() should return non-zero, as the global set-up " + "should generate a fatal failure."); + Check(!test_was_run, + "The tests should not run, as the global set-up should generate " + "a fatal failure."); + Check(env->tear_down_was_run(), + "The global tear-down should run, as the global set-up was run."); + + // Verifies that RUN_ALL_TESTS() doesn't do global set-up or + // tear-down when there is no test to run. + testing::GTEST_FLAG(filter) = "-*"; + Check(RunAllTests(env, NO_FAILURE) == 0, + "RUN_ALL_TESTS() should return zero, as there is no test to run."); + Check(!env->set_up_was_run(), + "The global set-up should not run, as there is no test to run."); + Check(!env->tear_down_was_run(), + "The global tear-down should not run, " + "as the global set-up was not run."); + + printf("PASS\n"); + return 0; +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_help_test.py b/security/nss/gtests/google_test/gtest/test/gtest_help_test.py new file mode 100755 index 0000000000..8d953bbd9d --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_help_test.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python +# +# Copyright 2009, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Tests the --help flag of Google C++ Testing and Mocking Framework. + +SYNOPSIS + gtest_help_test.py --build_dir=BUILD/DIR + # where BUILD/DIR contains the built gtest_help_test_ file. 
+ gtest_help_test.py +""" + +import os +import re +import gtest_test_utils + + +IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux' +IS_GNUKFREEBSD = os.name == 'posix' and os.uname()[0] == 'GNU/kFreeBSD' +IS_WINDOWS = os.name == 'nt' + +PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_') +FLAG_PREFIX = '--gtest_' +DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style' +STREAM_RESULT_TO_FLAG = FLAG_PREFIX + 'stream_result_to' +UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing' +LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests' +INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG), + re.sub('^--', '/', LIST_TESTS_FLAG), + re.sub('_', '-', LIST_TESTS_FLAG)] +INTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing' + +SUPPORTS_DEATH_TESTS = "DeathTest" in gtest_test_utils.Subprocess( + [PROGRAM_PATH, LIST_TESTS_FLAG]).output + +# The help message must match this regex. +HELP_REGEX = re.compile( + FLAG_PREFIX + r'list_tests.*' + + FLAG_PREFIX + r'filter=.*' + + FLAG_PREFIX + r'also_run_disabled_tests.*' + + FLAG_PREFIX + r'repeat=.*' + + FLAG_PREFIX + r'shuffle.*' + + FLAG_PREFIX + r'random_seed=.*' + + FLAG_PREFIX + r'color=.*' + + FLAG_PREFIX + r'brief.*' + + FLAG_PREFIX + r'print_time.*' + + FLAG_PREFIX + r'output=.*' + + FLAG_PREFIX + r'break_on_failure.*' + + FLAG_PREFIX + r'throw_on_failure.*' + + FLAG_PREFIX + r'catch_exceptions=0.*', + re.DOTALL) + + +def RunWithFlag(flag): + """Runs gtest_help_test_ with the given flag. + + Returns: + the exit code and the text output as a tuple. + Args: + flag: the command-line flag to pass to gtest_help_test_, or None. + """ + + if flag is None: + command = [PROGRAM_PATH] + else: + command = [PROGRAM_PATH, flag] + child = gtest_test_utils.Subprocess(command) + return child.exit_code, child.output + + +class GTestHelpTest(gtest_test_utils.TestCase): + """Tests the --help flag and its equivalent forms.""" + + def TestHelpFlag(self, flag): + """Verifies correct behavior when help flag is specified. + + The right message must be printed and the tests must + skipped when the given flag is specified. + + Args: + flag: A flag to pass to the binary or None. + """ + + exit_code, output = RunWithFlag(flag) + self.assertEquals(0, exit_code) + self.assert_(HELP_REGEX.search(output), output) + + if IS_LINUX or IS_GNUKFREEBSD: + self.assert_(STREAM_RESULT_TO_FLAG in output, output) + else: + self.assert_(STREAM_RESULT_TO_FLAG not in output, output) + + if SUPPORTS_DEATH_TESTS and not IS_WINDOWS: + self.assert_(DEATH_TEST_STYLE_FLAG in output, output) + else: + self.assert_(DEATH_TEST_STYLE_FLAG not in output, output) + + def TestNonHelpFlag(self, flag): + """Verifies correct behavior when no help flag is specified. + + Verifies that when no help flag is specified, the tests are run + and the help message is not printed. + + Args: + flag: A flag to pass to the binary or None. 
+ """ + + exit_code, output = RunWithFlag(flag) + self.assert_(exit_code != 0) + self.assert_(not HELP_REGEX.search(output), output) + + def testPrintsHelpWithFullFlag(self): + self.TestHelpFlag('--help') + + def testPrintsHelpWithShortFlag(self): + self.TestHelpFlag('-h') + + def testPrintsHelpWithQuestionFlag(self): + self.TestHelpFlag('-?') + + def testPrintsHelpWithWindowsStyleQuestionFlag(self): + self.TestHelpFlag('/?') + + def testPrintsHelpWithUnrecognizedGoogleTestFlag(self): + self.TestHelpFlag(UNKNOWN_FLAG) + + def testPrintsHelpWithIncorrectFlagStyle(self): + for incorrect_flag in INCORRECT_FLAG_VARIANTS: + self.TestHelpFlag(incorrect_flag) + + def testRunsTestsWithoutHelpFlag(self): + """Verifies that when no help flag is specified, the tests are run + and the help message is not printed.""" + + self.TestNonHelpFlag(None) + + def testRunsTestsWithGtestInternalFlag(self): + """Verifies that the tests are run and no help message is printed when + a flag starting with Google Test prefix and 'internal_' is supplied.""" + + self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/gtest_help_test_.cc b/security/nss/gtests/google_test/gtest/test/gtest_help_test_.cc new file mode 100644 index 0000000000..750ae6ce95 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_help_test_.cc @@ -0,0 +1,45 @@ +// Copyright 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// This program is meant to be run by gtest_help_test.py. Do not run +// it directly. + +#include "gtest/gtest.h" + +// When a help flag is specified, this program should skip the tests +// and exit with 0; otherwise the following test will be executed, +// causing this program to exit with a non-zero code. 
+TEST(HelpFlagTest, ShouldNotBeRun) { + ASSERT_TRUE(false) << "Tests shouldn't be run when --help is specified."; +} + +#if GTEST_HAS_DEATH_TEST +TEST(DeathTest, UsedByPythonScriptToDetectSupportForDeathTestsInThisBinary) {} +#endif diff --git a/security/nss/gtests/google_test/gtest/test/gtest_json_test_utils.py b/security/nss/gtests/google_test/gtest/test/gtest_json_test_utils.py new file mode 100644 index 0000000000..62bbfc288f --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_json_test_utils.py @@ -0,0 +1,60 @@ +# Copyright 2018, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unit test utilities for gtest_json_output.""" + +import re + + +def normalize(obj): + """Normalize output object. + + Args: + obj: Google Test's JSON output object to normalize. + + Returns: + Normalized output without any references to transient information that may + change from run to run. + """ + def _normalize(key, value): + if key == 'time': + return re.sub(r'^\d+(\.\d+)?s$', '*', value) + elif key == 'timestamp': + return re.sub(r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\dZ$', '*', value) + elif key == 'failure': + value = re.sub(r'^.*[/\\](.*:)\d+\n', '\\1*\n', value) + return re.sub(r'Stack trace:\n(.|\n)*', 'Stack trace:\n*', value) + else: + return normalize(value) + if isinstance(obj, dict): + return {k: _normalize(k, v) for k, v in obj.items()} + if isinstance(obj, list): + return [normalize(x) for x in obj] + else: + return obj diff --git a/security/nss/gtests/google_test/gtest/test/gtest_list_output_unittest.py b/security/nss/gtests/google_test/gtest/test/gtest_list_output_unittest.py new file mode 100644 index 0000000000..a442fc169c --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_list_output_unittest.py @@ -0,0 +1,286 @@ +#!/usr/bin/env python +# +# Copyright 2006, Google Inc. +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +"""Unit test for Google Test's --gtest_list_tests flag. + +A user can ask Google Test to list all tests by specifying the +--gtest_list_tests flag. If output is requested, via --gtest_output=xml +or --gtest_output=json, the tests are listed, with extra information in the +output file. +This script tests such functionality by invoking gtest_list_output_unittest_ + (a program written with Google Test) the command line flags. 
+""" + +import os +import re +import gtest_test_utils + +GTEST_LIST_TESTS_FLAG = '--gtest_list_tests' +GTEST_OUTPUT_FLAG = '--gtest_output' + +EXPECTED_XML = """<\?xml version="1.0" encoding="UTF-8"\?> +<testsuites tests="16" name="AllTests"> + <testsuite name="FooTest" tests="2"> + <testcase name="Test1" file=".*gtest_list_output_unittest_.cc" line="43" /> + <testcase name="Test2" file=".*gtest_list_output_unittest_.cc" line="45" /> + </testsuite> + <testsuite name="FooTestFixture" tests="2"> + <testcase name="Test3" file=".*gtest_list_output_unittest_.cc" line="48" /> + <testcase name="Test4" file=".*gtest_list_output_unittest_.cc" line="49" /> + </testsuite> + <testsuite name="TypedTest/0" tests="2"> + <testcase name="Test7" type_param="int" file=".*gtest_list_output_unittest_.cc" line="60" /> + <testcase name="Test8" type_param="int" file=".*gtest_list_output_unittest_.cc" line="61" /> + </testsuite> + <testsuite name="TypedTest/1" tests="2"> + <testcase name="Test7" type_param="bool" file=".*gtest_list_output_unittest_.cc" line="60" /> + <testcase name="Test8" type_param="bool" file=".*gtest_list_output_unittest_.cc" line="61" /> + </testsuite> + <testsuite name="Single/TypeParameterizedTestSuite/0" tests="2"> + <testcase name="Test9" type_param="int" file=".*gtest_list_output_unittest_.cc" line="66" /> + <testcase name="Test10" type_param="int" file=".*gtest_list_output_unittest_.cc" line="67" /> + </testsuite> + <testsuite name="Single/TypeParameterizedTestSuite/1" tests="2"> + <testcase name="Test9" type_param="bool" file=".*gtest_list_output_unittest_.cc" line="66" /> + <testcase name="Test10" type_param="bool" file=".*gtest_list_output_unittest_.cc" line="67" /> + </testsuite> + <testsuite name="ValueParam/ValueParamTest" tests="4"> + <testcase name="Test5/0" value_param="33" file=".*gtest_list_output_unittest_.cc" line="52" /> + <testcase name="Test5/1" value_param="42" file=".*gtest_list_output_unittest_.cc" line="52" /> + <testcase name="Test6/0" value_param="33" file=".*gtest_list_output_unittest_.cc" line="53" /> + <testcase name="Test6/1" value_param="42" file=".*gtest_list_output_unittest_.cc" line="53" /> + </testsuite> +</testsuites> +""" + +EXPECTED_JSON = """{ + "tests": 16, + "name": "AllTests", + "testsuites": \[ + { + "name": "FooTest", + "tests": 2, + "testsuite": \[ + { + "name": "Test1", + "file": ".*gtest_list_output_unittest_.cc", + "line": 43 + }, + { + "name": "Test2", + "file": ".*gtest_list_output_unittest_.cc", + "line": 45 + } + \] + }, + { + "name": "FooTestFixture", + "tests": 2, + "testsuite": \[ + { + "name": "Test3", + "file": ".*gtest_list_output_unittest_.cc", + "line": 48 + }, + { + "name": "Test4", + "file": ".*gtest_list_output_unittest_.cc", + "line": 49 + } + \] + }, + { + "name": "TypedTest\\\\/0", + "tests": 2, + "testsuite": \[ + { + "name": "Test7", + "type_param": "int", + "file": ".*gtest_list_output_unittest_.cc", + "line": 60 + }, + { + "name": "Test8", + "type_param": "int", + "file": ".*gtest_list_output_unittest_.cc", + "line": 61 + } + \] + }, + { + "name": "TypedTest\\\\/1", + "tests": 2, + "testsuite": \[ + { + "name": "Test7", + "type_param": "bool", + "file": ".*gtest_list_output_unittest_.cc", + "line": 60 + }, + { + "name": "Test8", + "type_param": "bool", + "file": ".*gtest_list_output_unittest_.cc", + "line": 61 + } + \] + }, + { + "name": "Single\\\\/TypeParameterizedTestSuite\\\\/0", + "tests": 2, + "testsuite": \[ + { + "name": "Test9", + "type_param": "int", + "file": ".*gtest_list_output_unittest_.cc", + "line": 66 + 
}, + { + "name": "Test10", + "type_param": "int", + "file": ".*gtest_list_output_unittest_.cc", + "line": 67 + } + \] + }, + { + "name": "Single\\\\/TypeParameterizedTestSuite\\\\/1", + "tests": 2, + "testsuite": \[ + { + "name": "Test9", + "type_param": "bool", + "file": ".*gtest_list_output_unittest_.cc", + "line": 66 + }, + { + "name": "Test10", + "type_param": "bool", + "file": ".*gtest_list_output_unittest_.cc", + "line": 67 + } + \] + }, + { + "name": "ValueParam\\\\/ValueParamTest", + "tests": 4, + "testsuite": \[ + { + "name": "Test5\\\\/0", + "value_param": "33", + "file": ".*gtest_list_output_unittest_.cc", + "line": 52 + }, + { + "name": "Test5\\\\/1", + "value_param": "42", + "file": ".*gtest_list_output_unittest_.cc", + "line": 52 + }, + { + "name": "Test6\\\\/0", + "value_param": "33", + "file": ".*gtest_list_output_unittest_.cc", + "line": 53 + }, + { + "name": "Test6\\\\/1", + "value_param": "42", + "file": ".*gtest_list_output_unittest_.cc", + "line": 53 + } + \] + } + \] +} +""" + + +class GTestListTestsOutputUnitTest(gtest_test_utils.TestCase): + """Unit test for Google Test's list tests with output to file functionality. + """ + + def testXml(self): + """Verifies XML output for listing tests in a Google Test binary. + + Runs a test program that generates an empty XML output, and + tests that the XML output is expected. + """ + self._TestOutput('xml', EXPECTED_XML) + + def testJSON(self): + """Verifies XML output for listing tests in a Google Test binary. + + Runs a test program that generates an empty XML output, and + tests that the XML output is expected. + """ + self._TestOutput('json', EXPECTED_JSON) + + def _GetOutput(self, out_format): + file_path = os.path.join(gtest_test_utils.GetTempDir(), + 'test_out.' + out_format) + gtest_prog_path = gtest_test_utils.GetTestExecutablePath( + 'gtest_list_output_unittest_') + + command = ([ + gtest_prog_path, + '%s=%s:%s' % (GTEST_OUTPUT_FLAG, out_format, file_path), + '--gtest_list_tests' + ]) + environ_copy = os.environ.copy() + p = gtest_test_utils.Subprocess( + command, env=environ_copy, working_dir=gtest_test_utils.GetTempDir()) + + self.assertTrue(p.exited) + self.assertEqual(0, p.exit_code) + self.assertTrue(os.path.isfile(file_path)) + with open(file_path) as f: + result = f.read() + return result + + def _TestOutput(self, test_format, expected_output): + actual = self._GetOutput(test_format) + actual_lines = actual.splitlines() + expected_lines = expected_output.splitlines() + line_count = 0 + for actual_line in actual_lines: + expected_line = expected_lines[line_count] + expected_line_re = re.compile(expected_line.strip()) + self.assertTrue( + expected_line_re.match(actual_line.strip()), + ('actual output of "%s",\n' + 'which does not match expected regex of "%s"\n' + 'on line %d' % (actual, expected_output, line_count))) + line_count = line_count + 1 + + +if __name__ == '__main__': + os.environ['GTEST_STACK_TRACE_DEPTH'] = '1' + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/gtest_list_output_unittest_.cc b/security/nss/gtests/google_test/gtest/test/gtest_list_output_unittest_.cc new file mode 100644 index 0000000000..92b9d4f28e --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_list_output_unittest_.cc @@ -0,0 +1,77 @@ +// Copyright 2018, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: david.schuldenfrei@gmail.com (David Schuldenfrei) + +// Unit test for Google Test's --gtest_list_tests and --gtest_output flag. +// +// A user can ask Google Test to list all tests that will run, +// and have the output saved in a Json/Xml file. +// The tests will not be run after listing. +// +// This program will be invoked from a Python unit test. +// Don't run it directly. + +#include "gtest/gtest.h" + +TEST(FooTest, Test1) {} + +TEST(FooTest, Test2) {} + +class FooTestFixture : public ::testing::Test {}; +TEST_F(FooTestFixture, Test3) {} +TEST_F(FooTestFixture, Test4) {} + +class ValueParamTest : public ::testing::TestWithParam<int> {}; +TEST_P(ValueParamTest, Test5) {} +TEST_P(ValueParamTest, Test6) {} +INSTANTIATE_TEST_SUITE_P(ValueParam, ValueParamTest, ::testing::Values(33, 42)); + +template <typename T> +class TypedTest : public ::testing::Test {}; +typedef testing::Types<int, bool> TypedTestTypes; +TYPED_TEST_SUITE(TypedTest, TypedTestTypes); +TYPED_TEST(TypedTest, Test7) {} +TYPED_TEST(TypedTest, Test8) {} + +template <typename T> +class TypeParameterizedTestSuite : public ::testing::Test {}; +TYPED_TEST_SUITE_P(TypeParameterizedTestSuite); +TYPED_TEST_P(TypeParameterizedTestSuite, Test9) {} +TYPED_TEST_P(TypeParameterizedTestSuite, Test10) {} +REGISTER_TYPED_TEST_SUITE_P(TypeParameterizedTestSuite, Test9, Test10); +typedef testing::Types<int, bool> TypeParameterizedTestSuiteTypes; // NOLINT +INSTANTIATE_TYPED_TEST_SUITE_P(Single, TypeParameterizedTestSuite, + TypeParameterizedTestSuiteTypes); + +int main(int argc, char **argv) { + ::testing::InitGoogleTest(&argc, argv); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_main_unittest.cc b/security/nss/gtests/google_test/gtest/test/gtest_main_unittest.cc new file mode 100644 index 0000000000..eddedeabe8 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_main_unittest.cc @@ -0,0 +1,44 @@ +// Copyright 2006, Google Inc. +// All rights reserved. 
+// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +#include "gtest/gtest.h" + +// Tests that we don't have to define main() when we link to +// gtest_main instead of gtest. + +namespace { + +TEST(GTestMainTest, ShouldSucceed) { +} + +} // namespace + +// We are using the main() function defined in gtest_main.cc, so we +// don't define it here. diff --git a/security/nss/gtests/google_test/gtest/test/gtest_no_test_unittest.cc b/security/nss/gtests/google_test/gtest/test/gtest_no_test_unittest.cc new file mode 100644 index 0000000000..d4f88dbfdf --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_no_test_unittest.cc @@ -0,0 +1,54 @@ +// Copyright 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Tests that a Google Test program that has no test defined can run +// successfully. + +#include "gtest/gtest.h" + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + + // An ad-hoc assertion outside of all tests. + // + // This serves three purposes: + // + // 1. It verifies that an ad-hoc assertion can be executed even if + // no test is defined. + // 2. It verifies that a failed ad-hoc assertion causes the test + // program to fail. + // 3. We had a bug where the XML output won't be generated if an + // assertion is executed before RUN_ALL_TESTS() is called, even + // though --gtest_output=xml is specified. This makes sure the + // bug is fixed and doesn't regress. + EXPECT_EQ(1, 2); + + // The above EXPECT_EQ() should cause RUN_ALL_TESTS() to return non-zero. + return RUN_ALL_TESTS() ? 0 : 1; +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_pred_impl_unittest.cc b/security/nss/gtests/google_test/gtest/test/gtest_pred_impl_unittest.cc new file mode 100644 index 0000000000..bbef9947d9 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_pred_impl_unittest.cc @@ -0,0 +1,2422 @@ +// Copyright 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This file is AUTOMATICALLY GENERATED on 11/05/2019 by command +// 'gen_gtest_pred_impl.py 5'. DO NOT EDIT BY HAND! + +// Regression test for gtest_pred_impl.h +// +// This file is generated by a script and quite long. 
If you intend to +// learn how Google Test works by reading its unit tests, read +// gtest_unittest.cc instead. +// +// This is intended as a regression test for the Google Test predicate +// assertions. We compile it as part of the gtest_unittest target +// only to keep the implementation tidy and compact, as it is quite +// involved to set up the stage for testing Google Test using Google +// Test itself. +// +// Currently, gtest_unittest takes ~11 seconds to run in the testing +// daemon. In the future, if it grows too large and needs much more +// time to finish, we should consider separating this file into a +// stand-alone regression test. + +#include <iostream> + +#include "gtest/gtest.h" +#include "gtest/gtest-spi.h" + +// A user-defined data type. +struct Bool { + explicit Bool(int val) : value(val != 0) {} + + bool operator>(int n) const { return value > Bool(n).value; } + + Bool operator+(const Bool& rhs) const { return Bool(value + rhs.value); } + + bool operator==(const Bool& rhs) const { return value == rhs.value; } + + bool value; +}; + +// Enables Bool to be used in assertions. +std::ostream& operator<<(std::ostream& os, const Bool& x) { + return os << (x.value ? "true" : "false"); +} + +// Sample functions/functors for testing unary predicate assertions. + +// A unary predicate function. +template <typename T1> +bool PredFunction1(T1 v1) { + return v1 > 0; +} + +// The following two functions are needed because a compiler doesn't have +// a context yet to know which template function must be instantiated. +bool PredFunction1Int(int v1) { + return v1 > 0; +} +bool PredFunction1Bool(Bool v1) { + return v1 > 0; +} + +// A unary predicate functor. +struct PredFunctor1 { + template <typename T1> + bool operator()(const T1& v1) { + return v1 > 0; + } +}; + +// A unary predicate-formatter function. +template <typename T1> +testing::AssertionResult PredFormatFunction1(const char* e1, + const T1& v1) { + if (PredFunction1(v1)) + return testing::AssertionSuccess(); + + return testing::AssertionFailure() + << e1 + << " is expected to be positive, but evaluates to " + << v1 << "."; +} + +// A unary predicate-formatter functor. +struct PredFormatFunctor1 { + template <typename T1> + testing::AssertionResult operator()(const char* e1, + const T1& v1) const { + return PredFormatFunction1(e1, v1); + } +}; + +// Tests for {EXPECT|ASSERT}_PRED_FORMAT1. + +class Predicate1Test : public testing::Test { + protected: + void SetUp() override { + expected_to_finish_ = true; + finished_ = false; + n1_ = 0; + } + + void TearDown() override { + // Verifies that each of the predicate's arguments was evaluated + // exactly once. + EXPECT_EQ(1, n1_) << + "The predicate assertion didn't evaluate argument 2 " + "exactly once."; + + // Verifies that the control flow in the test function is expected. + if (expected_to_finish_ && !finished_) { + FAIL() << "The predicate assertion unexpactedly aborted the test."; + } else if (!expected_to_finish_ && finished_) { + FAIL() << "The failed predicate assertion didn't abort the test " + "as expected."; + } + } + + // true if and only if the test function is expected to run to finish. + static bool expected_to_finish_; + + // true if and only if the test function did run to finish. 
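The helpers above mirror the user-facing shape of these assertions: a plain predicate returns bool, while a predicate-formatter also receives the source text of each argument and returns testing::AssertionResult so it can build a readable failure message. A minimal usage sketch (IsPositive is an illustrative name, not taken from this file):

#include "gtest/gtest.h"

// Illustrative predicate-formatter: "expr" is the source text of the
// argument and "value" is its evaluated value.
testing::AssertionResult IsPositive(const char* expr, int value) {
  if (value > 0) return testing::AssertionSuccess();
  return testing::AssertionFailure()
         << expr << " evaluates to " << value << ", which is not positive.";
}

TEST(PredicateAssertionDemo, Usage) {
  EXPECT_PRED1([](int n) { return n > 0; }, 2 + 3);  // plain predicate form
  EXPECT_PRED_FORMAT1(IsPositive, 2 + 3);            // formatted message form
}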
+ static bool finished_; + + static int n1_; +}; + +bool Predicate1Test::expected_to_finish_; +bool Predicate1Test::finished_; +int Predicate1Test::n1_; + +typedef Predicate1Test EXPECT_PRED_FORMAT1Test; +typedef Predicate1Test ASSERT_PRED_FORMAT1Test; +typedef Predicate1Test EXPECT_PRED1Test; +typedef Predicate1Test ASSERT_PRED1Test; + +// Tests a successful EXPECT_PRED1 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED1Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED1(PredFunction1Int, + ++n1_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED1 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED1Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED1(PredFunction1Bool, + Bool(++n1_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED1 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED1Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED1(PredFunctor1(), + ++n1_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED1 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED1Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED1(PredFunctor1(), + Bool(++n1_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED1 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED1Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED1(PredFunction1Int, + n1_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED1 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED1Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED1(PredFunction1Bool, + Bool(n1_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED1 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED1Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED1(PredFunctor1(), + n1_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED1 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED1Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED1(PredFunctor1(), + Bool(n1_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED1 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED1Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED1(PredFunction1Int, + ++n1_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED1 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED1Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED1(PredFunction1Bool, + Bool(++n1_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED1 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED1Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED1(PredFunctor1(), + ++n1_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED1 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED1Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED1(PredFunctor1(), + Bool(++n1_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED1 where the +// predicate-formatter is a function on a built-in type (int). 
+TEST_F(ASSERT_PRED1Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED1(PredFunction1Int, + n1_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED1 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED1Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED1(PredFunction1Bool, + Bool(n1_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED1 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED1Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED1(PredFunctor1(), + n1_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED1 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED1Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED1(PredFunctor1(), + Bool(n1_++)); + finished_ = true; + }, ""); +} + +// Tests a successful EXPECT_PRED_FORMAT1 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT1(PredFormatFunction1, + ++n1_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT1 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED_FORMAT1(PredFormatFunction1, + Bool(++n1_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT1 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT1(PredFormatFunctor1(), + ++n1_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT1 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED_FORMAT1(PredFormatFunctor1(), + Bool(++n1_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED_FORMAT1 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT1(PredFormatFunction1, + n1_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT1 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT1Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT1(PredFormatFunction1, + Bool(n1_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT1 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT1(PredFormatFunctor1(), + n1_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT1 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT1Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT1(PredFormatFunctor1(), + Bool(n1_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED_FORMAT1 where the +// predicate-formatter is a function on a built-in type (int). 
+TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT1(PredFormatFunction1, + ++n1_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT1 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED_FORMAT1(PredFormatFunction1, + Bool(++n1_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT1 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT1(PredFormatFunctor1(), + ++n1_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT1 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED_FORMAT1(PredFormatFunctor1(), + Bool(++n1_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED_FORMAT1 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT1(PredFormatFunction1, + n1_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT1 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT1Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT1(PredFormatFunction1, + Bool(n1_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT1 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT1(PredFormatFunctor1(), + n1_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT1 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT1Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT1(PredFormatFunctor1(), + Bool(n1_++)); + finished_ = true; + }, ""); +} +// Sample functions/functors for testing binary predicate assertions. + +// A binary predicate function. +template <typename T1, typename T2> +bool PredFunction2(T1 v1, T2 v2) { + return v1 + v2 > 0; +} + +// The following two functions are needed because a compiler doesn't have +// a context yet to know which template function must be instantiated. +bool PredFunction2Int(int v1, int v2) { + return v1 + v2 > 0; +} +bool PredFunction2Bool(Bool v1, Bool v2) { + return v1 + v2 > 0; +} + +// A binary predicate functor. +struct PredFunctor2 { + template <typename T1, typename T2> + bool operator()(const T1& v1, + const T2& v2) { + return v1 + v2 > 0; + } +}; + +// A binary predicate-formatter function. +template <typename T1, typename T2> +testing::AssertionResult PredFormatFunction2(const char* e1, + const char* e2, + const T1& v1, + const T2& v2) { + if (PredFunction2(v1, v2)) + return testing::AssertionSuccess(); + + return testing::AssertionFailure() + << e1 << " + " << e2 + << " is expected to be positive, but evaluates to " + << v1 + v2 << "."; +} + +// A binary predicate-formatter functor. 
+struct PredFormatFunctor2 { + template <typename T1, typename T2> + testing::AssertionResult operator()(const char* e1, + const char* e2, + const T1& v1, + const T2& v2) const { + return PredFormatFunction2(e1, e2, v1, v2); + } +}; + +// Tests for {EXPECT|ASSERT}_PRED_FORMAT2. + +class Predicate2Test : public testing::Test { + protected: + void SetUp() override { + expected_to_finish_ = true; + finished_ = false; + n1_ = n2_ = 0; + } + + void TearDown() override { + // Verifies that each of the predicate's arguments was evaluated + // exactly once. + EXPECT_EQ(1, n1_) << + "The predicate assertion didn't evaluate argument 2 " + "exactly once."; + EXPECT_EQ(1, n2_) << + "The predicate assertion didn't evaluate argument 3 " + "exactly once."; + + // Verifies that the control flow in the test function is expected. + if (expected_to_finish_ && !finished_) { + FAIL() << "The predicate assertion unexpactedly aborted the test."; + } else if (!expected_to_finish_ && finished_) { + FAIL() << "The failed predicate assertion didn't abort the test " + "as expected."; + } + } + + // true if and only if the test function is expected to run to finish. + static bool expected_to_finish_; + + // true if and only if the test function did run to finish. + static bool finished_; + + static int n1_; + static int n2_; +}; + +bool Predicate2Test::expected_to_finish_; +bool Predicate2Test::finished_; +int Predicate2Test::n1_; +int Predicate2Test::n2_; + +typedef Predicate2Test EXPECT_PRED_FORMAT2Test; +typedef Predicate2Test ASSERT_PRED_FORMAT2Test; +typedef Predicate2Test EXPECT_PRED2Test; +typedef Predicate2Test ASSERT_PRED2Test; + +// Tests a successful EXPECT_PRED2 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED2Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED2(PredFunction2Int, + ++n1_, + ++n2_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED2 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED2Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED2(PredFunction2Bool, + Bool(++n1_), + Bool(++n2_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED2 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED2Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED2(PredFunctor2(), + ++n1_, + ++n2_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED2 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED2Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED2(PredFunctor2(), + Bool(++n1_), + Bool(++n2_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED2 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED2Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED2(PredFunction2Int, + n1_++, + n2_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED2 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED2Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED2(PredFunction2Bool, + Bool(n1_++), + Bool(n2_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED2 where the +// predicate-formatter is a functor on a built-in type (int). 
+TEST_F(EXPECT_PRED2Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED2(PredFunctor2(), + n1_++, + n2_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED2 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED2Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED2(PredFunctor2(), + Bool(n1_++), + Bool(n2_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED2 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED2Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED2(PredFunction2Int, + ++n1_, + ++n2_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED2 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED2Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED2(PredFunction2Bool, + Bool(++n1_), + Bool(++n2_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED2 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED2Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED2(PredFunctor2(), + ++n1_, + ++n2_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED2 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED2Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED2(PredFunctor2(), + Bool(++n1_), + Bool(++n2_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED2 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED2Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED2(PredFunction2Int, + n1_++, + n2_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED2 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED2Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED2(PredFunction2Bool, + Bool(n1_++), + Bool(n2_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED2 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED2Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED2(PredFunctor2(), + n1_++, + n2_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED2 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED2Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED2(PredFunctor2(), + Bool(n1_++), + Bool(n2_++)); + finished_ = true; + }, ""); +} + +// Tests a successful EXPECT_PRED_FORMAT2 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT2(PredFormatFunction2, + ++n1_, + ++n2_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT2 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED_FORMAT2(PredFormatFunction2, + Bool(++n1_), + Bool(++n2_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT2 where the +// predicate-formatter is a functor on a built-in type (int). 
+TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT2(PredFormatFunctor2(), + ++n1_, + ++n2_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT2 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED_FORMAT2(PredFormatFunctor2(), + Bool(++n1_), + Bool(++n2_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED_FORMAT2 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(PredFormatFunction2, + n1_++, + n2_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT2 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT2Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(PredFormatFunction2, + Bool(n1_++), + Bool(n2_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT2 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(PredFormatFunctor2(), + n1_++, + n2_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT2 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT2Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(PredFormatFunctor2(), + Bool(n1_++), + Bool(n2_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED_FORMAT2 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT2(PredFormatFunction2, + ++n1_, + ++n2_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT2 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED_FORMAT2(PredFormatFunction2, + Bool(++n1_), + Bool(++n2_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT2 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT2(PredFormatFunctor2(), + ++n1_, + ++n2_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT2 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED_FORMAT2(PredFormatFunctor2(), + Bool(++n1_), + Bool(++n2_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED_FORMAT2 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT2(PredFormatFunction2, + n1_++, + n2_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT2 where the +// predicate-formatter is a function on a user-defined type (Bool). 
+TEST_F(ASSERT_PRED_FORMAT2Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT2(PredFormatFunction2, + Bool(n1_++), + Bool(n2_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT2 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT2(PredFormatFunctor2(), + n1_++, + n2_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT2 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT2Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT2(PredFormatFunctor2(), + Bool(n1_++), + Bool(n2_++)); + finished_ = true; + }, ""); +} +// Sample functions/functors for testing ternary predicate assertions. + +// A ternary predicate function. +template <typename T1, typename T2, typename T3> +bool PredFunction3(T1 v1, T2 v2, T3 v3) { + return v1 + v2 + v3 > 0; +} + +// The following two functions are needed because a compiler doesn't have +// a context yet to know which template function must be instantiated. +bool PredFunction3Int(int v1, int v2, int v3) { + return v1 + v2 + v3 > 0; +} +bool PredFunction3Bool(Bool v1, Bool v2, Bool v3) { + return v1 + v2 + v3 > 0; +} + +// A ternary predicate functor. +struct PredFunctor3 { + template <typename T1, typename T2, typename T3> + bool operator()(const T1& v1, + const T2& v2, + const T3& v3) { + return v1 + v2 + v3 > 0; + } +}; + +// A ternary predicate-formatter function. +template <typename T1, typename T2, typename T3> +testing::AssertionResult PredFormatFunction3(const char* e1, + const char* e2, + const char* e3, + const T1& v1, + const T2& v2, + const T3& v3) { + if (PredFunction3(v1, v2, v3)) + return testing::AssertionSuccess(); + + return testing::AssertionFailure() + << e1 << " + " << e2 << " + " << e3 + << " is expected to be positive, but evaluates to " + << v1 + v2 + v3 << "."; +} + +// A ternary predicate-formatter functor. +struct PredFormatFunctor3 { + template <typename T1, typename T2, typename T3> + testing::AssertionResult operator()(const char* e1, + const char* e2, + const char* e3, + const T1& v1, + const T2& v2, + const T3& v3) const { + return PredFormatFunction3(e1, e2, e3, v1, v2, v3); + } +}; + +// Tests for {EXPECT|ASSERT}_PRED_FORMAT3. + +class Predicate3Test : public testing::Test { + protected: + void SetUp() override { + expected_to_finish_ = true; + finished_ = false; + n1_ = n2_ = n3_ = 0; + } + + void TearDown() override { + // Verifies that each of the predicate's arguments was evaluated + // exactly once. + EXPECT_EQ(1, n1_) << + "The predicate assertion didn't evaluate argument 2 " + "exactly once."; + EXPECT_EQ(1, n2_) << + "The predicate assertion didn't evaluate argument 3 " + "exactly once."; + EXPECT_EQ(1, n3_) << + "The predicate assertion didn't evaluate argument 4 " + "exactly once."; + + // Verifies that the control flow in the test function is expected. + if (expected_to_finish_ && !finished_) { + FAIL() << "The predicate assertion unexpactedly aborted the test."; + } else if (!expected_to_finish_ && finished_) { + FAIL() << "The failed predicate assertion didn't abort the test " + "as expected."; + } + } + + // true if and only if the test function is expected to run to finish. 
+ static bool expected_to_finish_; + + // true if and only if the test function did run to finish. + static bool finished_; + + static int n1_; + static int n2_; + static int n3_; +}; + +bool Predicate3Test::expected_to_finish_; +bool Predicate3Test::finished_; +int Predicate3Test::n1_; +int Predicate3Test::n2_; +int Predicate3Test::n3_; + +typedef Predicate3Test EXPECT_PRED_FORMAT3Test; +typedef Predicate3Test ASSERT_PRED_FORMAT3Test; +typedef Predicate3Test EXPECT_PRED3Test; +typedef Predicate3Test ASSERT_PRED3Test; + +// Tests a successful EXPECT_PRED3 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED3Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED3(PredFunction3Int, + ++n1_, + ++n2_, + ++n3_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED3 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED3Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED3(PredFunction3Bool, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED3 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED3Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED3(PredFunctor3(), + ++n1_, + ++n2_, + ++n3_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED3 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED3Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED3(PredFunctor3(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED3 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED3Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED3(PredFunction3Int, + n1_++, + n2_++, + n3_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED3 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED3Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED3(PredFunction3Bool, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED3 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED3Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED3(PredFunctor3(), + n1_++, + n2_++, + n3_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED3 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED3Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED3(PredFunctor3(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED3 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED3Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED3(PredFunction3Int, + ++n1_, + ++n2_, + ++n3_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED3 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED3Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED3(PredFunction3Bool, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED3 where the +// predicate-formatter is a functor on a built-in type (int). 
+TEST_F(ASSERT_PRED3Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED3(PredFunctor3(), + ++n1_, + ++n2_, + ++n3_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED3 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED3Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED3(PredFunctor3(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED3 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED3Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED3(PredFunction3Int, + n1_++, + n2_++, + n3_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED3 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED3Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED3(PredFunction3Bool, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED3 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED3Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED3(PredFunctor3(), + n1_++, + n2_++, + n3_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED3 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED3Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED3(PredFunctor3(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++)); + finished_ = true; + }, ""); +} + +// Tests a successful EXPECT_PRED_FORMAT3 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT3(PredFormatFunction3, + ++n1_, + ++n2_, + ++n3_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT3 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED_FORMAT3(PredFormatFunction3, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT3 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT3(PredFormatFunctor3(), + ++n1_, + ++n2_, + ++n3_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT3 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED_FORMAT3(PredFormatFunctor3(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED_FORMAT3 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT3(PredFormatFunction3, + n1_++, + n2_++, + n3_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT3 where the +// predicate-formatter is a function on a user-defined type (Bool). 
+TEST_F(EXPECT_PRED_FORMAT3Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT3(PredFormatFunction3, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT3 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT3(PredFormatFunctor3(), + n1_++, + n2_++, + n3_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT3 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT3Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT3(PredFormatFunctor3(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED_FORMAT3 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT3(PredFormatFunction3, + ++n1_, + ++n2_, + ++n3_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT3 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED_FORMAT3(PredFormatFunction3, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT3 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT3(PredFormatFunctor3(), + ++n1_, + ++n2_, + ++n3_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT3 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED_FORMAT3(PredFormatFunctor3(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED_FORMAT3 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT3(PredFormatFunction3, + n1_++, + n2_++, + n3_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT3 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT3Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT3(PredFormatFunction3, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT3 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT3(PredFormatFunctor3(), + n1_++, + n2_++, + n3_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT3 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT3Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT3(PredFormatFunctor3(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++)); + finished_ = true; + }, ""); +} +// Sample functions/functors for testing 4-ary predicate assertions. 
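The ternary, 4-ary, and 5-ary sections of this file all exercise the same predicate-formatter protocol through gtest's own failure-catching harness. As a reference point only, here is a minimal sketch of how ordinary test code would use one of these macros with its own formatter; IsStrictlyOrdered and OrderingSketch are illustrative names, not part of the file above.

#include "gtest/gtest.h"

// A 3-ary predicate-formatter: receives the argument expressions as text plus
// their values, and returns an AssertionResult carrying a readable message.
testing::AssertionResult IsStrictlyOrdered(const char* a_expr,
                                           const char* b_expr,
                                           const char* c_expr,
                                           int a, int b, int c) {
  if (a < b && b < c) return testing::AssertionSuccess();
  return testing::AssertionFailure()
         << a_expr << " < " << b_expr << " < " << c_expr
         << " does not hold: got " << a << ", " << b << ", " << c << ".";
}

TEST(OrderingSketch, UsesPredicateFormatter) {
  int calls = 0;
  // Each argument is evaluated exactly once -- the property the n*_ counters
  // in the fixtures above verify -- so side effects like ++calls are safe.
  EXPECT_PRED_FORMAT3(IsStrictlyOrdered, ++calls, 2, 3);
  EXPECT_EQ(1, calls);
}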
+ +// A 4-ary predicate function. +template <typename T1, typename T2, typename T3, typename T4> +bool PredFunction4(T1 v1, T2 v2, T3 v3, T4 v4) { + return v1 + v2 + v3 + v4 > 0; +} + +// The following two functions are needed because a compiler doesn't have +// a context yet to know which template function must be instantiated. +bool PredFunction4Int(int v1, int v2, int v3, int v4) { + return v1 + v2 + v3 + v4 > 0; +} +bool PredFunction4Bool(Bool v1, Bool v2, Bool v3, Bool v4) { + return v1 + v2 + v3 + v4 > 0; +} + +// A 4-ary predicate functor. +struct PredFunctor4 { + template <typename T1, typename T2, typename T3, typename T4> + bool operator()(const T1& v1, + const T2& v2, + const T3& v3, + const T4& v4) { + return v1 + v2 + v3 + v4 > 0; + } +}; + +// A 4-ary predicate-formatter function. +template <typename T1, typename T2, typename T3, typename T4> +testing::AssertionResult PredFormatFunction4(const char* e1, + const char* e2, + const char* e3, + const char* e4, + const T1& v1, + const T2& v2, + const T3& v3, + const T4& v4) { + if (PredFunction4(v1, v2, v3, v4)) + return testing::AssertionSuccess(); + + return testing::AssertionFailure() + << e1 << " + " << e2 << " + " << e3 << " + " << e4 + << " is expected to be positive, but evaluates to " + << v1 + v2 + v3 + v4 << "."; +} + +// A 4-ary predicate-formatter functor. +struct PredFormatFunctor4 { + template <typename T1, typename T2, typename T3, typename T4> + testing::AssertionResult operator()(const char* e1, + const char* e2, + const char* e3, + const char* e4, + const T1& v1, + const T2& v2, + const T3& v3, + const T4& v4) const { + return PredFormatFunction4(e1, e2, e3, e4, v1, v2, v3, v4); + } +}; + +// Tests for {EXPECT|ASSERT}_PRED_FORMAT4. + +class Predicate4Test : public testing::Test { + protected: + void SetUp() override { + expected_to_finish_ = true; + finished_ = false; + n1_ = n2_ = n3_ = n4_ = 0; + } + + void TearDown() override { + // Verifies that each of the predicate's arguments was evaluated + // exactly once. + EXPECT_EQ(1, n1_) << + "The predicate assertion didn't evaluate argument 2 " + "exactly once."; + EXPECT_EQ(1, n2_) << + "The predicate assertion didn't evaluate argument 3 " + "exactly once."; + EXPECT_EQ(1, n3_) << + "The predicate assertion didn't evaluate argument 4 " + "exactly once."; + EXPECT_EQ(1, n4_) << + "The predicate assertion didn't evaluate argument 5 " + "exactly once."; + + // Verifies that the control flow in the test function is expected. + if (expected_to_finish_ && !finished_) { + FAIL() << "The predicate assertion unexpactedly aborted the test."; + } else if (!expected_to_finish_ && finished_) { + FAIL() << "The failed predicate assertion didn't abort the test " + "as expected."; + } + } + + // true if and only if the test function is expected to run to finish. + static bool expected_to_finish_; + + // true if and only if the test function did run to finish. + static bool finished_; + + static int n1_; + static int n2_; + static int n3_; + static int n4_; +}; + +bool Predicate4Test::expected_to_finish_; +bool Predicate4Test::finished_; +int Predicate4Test::n1_; +int Predicate4Test::n2_; +int Predicate4Test::n3_; +int Predicate4Test::n4_; + +typedef Predicate4Test EXPECT_PRED_FORMAT4Test; +typedef Predicate4Test ASSERT_PRED_FORMAT4Test; +typedef Predicate4Test EXPECT_PRED4Test; +typedef Predicate4Test ASSERT_PRED4Test; + +// Tests a successful EXPECT_PRED4 where the +// predicate-formatter is a function on a built-in type (int). 
+TEST_F(EXPECT_PRED4Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED4(PredFunction4Int, + ++n1_, + ++n2_, + ++n3_, + ++n4_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED4 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED4Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED4(PredFunction4Bool, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED4 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED4Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED4(PredFunctor4(), + ++n1_, + ++n2_, + ++n3_, + ++n4_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED4 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED4Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED4(PredFunctor4(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED4 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED4Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED4(PredFunction4Int, + n1_++, + n2_++, + n3_++, + n4_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED4 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED4Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED4(PredFunction4Bool, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED4 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED4Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED4(PredFunctor4(), + n1_++, + n2_++, + n3_++, + n4_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED4 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED4Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED4(PredFunctor4(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED4 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED4Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED4(PredFunction4Int, + ++n1_, + ++n2_, + ++n3_, + ++n4_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED4 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED4Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED4(PredFunction4Bool, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED4 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED4Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED4(PredFunctor4(), + ++n1_, + ++n2_, + ++n3_, + ++n4_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED4 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED4Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED4(PredFunctor4(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED4 where the +// predicate-formatter is a function on a built-in type (int). 
+TEST_F(ASSERT_PRED4Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED4(PredFunction4Int, + n1_++, + n2_++, + n3_++, + n4_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED4 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED4Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED4(PredFunction4Bool, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED4 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED4Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED4(PredFunctor4(), + n1_++, + n2_++, + n3_++, + n4_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED4 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED4Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED4(PredFunctor4(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++)); + finished_ = true; + }, ""); +} + +// Tests a successful EXPECT_PRED_FORMAT4 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT4(PredFormatFunction4, + ++n1_, + ++n2_, + ++n3_, + ++n4_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT4 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED_FORMAT4(PredFormatFunction4, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT4 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT4(PredFormatFunctor4(), + ++n1_, + ++n2_, + ++n3_, + ++n4_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT4 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED_FORMAT4(PredFormatFunctor4(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED_FORMAT4 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT4(PredFormatFunction4, + n1_++, + n2_++, + n3_++, + n4_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT4 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT4Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT4(PredFormatFunction4, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT4 where the +// predicate-formatter is a functor on a built-in type (int). 
+TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT4(PredFormatFunctor4(), + n1_++, + n2_++, + n3_++, + n4_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT4 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT4Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT4(PredFormatFunctor4(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED_FORMAT4 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT4(PredFormatFunction4, + ++n1_, + ++n2_, + ++n3_, + ++n4_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT4 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED_FORMAT4(PredFormatFunction4, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT4 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT4(PredFormatFunctor4(), + ++n1_, + ++n2_, + ++n3_, + ++n4_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT4 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED_FORMAT4(PredFormatFunctor4(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED_FORMAT4 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT4(PredFormatFunction4, + n1_++, + n2_++, + n3_++, + n4_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT4 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT4Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT4(PredFormatFunction4, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT4 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT4(PredFormatFunctor4(), + n1_++, + n2_++, + n3_++, + n4_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT4 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT4Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT4(PredFormatFunctor4(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++)); + finished_ = true; + }, ""); +} +// Sample functions/functors for testing 5-ary predicate assertions. + +// A 5-ary predicate function. 
+template <typename T1, typename T2, typename T3, typename T4, typename T5> +bool PredFunction5(T1 v1, T2 v2, T3 v3, T4 v4, T5 v5) { + return v1 + v2 + v3 + v4 + v5 > 0; +} + +// The following two functions are needed because a compiler doesn't have +// a context yet to know which template function must be instantiated. +bool PredFunction5Int(int v1, int v2, int v3, int v4, int v5) { + return v1 + v2 + v3 + v4 + v5 > 0; +} +bool PredFunction5Bool(Bool v1, Bool v2, Bool v3, Bool v4, Bool v5) { + return v1 + v2 + v3 + v4 + v5 > 0; +} + +// A 5-ary predicate functor. +struct PredFunctor5 { + template <typename T1, typename T2, typename T3, typename T4, typename T5> + bool operator()(const T1& v1, + const T2& v2, + const T3& v3, + const T4& v4, + const T5& v5) { + return v1 + v2 + v3 + v4 + v5 > 0; + } +}; + +// A 5-ary predicate-formatter function. +template <typename T1, typename T2, typename T3, typename T4, typename T5> +testing::AssertionResult PredFormatFunction5(const char* e1, + const char* e2, + const char* e3, + const char* e4, + const char* e5, + const T1& v1, + const T2& v2, + const T3& v3, + const T4& v4, + const T5& v5) { + if (PredFunction5(v1, v2, v3, v4, v5)) + return testing::AssertionSuccess(); + + return testing::AssertionFailure() + << e1 << " + " << e2 << " + " << e3 << " + " << e4 << " + " << e5 + << " is expected to be positive, but evaluates to " + << v1 + v2 + v3 + v4 + v5 << "."; +} + +// A 5-ary predicate-formatter functor. +struct PredFormatFunctor5 { + template <typename T1, typename T2, typename T3, typename T4, typename T5> + testing::AssertionResult operator()(const char* e1, + const char* e2, + const char* e3, + const char* e4, + const char* e5, + const T1& v1, + const T2& v2, + const T3& v3, + const T4& v4, + const T5& v5) const { + return PredFormatFunction5(e1, e2, e3, e4, e5, v1, v2, v3, v4, v5); + } +}; + +// Tests for {EXPECT|ASSERT}_PRED_FORMAT5. + +class Predicate5Test : public testing::Test { + protected: + void SetUp() override { + expected_to_finish_ = true; + finished_ = false; + n1_ = n2_ = n3_ = n4_ = n5_ = 0; + } + + void TearDown() override { + // Verifies that each of the predicate's arguments was evaluated + // exactly once. + EXPECT_EQ(1, n1_) << + "The predicate assertion didn't evaluate argument 2 " + "exactly once."; + EXPECT_EQ(1, n2_) << + "The predicate assertion didn't evaluate argument 3 " + "exactly once."; + EXPECT_EQ(1, n3_) << + "The predicate assertion didn't evaluate argument 4 " + "exactly once."; + EXPECT_EQ(1, n4_) << + "The predicate assertion didn't evaluate argument 5 " + "exactly once."; + EXPECT_EQ(1, n5_) << + "The predicate assertion didn't evaluate argument 6 " + "exactly once."; + + // Verifies that the control flow in the test function is expected. + if (expected_to_finish_ && !finished_) { + FAIL() << "The predicate assertion unexpactedly aborted the test."; + } else if (!expected_to_finish_ && finished_) { + FAIL() << "The failed predicate assertion didn't abort the test " + "as expected."; + } + } + + // true if and only if the test function is expected to run to finish. + static bool expected_to_finish_; + + // true if and only if the test function did run to finish. 
+ static bool finished_; + + static int n1_; + static int n2_; + static int n3_; + static int n4_; + static int n5_; +}; + +bool Predicate5Test::expected_to_finish_; +bool Predicate5Test::finished_; +int Predicate5Test::n1_; +int Predicate5Test::n2_; +int Predicate5Test::n3_; +int Predicate5Test::n4_; +int Predicate5Test::n5_; + +typedef Predicate5Test EXPECT_PRED_FORMAT5Test; +typedef Predicate5Test ASSERT_PRED_FORMAT5Test; +typedef Predicate5Test EXPECT_PRED5Test; +typedef Predicate5Test ASSERT_PRED5Test; + +// Tests a successful EXPECT_PRED5 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED5Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED5(PredFunction5Int, + ++n1_, + ++n2_, + ++n3_, + ++n4_, + ++n5_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED5 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED5Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED5(PredFunction5Bool, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_), + Bool(++n5_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED5 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED5Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED5(PredFunctor5(), + ++n1_, + ++n2_, + ++n3_, + ++n4_, + ++n5_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED5 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED5Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED5(PredFunctor5(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_), + Bool(++n5_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED5 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED5Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED5(PredFunction5Int, + n1_++, + n2_++, + n3_++, + n4_++, + n5_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED5 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED5Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED5(PredFunction5Bool, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++), + Bool(n5_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED5 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED5Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED5(PredFunctor5(), + n1_++, + n2_++, + n3_++, + n4_++, + n5_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED5 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED5Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED5(PredFunctor5(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++), + Bool(n5_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED5 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED5Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED5(PredFunction5Int, + ++n1_, + ++n2_, + ++n3_, + ++n4_, + ++n5_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED5 where the +// predicate-formatter is a function on a user-defined type (Bool). 
+TEST_F(ASSERT_PRED5Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED5(PredFunction5Bool, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_), + Bool(++n5_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED5 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED5Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED5(PredFunctor5(), + ++n1_, + ++n2_, + ++n3_, + ++n4_, + ++n5_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED5 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED5Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED5(PredFunctor5(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_), + Bool(++n5_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED5 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED5Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED5(PredFunction5Int, + n1_++, + n2_++, + n3_++, + n4_++, + n5_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED5 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED5Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED5(PredFunction5Bool, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++), + Bool(n5_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED5 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED5Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED5(PredFunctor5(), + n1_++, + n2_++, + n3_++, + n4_++, + n5_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED5 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED5Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED5(PredFunctor5(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++), + Bool(n5_++)); + finished_ = true; + }, ""); +} + +// Tests a successful EXPECT_PRED_FORMAT5 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT5(PredFormatFunction5, + ++n1_, + ++n2_, + ++n3_, + ++n4_, + ++n5_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT5 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnUserTypeSuccess) { + EXPECT_PRED_FORMAT5(PredFormatFunction5, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_), + Bool(++n5_)); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT5 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnBuiltInTypeSuccess) { + EXPECT_PRED_FORMAT5(PredFormatFunctor5(), + ++n1_, + ++n2_, + ++n3_, + ++n4_, + ++n5_); + finished_ = true; +} + +// Tests a successful EXPECT_PRED_FORMAT5 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnUserTypeSuccess) { + EXPECT_PRED_FORMAT5(PredFormatFunctor5(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_), + Bool(++n5_)); + finished_ = true; +} + +// Tests a failed EXPECT_PRED_FORMAT5 where the +// predicate-formatter is a function on a built-in type (int). 
+TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT5(PredFormatFunction5, + n1_++, + n2_++, + n3_++, + n4_++, + n5_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT5 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT5Test, FunctionOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT5(PredFormatFunction5, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++), + Bool(n5_++)); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT5 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnBuiltInTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT5(PredFormatFunctor5(), + n1_++, + n2_++, + n3_++, + n4_++, + n5_++); + finished_ = true; + }, ""); +} + +// Tests a failed EXPECT_PRED_FORMAT5 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(EXPECT_PRED_FORMAT5Test, FunctorOnUserTypeFailure) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT5(PredFormatFunctor5(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++), + Bool(n5_++)); + finished_ = true; + }, ""); +} + +// Tests a successful ASSERT_PRED_FORMAT5 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT5(PredFormatFunction5, + ++n1_, + ++n2_, + ++n3_, + ++n4_, + ++n5_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT5 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnUserTypeSuccess) { + ASSERT_PRED_FORMAT5(PredFormatFunction5, + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_), + Bool(++n5_)); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT5 where the +// predicate-formatter is a functor on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnBuiltInTypeSuccess) { + ASSERT_PRED_FORMAT5(PredFormatFunctor5(), + ++n1_, + ++n2_, + ++n3_, + ++n4_, + ++n5_); + finished_ = true; +} + +// Tests a successful ASSERT_PRED_FORMAT5 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnUserTypeSuccess) { + ASSERT_PRED_FORMAT5(PredFormatFunctor5(), + Bool(++n1_), + Bool(++n2_), + Bool(++n3_), + Bool(++n4_), + Bool(++n5_)); + finished_ = true; +} + +// Tests a failed ASSERT_PRED_FORMAT5 where the +// predicate-formatter is a function on a built-in type (int). +TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT5(PredFormatFunction5, + n1_++, + n2_++, + n3_++, + n4_++, + n5_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT5 where the +// predicate-formatter is a function on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT5Test, FunctionOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT5(PredFormatFunction5, + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++), + Bool(n5_++)); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT5 where the +// predicate-formatter is a functor on a built-in type (int). 
+TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnBuiltInTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT5(PredFormatFunctor5(), + n1_++, + n2_++, + n3_++, + n4_++, + n5_++); + finished_ = true; + }, ""); +} + +// Tests a failed ASSERT_PRED_FORMAT5 where the +// predicate-formatter is a functor on a user-defined type (Bool). +TEST_F(ASSERT_PRED_FORMAT5Test, FunctorOnUserTypeFailure) { + expected_to_finish_ = false; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT5(PredFormatFunctor5(), + Bool(n1_++), + Bool(n2_++), + Bool(n3_++), + Bool(n4_++), + Bool(n5_++)); + finished_ = true; + }, ""); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_premature_exit_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_premature_exit_test.cc new file mode 100644 index 0000000000..1d1187eff0 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_premature_exit_test.cc @@ -0,0 +1,126 @@ +// Copyright 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Tests that Google Test manipulates the premature-exit-detection +// file correctly. + +#include <stdio.h> + +#include "gtest/gtest.h" + +using ::testing::InitGoogleTest; +using ::testing::Test; +using ::testing::internal::posix::GetEnv; +using ::testing::internal::posix::Stat; +using ::testing::internal::posix::StatStruct; + +namespace { + +class PrematureExitTest : public Test { + public: + // Returns true if and only if the given file exists. + static bool FileExists(const char* filepath) { + StatStruct stat; + return Stat(filepath, &stat) == 0; + } + + protected: + PrematureExitTest() { + premature_exit_file_path_ = GetEnv("TEST_PREMATURE_EXIT_FILE"); + + // Normalize NULL to "" for ease of handling. + if (premature_exit_file_path_ == nullptr) { + premature_exit_file_path_ = ""; + } + } + + // Returns true if and only if the premature-exit file exists. 
+ bool PrematureExitFileExists() const { + return FileExists(premature_exit_file_path_); + } + + const char* premature_exit_file_path_; +}; + +typedef PrematureExitTest PrematureExitDeathTest; + +// Tests that: +// - the premature-exit file exists during the execution of a +// death test (EXPECT_DEATH*), and +// - a death test doesn't interfere with the main test process's +// handling of the premature-exit file. +TEST_F(PrematureExitDeathTest, FileExistsDuringExecutionOfDeathTest) { + if (*premature_exit_file_path_ == '\0') { + return; + } + + EXPECT_DEATH_IF_SUPPORTED({ + // If the file exists, crash the process such that the main test + // process will catch the (expected) crash and report a success; + // otherwise don't crash, which will cause the main test process + // to report that the death test has failed. + if (PrematureExitFileExists()) { + exit(1); + } + }, ""); +} + +// Tests that the premature-exit file exists during the execution of a +// normal (non-death) test. +TEST_F(PrematureExitTest, PrematureExitFileExistsDuringTestExecution) { + if (*premature_exit_file_path_ == '\0') { + return; + } + + EXPECT_TRUE(PrematureExitFileExists()) + << " file " << premature_exit_file_path_ + << " should exist during test execution, but doesn't."; +} + +} // namespace + +int main(int argc, char **argv) { + InitGoogleTest(&argc, argv); + const int exit_code = RUN_ALL_TESTS(); + + // Test that the premature-exit file is deleted upon return from + // RUN_ALL_TESTS(). + const char* const filepath = GetEnv("TEST_PREMATURE_EXIT_FILE"); + if (filepath != nullptr && *filepath != '\0') { + if (PrematureExitTest::FileExists(filepath)) { + printf( + "File %s shouldn't exist after the test program finishes, but does.", + filepath); + return 1; + } + } + + return exit_code; +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_prod_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_prod_test.cc new file mode 100644 index 0000000000..ede81a0d17 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_prod_test.cc @@ -0,0 +1,56 @@ +// Copyright 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Unit test for gtest_prod.h. + +#include "production.h" +#include "gtest/gtest.h" + +// Tests that private members can be accessed from a TEST declared as +// a friend of the class. +TEST(PrivateCodeTest, CanAccessPrivateMembers) { + PrivateCode a; + EXPECT_EQ(0, a.x_); + + a.set_x(1); + EXPECT_EQ(1, a.x_); +} + +typedef testing::Test PrivateCodeFixtureTest; + +// Tests that private members can be accessed from a TEST_F declared +// as a friend of the class. +TEST_F(PrivateCodeFixtureTest, CanAccessPrivateMembers) { + PrivateCode a; + EXPECT_EQ(0, a.x_); + + a.set_x(2); + EXPECT_EQ(2, a.x_); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_repeat_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_repeat_test.cc new file mode 100644 index 0000000000..7da4a15ea1 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_repeat_test.cc @@ -0,0 +1,233 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Tests the --gtest_repeat=number flag. 
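Before the implementation, a note on how the flag under test is normally driven: in everyday use --gtest_repeat is passed on the command line and parsed by InitGoogleTest(), so a repeatable test program needs nothing special. A minimal sketch follows; the test and binary names are illustrative, while the flag semantics in the comments are the documented ones.

// Typical invocations of a gtest binary built from code like this:
//   ./my_test --gtest_repeat=100
//   ./my_test --gtest_repeat=-1 --gtest_break_on_failure
// A negative repeat count loops forever; --gtest_break_on_failure stops at
// the first failing assertion.
#include "gtest/gtest.h"

TEST(RepeatSketch, Passes) { EXPECT_EQ(2 + 2, 4); }

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);  // parses --gtest_repeat among other flags
  return RUN_ALL_TESTS();  // reruns the whole suite the requested number of times
}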
+ +#include <stdlib.h> +#include <iostream> +#include "gtest/gtest.h" +#include "src/gtest-internal-inl.h" + +namespace testing { + +GTEST_DECLARE_string_(death_test_style); +GTEST_DECLARE_string_(filter); +GTEST_DECLARE_int32_(repeat); + +} // namespace testing + +using testing::GTEST_FLAG(death_test_style); +using testing::GTEST_FLAG(filter); +using testing::GTEST_FLAG(repeat); + +namespace { + +// We need this when we are testing Google Test itself and therefore +// cannot use Google Test assertions. +#define GTEST_CHECK_INT_EQ_(expected, actual) \ + do {\ + const int expected_val = (expected);\ + const int actual_val = (actual);\ + if (::testing::internal::IsTrue(expected_val != actual_val)) {\ + ::std::cout << "Value of: " #actual "\n"\ + << " Actual: " << actual_val << "\n"\ + << "Expected: " #expected "\n"\ + << "Which is: " << expected_val << "\n";\ + ::testing::internal::posix::Abort();\ + }\ + } while (::testing::internal::AlwaysFalse()) + + +// Used for verifying that global environment set-up and tear-down are +// inside the --gtest_repeat loop. + +int g_environment_set_up_count = 0; +int g_environment_tear_down_count = 0; + +class MyEnvironment : public testing::Environment { + public: + MyEnvironment() {} + void SetUp() override { g_environment_set_up_count++; } + void TearDown() override { g_environment_tear_down_count++; } +}; + +// A test that should fail. + +int g_should_fail_count = 0; + +TEST(FooTest, ShouldFail) { + g_should_fail_count++; + EXPECT_EQ(0, 1) << "Expected failure."; +} + +// A test that should pass. + +int g_should_pass_count = 0; + +TEST(FooTest, ShouldPass) { + g_should_pass_count++; +} + +// A test that contains a thread-safe death test and a fast death +// test. It should pass. + +int g_death_test_count = 0; + +TEST(BarDeathTest, ThreadSafeAndFast) { + g_death_test_count++; + + GTEST_FLAG(death_test_style) = "threadsafe"; + EXPECT_DEATH_IF_SUPPORTED(::testing::internal::posix::Abort(), ""); + + GTEST_FLAG(death_test_style) = "fast"; + EXPECT_DEATH_IF_SUPPORTED(::testing::internal::posix::Abort(), ""); +} + +int g_param_test_count = 0; + +const int kNumberOfParamTests = 10; + +class MyParamTest : public testing::TestWithParam<int> {}; + +TEST_P(MyParamTest, ShouldPass) { + GTEST_CHECK_INT_EQ_(g_param_test_count % kNumberOfParamTests, GetParam()); + g_param_test_count++; +} +INSTANTIATE_TEST_SUITE_P(MyParamSequence, + MyParamTest, + testing::Range(0, kNumberOfParamTests)); + +// Resets the count for each test. +void ResetCounts() { + g_environment_set_up_count = 0; + g_environment_tear_down_count = 0; + g_should_fail_count = 0; + g_should_pass_count = 0; + g_death_test_count = 0; + g_param_test_count = 0; +} + +// Checks that the count for each test is expected. +void CheckCounts(int expected) { + GTEST_CHECK_INT_EQ_(expected, g_environment_set_up_count); + GTEST_CHECK_INT_EQ_(expected, g_environment_tear_down_count); + GTEST_CHECK_INT_EQ_(expected, g_should_fail_count); + GTEST_CHECK_INT_EQ_(expected, g_should_pass_count); + GTEST_CHECK_INT_EQ_(expected, g_death_test_count); + GTEST_CHECK_INT_EQ_(expected * kNumberOfParamTests, g_param_test_count); +} + +// Tests the behavior of Google Test when --gtest_repeat is not specified. +void TestRepeatUnspecified() { + ResetCounts(); + GTEST_CHECK_INT_EQ_(1, RUN_ALL_TESTS()); + CheckCounts(1); +} + +// Tests the behavior of Google Test when --gtest_repeat has the given value. +void TestRepeat(int repeat) { + GTEST_FLAG(repeat) = repeat; + + ResetCounts(); + GTEST_CHECK_INT_EQ_(repeat > 0 ? 
1 : 0, RUN_ALL_TESTS()); + CheckCounts(repeat); +} + +// Tests using --gtest_repeat when --gtest_filter specifies an empty +// set of tests. +void TestRepeatWithEmptyFilter(int repeat) { + GTEST_FLAG(repeat) = repeat; + GTEST_FLAG(filter) = "None"; + + ResetCounts(); + GTEST_CHECK_INT_EQ_(0, RUN_ALL_TESTS()); + CheckCounts(0); +} + +// Tests using --gtest_repeat when --gtest_filter specifies a set of +// successful tests. +void TestRepeatWithFilterForSuccessfulTests(int repeat) { + GTEST_FLAG(repeat) = repeat; + GTEST_FLAG(filter) = "*-*ShouldFail"; + + ResetCounts(); + GTEST_CHECK_INT_EQ_(0, RUN_ALL_TESTS()); + GTEST_CHECK_INT_EQ_(repeat, g_environment_set_up_count); + GTEST_CHECK_INT_EQ_(repeat, g_environment_tear_down_count); + GTEST_CHECK_INT_EQ_(0, g_should_fail_count); + GTEST_CHECK_INT_EQ_(repeat, g_should_pass_count); + GTEST_CHECK_INT_EQ_(repeat, g_death_test_count); + GTEST_CHECK_INT_EQ_(repeat * kNumberOfParamTests, g_param_test_count); +} + +// Tests using --gtest_repeat when --gtest_filter specifies a set of +// failed tests. +void TestRepeatWithFilterForFailedTests(int repeat) { + GTEST_FLAG(repeat) = repeat; + GTEST_FLAG(filter) = "*ShouldFail"; + + ResetCounts(); + GTEST_CHECK_INT_EQ_(1, RUN_ALL_TESTS()); + GTEST_CHECK_INT_EQ_(repeat, g_environment_set_up_count); + GTEST_CHECK_INT_EQ_(repeat, g_environment_tear_down_count); + GTEST_CHECK_INT_EQ_(repeat, g_should_fail_count); + GTEST_CHECK_INT_EQ_(0, g_should_pass_count); + GTEST_CHECK_INT_EQ_(0, g_death_test_count); + GTEST_CHECK_INT_EQ_(0, g_param_test_count); +} + +} // namespace + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + + testing::AddGlobalTestEnvironment(new MyEnvironment); + + TestRepeatUnspecified(); + TestRepeat(0); + TestRepeat(1); + TestRepeat(5); + + TestRepeatWithEmptyFilter(2); + TestRepeatWithEmptyFilter(3); + + TestRepeatWithFilterForSuccessfulTests(3); + + TestRepeatWithFilterForFailedTests(4); + + // It would be nice to verify that the tests indeed loop forever + // when GTEST_FLAG(repeat) is negative, but this test will be quite + // complicated to write. Since this flag is for interactive + // debugging only and doesn't affect the normal test result, such a + // test would be an overkill. + + printf("PASS\n"); + return 0; +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_skip_check_output_test.py b/security/nss/gtests/google_test/gtest/test/gtest_skip_check_output_test.py new file mode 100755 index 0000000000..14e63ab897 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_skip_check_output_test.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +# +# Copyright 2019 Google LLC. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +"""Tests Google Test's gtest skip in environment setup behavior. + +This script invokes gtest_skip_in_environment_setup_test_ and verifies its +output. +""" + +import re + +import gtest_test_utils + +# Path to the gtest_skip_in_environment_setup_test binary +EXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_skip_test') + +OUTPUT = gtest_test_utils.Subprocess([EXE_PATH]).output + + +# Test. +class SkipEntireEnvironmentTest(gtest_test_utils.TestCase): + + def testSkipEntireEnvironmentTest(self): + self.assertIn('Skipped\nskipping single test\n', OUTPUT) + skip_fixture = 'Skipped\nskipping all tests for this fixture\n' + self.assertIsNotNone( + re.search(skip_fixture + '.*' + skip_fixture, OUTPUT, flags=re.DOTALL), + repr(OUTPUT)) + self.assertNotIn('FAILED', OUTPUT) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/gtest_skip_environment_check_output_test.py b/security/nss/gtests/google_test/gtest/test/gtest_skip_environment_check_output_test.py new file mode 100755 index 0000000000..6e791556aa --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_skip_environment_check_output_test.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# +# Copyright 2019 Google LLC. All Rights Reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +"""Tests Google Test's gtest skip in environment setup behavior. + +This script invokes gtest_skip_in_environment_setup_test_ and verifies its +output. +""" + +import gtest_test_utils + +# Path to the gtest_skip_in_environment_setup_test binary +EXE_PATH = gtest_test_utils.GetTestExecutablePath( + 'gtest_skip_in_environment_setup_test') + +OUTPUT = gtest_test_utils.Subprocess([EXE_PATH]).output + + +# Test. +class SkipEntireEnvironmentTest(gtest_test_utils.TestCase): + + def testSkipEntireEnvironmentTest(self): + self.assertIn('Skipping the entire environment', OUTPUT) + self.assertNotIn('FAILED', OUTPUT) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/gtest_skip_in_environment_setup_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_skip_in_environment_setup_test.cc new file mode 100644 index 0000000000..9372310638 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_skip_in_environment_setup_test.cc @@ -0,0 +1,49 @@ +// Copyright 2019, Google LLC. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google LLC. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// This test verifies that skipping in the environment results in the +// testcases being skipped. 
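The test below exercises GTEST_SKIP() from a global environment's SetUp(). For contrast, the more common direct use inside a test body looks roughly like the sketch here; HasNetwork() is a placeholder prerequisite check, not something defined in this file.

#include "gtest/gtest.h"

bool HasNetwork() { return false; }  // placeholder: stands in for a real environment probe

TEST(IntegrationSketch, TalksToServer) {
  if (!HasNetwork()) {
    GTEST_SKIP() << "no network available";  // marks the test skipped rather than failed
  }
  SUCCEED();  // real assertions would go here
}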
+ +#include <iostream> +#include "gtest/gtest.h" + +class SetupEnvironment : public testing::Environment { + public: + void SetUp() override { GTEST_SKIP() << "Skipping the entire environment"; } +}; + +TEST(Test, AlwaysFails) { EXPECT_EQ(true, false); } + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + + testing::AddGlobalTestEnvironment(new SetupEnvironment()); + + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_skip_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_skip_test.cc new file mode 100644 index 0000000000..4a23004cca --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_skip_test.cc @@ -0,0 +1,55 @@ +// Copyright 2008 Google Inc. +// All Rights Reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// Author: arseny.aprelev@gmail.com (Arseny Aprelev) +// + +#include "gtest/gtest.h" + +using ::testing::Test; + +TEST(SkipTest, DoesSkip) { + GTEST_SKIP() << "skipping single test"; + EXPECT_EQ(0, 1); +} + +class Fixture : public Test { + protected: + void SetUp() override { + GTEST_SKIP() << "skipping all tests for this fixture"; + } +}; + +TEST_F(Fixture, SkipsOneTest) { + EXPECT_EQ(5, 7); +} + +TEST_F(Fixture, SkipsAnotherTest) { + EXPECT_EQ(99, 100); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_sole_header_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_sole_header_test.cc new file mode 100644 index 0000000000..1d94ac6b3a --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_sole_header_test.cc @@ -0,0 +1,56 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. 
+// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// This test verifies that it's possible to use Google Test by including +// the gtest.h header file alone. + +#include "gtest/gtest.h" + +namespace { + +void Subroutine() { + EXPECT_EQ(42, 42); +} + +TEST(NoFatalFailureTest, ExpectNoFatalFailure) { + EXPECT_NO_FATAL_FAILURE(;); + EXPECT_NO_FATAL_FAILURE(SUCCEED()); + EXPECT_NO_FATAL_FAILURE(Subroutine()); + EXPECT_NO_FATAL_FAILURE({ SUCCEED(); }); +} + +TEST(NoFatalFailureTest, AssertNoFatalFailure) { + ASSERT_NO_FATAL_FAILURE(;); + ASSERT_NO_FATAL_FAILURE(SUCCEED()); + ASSERT_NO_FATAL_FAILURE(Subroutine()); + ASSERT_NO_FATAL_FAILURE({ SUCCEED(); }); +} + +} // namespace diff --git a/security/nss/gtests/google_test/gtest/test/gtest_stress_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_stress_test.cc new file mode 100644 index 0000000000..843481910f --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_stress_test.cc @@ -0,0 +1,248 @@ +// Copyright 2007, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Tests that SCOPED_TRACE() and various Google Test assertions can be +// used in a large number of threads concurrently. + +#include "gtest/gtest.h" + +#include <vector> + +#include "src/gtest-internal-inl.h" + +#if GTEST_IS_THREADSAFE + +namespace testing { +namespace { + +using internal::Notification; +using internal::TestPropertyKeyIs; +using internal::ThreadWithParam; + +// In order to run tests in this file, for platforms where Google Test is +// thread safe, implement ThreadWithParam. See the description of its API +// in gtest-port.h, where it is defined for already supported platforms. + +// How many threads to create? +const int kThreadCount = 50; + +std::string IdToKey(int id, const char* suffix) { + Message key; + key << "key_" << id << "_" << suffix; + return key.GetString(); +} + +std::string IdToString(int id) { + Message id_message; + id_message << id; + return id_message.GetString(); +} + +void ExpectKeyAndValueWereRecordedForId( + const std::vector<TestProperty>& properties, + int id, const char* suffix) { + TestPropertyKeyIs matches_key(IdToKey(id, suffix).c_str()); + const std::vector<TestProperty>::const_iterator property = + std::find_if(properties.begin(), properties.end(), matches_key); + ASSERT_TRUE(property != properties.end()) + << "expecting " << suffix << " value for id " << id; + EXPECT_STREQ(IdToString(id).c_str(), property->value()); +} + +// Calls a large number of Google Test assertions, where exactly one of them +// will fail. +void ManyAsserts(int id) { + GTEST_LOG_(INFO) << "Thread #" << id << " running..."; + + SCOPED_TRACE(Message() << "Thread #" << id); + + for (int i = 0; i < kThreadCount; i++) { + SCOPED_TRACE(Message() << "Iteration #" << i); + + // A bunch of assertions that should succeed. + EXPECT_TRUE(true); + ASSERT_FALSE(false) << "This shouldn't fail."; + EXPECT_STREQ("a", "a"); + ASSERT_LE(5, 6); + EXPECT_EQ(i, i) << "This shouldn't fail."; + + // RecordProperty() should interact safely with other threads as well. + // The shared_key forces property updates. + Test::RecordProperty(IdToKey(id, "string").c_str(), IdToString(id).c_str()); + Test::RecordProperty(IdToKey(id, "int").c_str(), id); + Test::RecordProperty("shared_key", IdToString(id).c_str()); + + // This assertion should fail kThreadCount times per thread. It + // is for testing whether Google Test can handle failed assertions in a + // multi-threaded context. + EXPECT_LT(i, 0) << "This should always fail."; + } +} + +void CheckTestFailureCount(int expected_failures) { + const TestInfo* const info = UnitTest::GetInstance()->current_test_info(); + const TestResult* const result = info->result(); + GTEST_CHECK_(expected_failures == result->total_part_count()) + << "Logged " << result->total_part_count() << " failures " + << " vs. " << expected_failures << " expected"; +} + +// Tests using SCOPED_TRACE() and Google Test assertions in many threads +// concurrently. 
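The ThreadWithParam API referenced above is only described, not shown, here; the stress test that uses it follows right after this sketch. As a rough illustration (a hand-rolled model on std::thread, not the gtest-port.h implementation; the Notification and ThreadWithParam names mirror the test's usage only), the class starts a thread that waits on an optional start signal before calling func(param), and Join() blocks until the body returns:

#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

// Minimal stand-in for the Notification described in gtest-port.h:
// Notify() releases every thread blocked in WaitForNotification().
class Notification {
 public:
  void Notify() {
    std::lock_guard<std::mutex> lock(mu_);
    notified_ = true;
    cv_.notify_all();
  }
  void WaitForNotification() {
    std::unique_lock<std::mutex> lock(mu_);
    cv_.wait(lock, [this] { return notified_; });
  }

 private:
  std::mutex mu_;
  std::condition_variable cv_;
  bool notified_ = false;
};

// Model of ThreadWithParam<T>: start a thread that (optionally) waits for
// the start signal, then calls func(param); Join() waits for it to finish.
template <typename T>
class ThreadWithParam {
 public:
  ThreadWithParam(void (*func)(T), T param, Notification* thread_can_start)
      : thread_([=] {
          if (thread_can_start != nullptr)
            thread_can_start->WaitForNotification();
          func(param);
        }) {}
  void Join() { thread_.join(); }

 private:
  std::thread thread_;
};

void PrintId(int id) { std::printf("thread %d ran\n", id); }

int main() {
  Notification can_start;
  ThreadWithParam<int> t(&PrintId, 7, &can_start);
  can_start.Notify();  // releases the worker, mirroring threads_can_start below
  t.Join();
  return 0;
}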
+TEST(StressTest, CanUseScopedTraceAndAssertionsInManyThreads) { + { + std::unique_ptr<ThreadWithParam<int> > threads[kThreadCount]; + Notification threads_can_start; + for (int i = 0; i != kThreadCount; i++) + threads[i].reset(new ThreadWithParam<int>(&ManyAsserts, + i, + &threads_can_start)); + + threads_can_start.Notify(); + + // Blocks until all the threads are done. + for (int i = 0; i != kThreadCount; i++) + threads[i]->Join(); + } + + // Ensures that kThreadCount*kThreadCount failures have been reported. + const TestInfo* const info = UnitTest::GetInstance()->current_test_info(); + const TestResult* const result = info->result(); + + std::vector<TestProperty> properties; + // We have no access to the TestResult's list of properties but we can + // copy them one by one. + for (int i = 0; i < result->test_property_count(); ++i) + properties.push_back(result->GetTestProperty(i)); + + EXPECT_EQ(kThreadCount * 2 + 1, result->test_property_count()) + << "String and int values recorded on each thread, " + << "as well as one shared_key"; + for (int i = 0; i < kThreadCount; ++i) { + ExpectKeyAndValueWereRecordedForId(properties, i, "string"); + ExpectKeyAndValueWereRecordedForId(properties, i, "int"); + } + CheckTestFailureCount(kThreadCount*kThreadCount); +} + +void FailingThread(bool is_fatal) { + if (is_fatal) + FAIL() << "Fatal failure in some other thread. " + << "(This failure is expected.)"; + else + ADD_FAILURE() << "Non-fatal failure in some other thread. " + << "(This failure is expected.)"; +} + +void GenerateFatalFailureInAnotherThread(bool is_fatal) { + ThreadWithParam<bool> thread(&FailingThread, is_fatal, nullptr); + thread.Join(); +} + +TEST(NoFatalFailureTest, ExpectNoFatalFailureIgnoresFailuresInOtherThreads) { + EXPECT_NO_FATAL_FAILURE(GenerateFatalFailureInAnotherThread(true)); + // We should only have one failure (the one from + // GenerateFatalFailureInAnotherThread()), since the EXPECT_NO_FATAL_FAILURE + // should succeed. + CheckTestFailureCount(1); +} + +void AssertNoFatalFailureIgnoresFailuresInOtherThreads() { + ASSERT_NO_FATAL_FAILURE(GenerateFatalFailureInAnotherThread(true)); +} +TEST(NoFatalFailureTest, AssertNoFatalFailureIgnoresFailuresInOtherThreads) { + // Using a subroutine, to make sure, that the test continues. + AssertNoFatalFailureIgnoresFailuresInOtherThreads(); + // We should only have one failure (the one from + // GenerateFatalFailureInAnotherThread()), since the EXPECT_NO_FATAL_FAILURE + // should succeed. + CheckTestFailureCount(1); +} + +TEST(FatalFailureTest, ExpectFatalFailureIgnoresFailuresInOtherThreads) { + // This statement should fail, since the current thread doesn't generate a + // fatal failure, only another one does. + EXPECT_FATAL_FAILURE(GenerateFatalFailureInAnotherThread(true), "expected"); + CheckTestFailureCount(2); +} + +TEST(FatalFailureOnAllThreadsTest, ExpectFatalFailureOnAllThreads) { + // This statement should succeed, because failures in all threads are + // considered. + EXPECT_FATAL_FAILURE_ON_ALL_THREADS( + GenerateFatalFailureInAnotherThread(true), "expected"); + CheckTestFailureCount(0); + // We need to add a failure, because main() checks that there are failures. + // But when only this test is run, we shouldn't have any failures. + ADD_FAILURE() << "This is an expected non-fatal failure."; +} + +TEST(NonFatalFailureTest, ExpectNonFatalFailureIgnoresFailuresInOtherThreads) { + // This statement should fail, since the current thread doesn't generate a + // fatal failure, only another one does. 
+ EXPECT_NONFATAL_FAILURE(GenerateFatalFailureInAnotherThread(false), + "expected"); + CheckTestFailureCount(2); +} + +TEST(NonFatalFailureOnAllThreadsTest, ExpectNonFatalFailureOnAllThreads) { + // This statement should succeed, because failures in all threads are + // considered. + EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS( + GenerateFatalFailureInAnotherThread(false), "expected"); + CheckTestFailureCount(0); + // We need to add a failure, because main() checks that there are failures, + // But when only this test is run, we shouldn't have any failures. + ADD_FAILURE() << "This is an expected non-fatal failure."; +} + +} // namespace +} // namespace testing + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + + const int result = RUN_ALL_TESTS(); // Expected to fail. + GTEST_CHECK_(result == 1) << "RUN_ALL_TESTS() did not fail as expected"; + + printf("\nPASS\n"); + return 0; +} + +#else +TEST(StressTest, + DISABLED_ThreadSafetyTestsAreSkippedWhenGoogleTestIsNotThreadSafe) { +} + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} +#endif // GTEST_IS_THREADSAFE diff --git a/security/nss/gtests/google_test/gtest/test/gtest_test_macro_stack_footprint_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_test_macro_stack_footprint_test.cc new file mode 100644 index 0000000000..a48db05012 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_test_macro_stack_footprint_test.cc @@ -0,0 +1,89 @@ +// Copyright 2013, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Each TEST() expands to some static registration logic. GCC puts all +// such static initialization logic for a translation unit in a common, +// internal function. Since Google's build system restricts how much +// stack space a function can use, there's a limit on how many TEST()s +// one can put in a single C++ test file. This test ensures that a large +// number of TEST()s can be defined in the same translation unit. 
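To make the "static registration logic" mentioned in the comment above concrete before the macro-generated tests below: a TEST()-style macro typically defines a test body plus a file-scope static whose initializer runs before main() and records the test in a global registry. The sketch here is a deliberately simplified model of that pattern, not Google Test's real TEST() expansion; MY_TEST and Registry are invented for illustration.

#include <functional>
#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Toy registry of (name, body) pairs, populated during static initialization.
struct Registry {
  static std::vector<std::pair<std::string, std::function<void()>>>& Tests() {
    static std::vector<std::pair<std::string, std::function<void()>>> tests;
    return tests;
  }
};

// Each MY_TEST() declares a body function and a static whose initializer
// records that body in the registry. The compiler gathers all of these
// initializers into one per-translation-unit startup function, which is the
// function whose stack usage the comment above is concerned with.
#define MY_TEST(suite, name)                                      \
  static void suite##_##name##_Body();                            \
  static const bool suite##_##name##_registered = [] {            \
    Registry::Tests().emplace_back(#suite "." #name,              \
                                   &suite##_##name##_Body);       \
    return true;                                                  \
  }();                                                            \
  static void suite##_##name##_Body()

MY_TEST(Demo, RunsBeforeMain) { std::cout << "  body executed\n"; }

int main() {
  // Everything was registered before main() started.
  for (const auto& test : Registry::Tests()) {
    std::cout << test.first << "\n";
    test.second();
  }
  return 0;
}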
+ +#include "gtest/gtest.h" + +// This macro defines 10 dummy tests. +#define TEN_TESTS_(test_case_name) \ + TEST(test_case_name, T0) {} \ + TEST(test_case_name, T1) {} \ + TEST(test_case_name, T2) {} \ + TEST(test_case_name, T3) {} \ + TEST(test_case_name, T4) {} \ + TEST(test_case_name, T5) {} \ + TEST(test_case_name, T6) {} \ + TEST(test_case_name, T7) {} \ + TEST(test_case_name, T8) {} \ + TEST(test_case_name, T9) {} + +// This macro defines 100 dummy tests. +#define HUNDRED_TESTS_(test_case_name_prefix) \ + TEN_TESTS_(test_case_name_prefix ## 0) \ + TEN_TESTS_(test_case_name_prefix ## 1) \ + TEN_TESTS_(test_case_name_prefix ## 2) \ + TEN_TESTS_(test_case_name_prefix ## 3) \ + TEN_TESTS_(test_case_name_prefix ## 4) \ + TEN_TESTS_(test_case_name_prefix ## 5) \ + TEN_TESTS_(test_case_name_prefix ## 6) \ + TEN_TESTS_(test_case_name_prefix ## 7) \ + TEN_TESTS_(test_case_name_prefix ## 8) \ + TEN_TESTS_(test_case_name_prefix ## 9) + +// This macro defines 1000 dummy tests. +#define THOUSAND_TESTS_(test_case_name_prefix) \ + HUNDRED_TESTS_(test_case_name_prefix ## 0) \ + HUNDRED_TESTS_(test_case_name_prefix ## 1) \ + HUNDRED_TESTS_(test_case_name_prefix ## 2) \ + HUNDRED_TESTS_(test_case_name_prefix ## 3) \ + HUNDRED_TESTS_(test_case_name_prefix ## 4) \ + HUNDRED_TESTS_(test_case_name_prefix ## 5) \ + HUNDRED_TESTS_(test_case_name_prefix ## 6) \ + HUNDRED_TESTS_(test_case_name_prefix ## 7) \ + HUNDRED_TESTS_(test_case_name_prefix ## 8) \ + HUNDRED_TESTS_(test_case_name_prefix ## 9) + +// Ensures that we can define 1000 TEST()s in the same translation +// unit. +THOUSAND_TESTS_(T) + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + + // We don't actually need to run the dummy tests - the purpose is to + // ensure that they compile. + return 0; +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_test_utils.py b/security/nss/gtests/google_test/gtest/test/gtest_test_utils.py new file mode 100755 index 0000000000..d0c24466a4 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_test_utils.py @@ -0,0 +1,312 @@ +# Copyright 2006, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unit test utilities for Google C++ Testing and Mocking Framework.""" +# Suppresses the 'Import not at the top of the file' lint complaint. +# pylint: disable-msg=C6204 + +import os +import sys + +IS_WINDOWS = os.name == 'nt' +IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0] +IS_OS2 = os.name == 'os2' + +import atexit +import shutil +import tempfile +import unittest as _test_module + +try: + import subprocess + _SUBPROCESS_MODULE_AVAILABLE = True +except: + import popen2 + _SUBPROCESS_MODULE_AVAILABLE = False +# pylint: enable-msg=C6204 + +GTEST_OUTPUT_VAR_NAME = 'GTEST_OUTPUT' + +# The environment variable for specifying the path to the premature-exit file. +PREMATURE_EXIT_FILE_ENV_VAR = 'TEST_PREMATURE_EXIT_FILE' + +environ = os.environ.copy() + + +def SetEnvVar(env_var, value): + """Sets/unsets an environment variable to a given value.""" + + if value is not None: + environ[env_var] = value + elif env_var in environ: + del environ[env_var] + + +# Here we expose a class from a particular module, depending on the +# environment. The comment suppresses the 'Invalid variable name' lint +# complaint. +TestCase = _test_module.TestCase # pylint: disable=C6409 + +# Initially maps a flag to its default value. After +# _ParseAndStripGTestFlags() is called, maps a flag to its actual value. +_flag_map = {'source_dir': os.path.dirname(sys.argv[0]), + 'build_dir': os.path.dirname(sys.argv[0])} +_gtest_flags_are_parsed = False + + +def _ParseAndStripGTestFlags(argv): + """Parses and strips Google Test flags from argv. This is idempotent.""" + + # Suppresses the lint complaint about a global variable since we need it + # here to maintain module-wide state. + global _gtest_flags_are_parsed # pylint: disable=W0603 + if _gtest_flags_are_parsed: + return + + _gtest_flags_are_parsed = True + for flag in _flag_map: + # The environment variable overrides the default value. + if flag.upper() in os.environ: + _flag_map[flag] = os.environ[flag.upper()] + + # The command line flag overrides the environment variable. + i = 1 # Skips the program name. + while i < len(argv): + prefix = '--' + flag + '=' + if argv[i].startswith(prefix): + _flag_map[flag] = argv[i][len(prefix):] + del argv[i] + break + else: + # We don't increment i in case we just found a --gtest_* flag + # and removed it from argv. + i += 1 + + +def GetFlag(flag): + """Returns the value of the given flag.""" + + # In case GetFlag() is called before Main(), we always call + # _ParseAndStripGTestFlags() here to make sure the --gtest_* flags + # are parsed. 
+ _ParseAndStripGTestFlags(sys.argv) + + return _flag_map[flag] + + +def GetSourceDir(): + """Returns the absolute path of the directory where the .py files are.""" + + return os.path.abspath(GetFlag('source_dir')) + + +def GetBuildDir(): + """Returns the absolute path of the directory where the test binaries are.""" + + return os.path.abspath(GetFlag('build_dir')) + + +_temp_dir = None + +def _RemoveTempDir(): + if _temp_dir: + shutil.rmtree(_temp_dir, ignore_errors=True) + +atexit.register(_RemoveTempDir) + + +def GetTempDir(): + global _temp_dir + if not _temp_dir: + _temp_dir = tempfile.mkdtemp() + return _temp_dir + + +def GetTestExecutablePath(executable_name, build_dir=None): + """Returns the absolute path of the test binary given its name. + + The function will print a message and abort the program if the resulting file + doesn't exist. + + Args: + executable_name: name of the test binary that the test script runs. + build_dir: directory where to look for executables, by default + the result of GetBuildDir(). + + Returns: + The absolute path of the test binary. + """ + + path = os.path.abspath(os.path.join(build_dir or GetBuildDir(), + executable_name)) + if (IS_WINDOWS or IS_CYGWIN or IS_OS2) and not path.endswith('.exe'): + path += '.exe' + + if not os.path.exists(path): + message = ( + 'Unable to find the test binary "%s". Please make sure to provide\n' + 'a path to the binary via the --build_dir flag or the BUILD_DIR\n' + 'environment variable.' % path) + print >> sys.stderr, message + sys.exit(1) + + return path + + +def GetExitStatus(exit_code): + """Returns the argument to exit(), or -1 if exit() wasn't called. + + Args: + exit_code: the result value of os.system(command). + """ + + if os.name == 'nt': + # On Windows, os.WEXITSTATUS() doesn't work and os.system() returns + # the argument to exit() directly. + return exit_code + else: + # On Unix, os.WEXITSTATUS() must be used to extract the exit status + # from the result of os.system(). + if os.WIFEXITED(exit_code): + return os.WEXITSTATUS(exit_code) + else: + return -1 + + +class Subprocess: + def __init__(self, command, working_dir=None, capture_stderr=True, env=None): + """Changes into a specified directory, if provided, and executes a command. + + Restores the old directory afterwards. + + Args: + command: The command to run, in the form of sys.argv. + working_dir: The directory to change into. + capture_stderr: Determines whether to capture stderr in the output member + or to discard it. + env: Dictionary with environment to pass to the subprocess. + + Returns: + An object that represents outcome of the executed process. It has the + following attributes: + terminated_by_signal True if and only if the child process has been + terminated by a signal. + exited True if and only if the child process exited + normally. + exit_code The code with which the child process exited. + output Child process's stdout and stderr output + combined in a string. + """ + + # The subprocess module is the preferrable way of running programs + # since it is available and behaves consistently on all platforms, + # including Windows. But it is only available starting in python 2.4. + # In earlier python versions, we revert to the popen2 module, which is + # available in python 2.0 and later but doesn't provide required + # functionality (Popen4) under Windows. This allows us to support Mac + # OS X 10.4 Tiger, which has python 2.3 installed. 
+ if _SUBPROCESS_MODULE_AVAILABLE: + if capture_stderr: + stderr = subprocess.STDOUT + else: + stderr = subprocess.PIPE + + p = subprocess.Popen(command, + stdout=subprocess.PIPE, stderr=stderr, + cwd=working_dir, universal_newlines=True, env=env) + # communicate returns a tuple with the file object for the child's + # output. + self.output = p.communicate()[0] + self._return_code = p.returncode + else: + old_dir = os.getcwd() + + def _ReplaceEnvDict(dest, src): + # Changes made by os.environ.clear are not inheritable by child + # processes until Python 2.6. To produce inheritable changes we have + # to delete environment items with the del statement. + for key in dest.keys(): + del dest[key] + dest.update(src) + + # When 'env' is not None, backup the environment variables and replace + # them with the passed 'env'. When 'env' is None, we simply use the + # current 'os.environ' for compatibility with the subprocess.Popen + # semantics used above. + if env is not None: + old_environ = os.environ.copy() + _ReplaceEnvDict(os.environ, env) + + try: + if working_dir is not None: + os.chdir(working_dir) + if capture_stderr: + p = popen2.Popen4(command) + else: + p = popen2.Popen3(command) + p.tochild.close() + self.output = p.fromchild.read() + ret_code = p.wait() + finally: + os.chdir(old_dir) + + # Restore the old environment variables + # if they were replaced. + if env is not None: + _ReplaceEnvDict(os.environ, old_environ) + + # Converts ret_code to match the semantics of + # subprocess.Popen.returncode. + if os.WIFSIGNALED(ret_code): + self._return_code = -os.WTERMSIG(ret_code) + else: # os.WIFEXITED(ret_code) should return True here. + self._return_code = os.WEXITSTATUS(ret_code) + + if bool(self._return_code & 0x80000000): + self.terminated_by_signal = True + self.exited = False + else: + self.terminated_by_signal = False + self.exited = True + self.exit_code = self._return_code + + +def Main(): + """Runs the unit test.""" + + # We must call _ParseAndStripGTestFlags() before calling + # unittest.main(). Otherwise the latter will be confused by the + # --gtest_* flags. + _ParseAndStripGTestFlags(sys.argv) + # The tested binaries should not be writing XML output files unless the + # script explicitly instructs them to. + if GTEST_OUTPUT_VAR_NAME in os.environ: + del os.environ[GTEST_OUTPUT_VAR_NAME] + + _test_module.main() diff --git a/security/nss/gtests/google_test/gtest/test/gtest_testbridge_test.py b/security/nss/gtests/google_test/gtest/test/gtest_testbridge_test.py new file mode 100755 index 0000000000..87ffad73d4 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_testbridge_test.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python +# +# Copyright 2018 Google LLC. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. 
+# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +"""Verifies that Google Test uses filter provided via testbridge.""" + +import os + +import gtest_test_utils + +binary_name = 'gtest_testbridge_test_' +COMMAND = gtest_test_utils.GetTestExecutablePath(binary_name) +TESTBRIDGE_NAME = 'TESTBRIDGE_TEST_ONLY' + + +def Assert(condition): + if not condition: + raise AssertionError + + +class GTestTestFilterTest(gtest_test_utils.TestCase): + + def testTestExecutionIsFiltered(self): + """Tests that the test filter is picked up from the testbridge env var.""" + subprocess_env = os.environ.copy() + + subprocess_env[TESTBRIDGE_NAME] = '*.TestThatSucceeds' + p = gtest_test_utils.Subprocess(COMMAND, env=subprocess_env) + + self.assertEquals(0, p.exit_code) + + Assert('filter = *.TestThatSucceeds' in p.output) + Assert('[ OK ] TestFilterTest.TestThatSucceeds' in p.output) + Assert('[ PASSED ] 1 test.' in p.output) + + +if __name__ == '__main__': + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/gtest_testbridge_test_.cc b/security/nss/gtests/google_test/gtest/test/gtest_testbridge_test_.cc new file mode 100644 index 0000000000..24617b209e --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_testbridge_test_.cc @@ -0,0 +1,43 @@ +// Copyright 2018, Google LLC. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// This program is meant to be run by gtest_test_filter_test.py. Do not run +// it directly. + +#include "gtest/gtest.h" + +// These tests are used to detect if filtering is working. Only +// 'TestThatSucceeds' should ever run. + +TEST(TestFilterTest, TestThatSucceeds) {} + +TEST(TestFilterTest, TestThatFails) { + ASSERT_TRUE(false) << "This test should never be run."; +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_throw_on_failure_ex_test.cc b/security/nss/gtests/google_test/gtest/test/gtest_throw_on_failure_ex_test.cc new file mode 100644 index 0000000000..1d95adbf53 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_throw_on_failure_ex_test.cc @@ -0,0 +1,90 @@ +// Copyright 2009, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Tests Google Test's throw-on-failure mode with exceptions enabled. + +#include "gtest/gtest.h" + +#include <stdlib.h> +#include <stdio.h> +#include <string.h> +#include <stdexcept> + +// Prints the given failure message and exits the program with +// non-zero. We use this instead of a Google Test assertion to +// indicate a failure, as the latter is been tested and cannot be +// relied on. +void Fail(const char* msg) { + printf("FAILURE: %s\n", msg); + fflush(stdout); + exit(1); +} + +// Tests that an assertion failure throws a subclass of +// std::runtime_error. +void TestFailureThrowsRuntimeError() { + testing::GTEST_FLAG(throw_on_failure) = true; + + // A successful assertion shouldn't throw. 
+ try { + EXPECT_EQ(3, 3); + } catch(...) { + Fail("A successful assertion wrongfully threw."); + } + + // A failed assertion should throw a subclass of std::runtime_error. + try { + EXPECT_EQ(2, 3) << "Expected failure"; + } catch(const std::runtime_error& e) { + if (strstr(e.what(), "Expected failure") != nullptr) return; + + printf("%s", + "A failed assertion did throw an exception of the right type, " + "but the message is incorrect. Instead of containing \"Expected " + "failure\", it is:\n"); + Fail(e.what()); + } catch(...) { + Fail("A failed assertion threw the wrong type of exception."); + } + Fail("A failed assertion should've thrown but didn't."); +} + +int main(int argc, char** argv) { + testing::InitGoogleTest(&argc, argv); + + // We want to ensure that people can use Google Test assertions in + // other testing frameworks, as long as they initialize Google Test + // properly and set the thrown-on-failure mode. Therefore, we don't + // use Google Test's constructs for defining and running tests + // (e.g. TEST and RUN_ALL_TESTS) here. + + TestFailureThrowsRuntimeError(); + return 0; +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_unittest.cc b/security/nss/gtests/google_test/gtest/test/gtest_unittest.cc new file mode 100644 index 0000000000..1730e8b8c9 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_unittest.cc @@ -0,0 +1,7784 @@ +// Copyright 2005, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// Tests for Google Test itself. This verifies that the basic constructs of +// Google Test work. + +#include "gtest/gtest.h" + +// Verifies that the command line flag variables can be accessed in +// code once "gtest.h" has been #included. +// Do not move it after other gtest #includes. 
+TEST(CommandLineFlagsTest, CanBeAccessedInCodeOnceGTestHIsIncluded) { + bool dummy = testing::GTEST_FLAG(also_run_disabled_tests) || + testing::GTEST_FLAG(break_on_failure) || + testing::GTEST_FLAG(catch_exceptions) || + testing::GTEST_FLAG(color) != "unknown" || + testing::GTEST_FLAG(fail_fast) || + testing::GTEST_FLAG(filter) != "unknown" || + testing::GTEST_FLAG(list_tests) || + testing::GTEST_FLAG(output) != "unknown" || + testing::GTEST_FLAG(brief) || testing::GTEST_FLAG(print_time) || + testing::GTEST_FLAG(random_seed) || + testing::GTEST_FLAG(repeat) > 0 || + testing::GTEST_FLAG(show_internal_stack_frames) || + testing::GTEST_FLAG(shuffle) || + testing::GTEST_FLAG(stack_trace_depth) > 0 || + testing::GTEST_FLAG(stream_result_to) != "unknown" || + testing::GTEST_FLAG(throw_on_failure); + EXPECT_TRUE(dummy || !dummy); // Suppresses warning that dummy is unused. +} + +#include <limits.h> // For INT_MAX. +#include <stdlib.h> +#include <string.h> +#include <time.h> + +#include <cstdint> +#include <map> +#include <ostream> +#include <string> +#include <type_traits> +#include <unordered_set> +#include <vector> + +#include "gtest/gtest-spi.h" +#include "src/gtest-internal-inl.h" + +namespace testing { +namespace internal { + +#if GTEST_CAN_STREAM_RESULTS_ + +class StreamingListenerTest : public Test { + public: + class FakeSocketWriter : public StreamingListener::AbstractSocketWriter { + public: + // Sends a string to the socket. + void Send(const std::string& message) override { output_ += message; } + + std::string output_; + }; + + StreamingListenerTest() + : fake_sock_writer_(new FakeSocketWriter), + streamer_(fake_sock_writer_), + test_info_obj_("FooTest", "Bar", nullptr, nullptr, + CodeLocation(__FILE__, __LINE__), nullptr, nullptr) {} + + protected: + std::string* output() { return &(fake_sock_writer_->output_); } + + FakeSocketWriter* const fake_sock_writer_; + StreamingListener streamer_; + UnitTest unit_test_; + TestInfo test_info_obj_; // The name test_info_ was taken by testing::Test. +}; + +TEST_F(StreamingListenerTest, OnTestProgramEnd) { + *output() = ""; + streamer_.OnTestProgramEnd(unit_test_); + EXPECT_EQ("event=TestProgramEnd&passed=1\n", *output()); +} + +TEST_F(StreamingListenerTest, OnTestIterationEnd) { + *output() = ""; + streamer_.OnTestIterationEnd(unit_test_, 42); + EXPECT_EQ("event=TestIterationEnd&passed=1&elapsed_time=0ms\n", *output()); +} + +TEST_F(StreamingListenerTest, OnTestCaseStart) { + *output() = ""; + streamer_.OnTestCaseStart(TestCase("FooTest", "Bar", nullptr, nullptr)); + EXPECT_EQ("event=TestCaseStart&name=FooTest\n", *output()); +} + +TEST_F(StreamingListenerTest, OnTestCaseEnd) { + *output() = ""; + streamer_.OnTestCaseEnd(TestCase("FooTest", "Bar", nullptr, nullptr)); + EXPECT_EQ("event=TestCaseEnd&passed=1&elapsed_time=0ms\n", *output()); +} + +TEST_F(StreamingListenerTest, OnTestStart) { + *output() = ""; + streamer_.OnTestStart(test_info_obj_); + EXPECT_EQ("event=TestStart&name=Bar\n", *output()); +} + +TEST_F(StreamingListenerTest, OnTestEnd) { + *output() = ""; + streamer_.OnTestEnd(test_info_obj_); + EXPECT_EQ("event=TestEnd&passed=1&elapsed_time=0ms\n", *output()); +} + +TEST_F(StreamingListenerTest, OnTestPartResult) { + *output() = ""; + streamer_.OnTestPartResult(TestPartResult( + TestPartResult::kFatalFailure, "foo.cc", 42, "failed=\n&%")); + + // Meta characters in the failure message should be properly escaped. 
+ EXPECT_EQ( + "event=TestPartResult&file=foo.cc&line=42&message=failed%3D%0A%26%25\n", + *output()); +} + +#endif // GTEST_CAN_STREAM_RESULTS_ + +// Provides access to otherwise private parts of the TestEventListeners class +// that are needed to test it. +class TestEventListenersAccessor { + public: + static TestEventListener* GetRepeater(TestEventListeners* listeners) { + return listeners->repeater(); + } + + static void SetDefaultResultPrinter(TestEventListeners* listeners, + TestEventListener* listener) { + listeners->SetDefaultResultPrinter(listener); + } + static void SetDefaultXmlGenerator(TestEventListeners* listeners, + TestEventListener* listener) { + listeners->SetDefaultXmlGenerator(listener); + } + + static bool EventForwardingEnabled(const TestEventListeners& listeners) { + return listeners.EventForwardingEnabled(); + } + + static void SuppressEventForwarding(TestEventListeners* listeners) { + listeners->SuppressEventForwarding(); + } +}; + +class UnitTestRecordPropertyTestHelper : public Test { + protected: + UnitTestRecordPropertyTestHelper() {} + + // Forwards to UnitTest::RecordProperty() to bypass access controls. + void UnitTestRecordProperty(const char* key, const std::string& value) { + unit_test_.RecordProperty(key, value); + } + + UnitTest unit_test_; +}; + +} // namespace internal +} // namespace testing + +using testing::AssertionFailure; +using testing::AssertionResult; +using testing::AssertionSuccess; +using testing::DoubleLE; +using testing::EmptyTestEventListener; +using testing::Environment; +using testing::FloatLE; +using testing::GTEST_FLAG(also_run_disabled_tests); +using testing::GTEST_FLAG(break_on_failure); +using testing::GTEST_FLAG(catch_exceptions); +using testing::GTEST_FLAG(color); +using testing::GTEST_FLAG(death_test_use_fork); +using testing::GTEST_FLAG(fail_fast); +using testing::GTEST_FLAG(filter); +using testing::GTEST_FLAG(list_tests); +using testing::GTEST_FLAG(output); +using testing::GTEST_FLAG(brief); +using testing::GTEST_FLAG(print_time); +using testing::GTEST_FLAG(random_seed); +using testing::GTEST_FLAG(repeat); +using testing::GTEST_FLAG(show_internal_stack_frames); +using testing::GTEST_FLAG(shuffle); +using testing::GTEST_FLAG(stack_trace_depth); +using testing::GTEST_FLAG(stream_result_to); +using testing::GTEST_FLAG(throw_on_failure); +using testing::IsNotSubstring; +using testing::IsSubstring; +using testing::kMaxStackTraceDepth; +using testing::Message; +using testing::ScopedFakeTestPartResultReporter; +using testing::StaticAssertTypeEq; +using testing::Test; +using testing::TestEventListeners; +using testing::TestInfo; +using testing::TestPartResult; +using testing::TestPartResultArray; +using testing::TestProperty; +using testing::TestResult; +using testing::TestSuite; +using testing::TimeInMillis; +using testing::UnitTest; +using testing::internal::AlwaysFalse; +using testing::internal::AlwaysTrue; +using testing::internal::AppendUserMessage; +using testing::internal::ArrayAwareFind; +using testing::internal::ArrayEq; +using testing::internal::CodePointToUtf8; +using testing::internal::CopyArray; +using testing::internal::CountIf; +using testing::internal::EqFailure; +using testing::internal::FloatingPoint; +using testing::internal::ForEach; +using testing::internal::FormatEpochTimeInMillisAsIso8601; +using testing::internal::FormatTimeInMillisAsSeconds; +using testing::internal::GetCurrentOsStackTraceExceptTop; +using testing::internal::GetElementOr; +using testing::internal::GetNextRandomSeed; +using 
testing::internal::GetRandomSeedFromFlag; +using testing::internal::GetTestTypeId; +using testing::internal::GetTimeInMillis; +using testing::internal::GetTypeId; +using testing::internal::GetUnitTestImpl; +using testing::internal::GTestFlagSaver; +using testing::internal::HasDebugStringAndShortDebugString; +using testing::internal::Int32FromEnvOrDie; +using testing::internal::IsContainer; +using testing::internal::IsContainerTest; +using testing::internal::IsNotContainer; +using testing::internal::kMaxRandomSeed; +using testing::internal::kTestTypeIdInGoogleTest; +using testing::internal::NativeArray; +using testing::internal::OsStackTraceGetter; +using testing::internal::OsStackTraceGetterInterface; +using testing::internal::ParseInt32Flag; +using testing::internal::RelationToSourceCopy; +using testing::internal::RelationToSourceReference; +using testing::internal::ShouldRunTestOnShard; +using testing::internal::ShouldShard; +using testing::internal::ShouldUseColor; +using testing::internal::Shuffle; +using testing::internal::ShuffleRange; +using testing::internal::SkipPrefix; +using testing::internal::StreamableToString; +using testing::internal::String; +using testing::internal::TestEventListenersAccessor; +using testing::internal::TestResultAccessor; +using testing::internal::UnitTestImpl; +using testing::internal::WideStringToUtf8; +using testing::internal::edit_distance::CalculateOptimalEdits; +using testing::internal::edit_distance::CreateUnifiedDiff; +using testing::internal::edit_distance::EditType; + +#if GTEST_HAS_STREAM_REDIRECTION +using testing::internal::CaptureStdout; +using testing::internal::GetCapturedStdout; +#endif + +#if GTEST_IS_THREADSAFE +using testing::internal::ThreadWithParam; +#endif + +class TestingVector : public std::vector<int> { +}; + +::std::ostream& operator<<(::std::ostream& os, + const TestingVector& vector) { + os << "{ "; + for (size_t i = 0; i < vector.size(); i++) { + os << vector[i] << " "; + } + os << "}"; + return os; +} + +// This line tests that we can define tests in an unnamed namespace. +namespace { + +TEST(GetRandomSeedFromFlagTest, HandlesZero) { + const int seed = GetRandomSeedFromFlag(0); + EXPECT_LE(1, seed); + EXPECT_LE(seed, static_cast<int>(kMaxRandomSeed)); +} + +TEST(GetRandomSeedFromFlagTest, PreservesValidSeed) { + EXPECT_EQ(1, GetRandomSeedFromFlag(1)); + EXPECT_EQ(2, GetRandomSeedFromFlag(2)); + EXPECT_EQ(kMaxRandomSeed - 1, GetRandomSeedFromFlag(kMaxRandomSeed - 1)); + EXPECT_EQ(static_cast<int>(kMaxRandomSeed), + GetRandomSeedFromFlag(kMaxRandomSeed)); +} + +TEST(GetRandomSeedFromFlagTest, NormalizesInvalidSeed) { + const int seed1 = GetRandomSeedFromFlag(-1); + EXPECT_LE(1, seed1); + EXPECT_LE(seed1, static_cast<int>(kMaxRandomSeed)); + + const int seed2 = GetRandomSeedFromFlag(kMaxRandomSeed + 1); + EXPECT_LE(1, seed2); + EXPECT_LE(seed2, static_cast<int>(kMaxRandomSeed)); +} + +TEST(GetNextRandomSeedTest, WorksForValidInput) { + EXPECT_EQ(2, GetNextRandomSeed(1)); + EXPECT_EQ(3, GetNextRandomSeed(2)); + EXPECT_EQ(static_cast<int>(kMaxRandomSeed), + GetNextRandomSeed(kMaxRandomSeed - 1)); + EXPECT_EQ(1, GetNextRandomSeed(kMaxRandomSeed)); + + // We deliberately don't test GetNextRandomSeed() with invalid + // inputs, as that requires death tests, which are expensive. This + // is fine as GetNextRandomSeed() is internal and has a + // straightforward definition. 
+} + +static void ClearCurrentTestPartResults() { + TestResultAccessor::ClearTestPartResults( + GetUnitTestImpl()->current_test_result()); +} + +// Tests GetTypeId. + +TEST(GetTypeIdTest, ReturnsSameValueForSameType) { + EXPECT_EQ(GetTypeId<int>(), GetTypeId<int>()); + EXPECT_EQ(GetTypeId<Test>(), GetTypeId<Test>()); +} + +class SubClassOfTest : public Test {}; +class AnotherSubClassOfTest : public Test {}; + +TEST(GetTypeIdTest, ReturnsDifferentValuesForDifferentTypes) { + EXPECT_NE(GetTypeId<int>(), GetTypeId<const int>()); + EXPECT_NE(GetTypeId<int>(), GetTypeId<char>()); + EXPECT_NE(GetTypeId<int>(), GetTestTypeId()); + EXPECT_NE(GetTypeId<SubClassOfTest>(), GetTestTypeId()); + EXPECT_NE(GetTypeId<AnotherSubClassOfTest>(), GetTestTypeId()); + EXPECT_NE(GetTypeId<AnotherSubClassOfTest>(), GetTypeId<SubClassOfTest>()); +} + +// Verifies that GetTestTypeId() returns the same value, no matter it +// is called from inside Google Test or outside of it. +TEST(GetTestTypeIdTest, ReturnsTheSameValueInsideOrOutsideOfGoogleTest) { + EXPECT_EQ(kTestTypeIdInGoogleTest, GetTestTypeId()); +} + +// Tests CanonicalizeForStdLibVersioning. + +using ::testing::internal::CanonicalizeForStdLibVersioning; + +TEST(CanonicalizeForStdLibVersioning, LeavesUnversionedNamesUnchanged) { + EXPECT_EQ("std::bind", CanonicalizeForStdLibVersioning("std::bind")); + EXPECT_EQ("std::_", CanonicalizeForStdLibVersioning("std::_")); + EXPECT_EQ("std::__foo", CanonicalizeForStdLibVersioning("std::__foo")); + EXPECT_EQ("gtl::__1::x", CanonicalizeForStdLibVersioning("gtl::__1::x")); + EXPECT_EQ("__1::x", CanonicalizeForStdLibVersioning("__1::x")); + EXPECT_EQ("::__1::x", CanonicalizeForStdLibVersioning("::__1::x")); +} + +TEST(CanonicalizeForStdLibVersioning, ElidesDoubleUnderNames) { + EXPECT_EQ("std::bind", CanonicalizeForStdLibVersioning("std::__1::bind")); + EXPECT_EQ("std::_", CanonicalizeForStdLibVersioning("std::__1::_")); + + EXPECT_EQ("std::bind", CanonicalizeForStdLibVersioning("std::__g::bind")); + EXPECT_EQ("std::_", CanonicalizeForStdLibVersioning("std::__g::_")); + + EXPECT_EQ("std::bind", + CanonicalizeForStdLibVersioning("std::__google::bind")); + EXPECT_EQ("std::_", CanonicalizeForStdLibVersioning("std::__google::_")); +} + +// Tests FormatTimeInMillisAsSeconds(). + +TEST(FormatTimeInMillisAsSecondsTest, FormatsZero) { + EXPECT_EQ("0", FormatTimeInMillisAsSeconds(0)); +} + +TEST(FormatTimeInMillisAsSecondsTest, FormatsPositiveNumber) { + EXPECT_EQ("0.003", FormatTimeInMillisAsSeconds(3)); + EXPECT_EQ("0.01", FormatTimeInMillisAsSeconds(10)); + EXPECT_EQ("0.2", FormatTimeInMillisAsSeconds(200)); + EXPECT_EQ("1.2", FormatTimeInMillisAsSeconds(1200)); + EXPECT_EQ("3", FormatTimeInMillisAsSeconds(3000)); +} + +TEST(FormatTimeInMillisAsSecondsTest, FormatsNegativeNumber) { + EXPECT_EQ("-0.003", FormatTimeInMillisAsSeconds(-3)); + EXPECT_EQ("-0.01", FormatTimeInMillisAsSeconds(-10)); + EXPECT_EQ("-0.2", FormatTimeInMillisAsSeconds(-200)); + EXPECT_EQ("-1.2", FormatTimeInMillisAsSeconds(-1200)); + EXPECT_EQ("-3", FormatTimeInMillisAsSeconds(-3000)); +} + +// Tests FormatEpochTimeInMillisAsIso8601(). The correctness of conversion +// for particular dates below was verified in Python using +// datetime.datetime.fromutctimestamp(<timetamp>/1000). + +// FormatEpochTimeInMillisAsIso8601 depends on the current timezone, so we +// have to set up a particular timezone to obtain predictable results. 
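Before the fixture below, it may help to see where the expected strings come from. This sketch (an illustration, not part of the test) formats the same epoch timestamp with the C library's gmtime(), which always decomposes in UTC and ignores TZ; that is roughly the behavior the fixture forces onto FormatEpochTimeInMillisAsIso8601 by pinning TZ to UTC+00.

#include <cstdio>
#include <ctime>

int main() {
  // 1320087162 is the epoch-seconds value used in PrintsTwoDigitSegments.
  const std::time_t seconds = 1320087162;
  const std::tm* utc = std::gmtime(&seconds);  // always UTC, independent of TZ
  char buf[32];
  std::strftime(buf, sizeof(buf), "%Y-%m-%dT%H:%M:%S", utc);
  std::printf("%s.000\n", buf);  // prints 2011-10-31T18:52:42.000
  return 0;
}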
+class FormatEpochTimeInMillisAsIso8601Test : public Test { + public: + // On Cygwin, GCC doesn't allow unqualified integer literals to exceed + // 32 bits, even when 64-bit integer types are available. We have to + // force the constants to have a 64-bit type here. + static const TimeInMillis kMillisPerSec = 1000; + + private: + void SetUp() override { + saved_tz_ = nullptr; + + GTEST_DISABLE_MSC_DEPRECATED_PUSH_(/* getenv, strdup: deprecated */) + if (getenv("TZ")) + saved_tz_ = strdup(getenv("TZ")); + GTEST_DISABLE_MSC_DEPRECATED_POP_() + + // Set up the time zone for FormatEpochTimeInMillisAsIso8601 to use. We + // cannot use the local time zone because the function's output depends + // on the time zone. + SetTimeZone("UTC+00"); + } + + void TearDown() override { + SetTimeZone(saved_tz_); + free(const_cast<char*>(saved_tz_)); + saved_tz_ = nullptr; + } + + static void SetTimeZone(const char* time_zone) { + // tzset() distinguishes between the TZ variable being present and empty + // and not being present, so we have to consider the case of time_zone + // being NULL. +#if _MSC_VER || GTEST_OS_WINDOWS_MINGW + // ...Unless it's MSVC, whose standard library's _putenv doesn't + // distinguish between an empty and a missing variable. + const std::string env_var = + std::string("TZ=") + (time_zone ? time_zone : ""); + _putenv(env_var.c_str()); + GTEST_DISABLE_MSC_WARNINGS_PUSH_(4996 /* deprecated function */) + tzset(); + GTEST_DISABLE_MSC_WARNINGS_POP_() +#else + if (time_zone) { + setenv(("TZ"), time_zone, 1); + } else { + unsetenv("TZ"); + } + tzset(); +#endif + } + + const char* saved_tz_; +}; + +const TimeInMillis FormatEpochTimeInMillisAsIso8601Test::kMillisPerSec; + +TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsTwoDigitSegments) { + EXPECT_EQ("2011-10-31T18:52:42.000", + FormatEpochTimeInMillisAsIso8601(1320087162 * kMillisPerSec)); +} + +TEST_F(FormatEpochTimeInMillisAsIso8601Test, IncludesMillisecondsAfterDot) { + EXPECT_EQ( + "2011-10-31T18:52:42.234", + FormatEpochTimeInMillisAsIso8601(1320087162 * kMillisPerSec + 234)); +} + +TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsLeadingZeroes) { + EXPECT_EQ("2011-09-03T05:07:02.000", + FormatEpochTimeInMillisAsIso8601(1315026422 * kMillisPerSec)); +} + +TEST_F(FormatEpochTimeInMillisAsIso8601Test, Prints24HourTime) { + EXPECT_EQ("2011-09-28T17:08:22.000", + FormatEpochTimeInMillisAsIso8601(1317229702 * kMillisPerSec)); +} + +TEST_F(FormatEpochTimeInMillisAsIso8601Test, PrintsEpochStart) { + EXPECT_EQ("1970-01-01T00:00:00.000", FormatEpochTimeInMillisAsIso8601(0)); +} + +# ifdef __BORLANDC__ +// Silences warnings: "Condition is always true", "Unreachable code" +# pragma option push -w-ccc -w-rch +# endif + +// Tests that the LHS of EXPECT_EQ or ASSERT_EQ can be used as a null literal +// when the RHS is a pointer type. 
+TEST(NullLiteralTest, LHSAllowsNullLiterals) { + EXPECT_EQ(0, static_cast<void*>(nullptr)); // NOLINT + ASSERT_EQ(0, static_cast<void*>(nullptr)); // NOLINT + EXPECT_EQ(NULL, static_cast<void*>(nullptr)); // NOLINT + ASSERT_EQ(NULL, static_cast<void*>(nullptr)); // NOLINT + EXPECT_EQ(nullptr, static_cast<void*>(nullptr)); + ASSERT_EQ(nullptr, static_cast<void*>(nullptr)); + + const int* const p = nullptr; + EXPECT_EQ(0, p); // NOLINT + ASSERT_EQ(0, p); // NOLINT + EXPECT_EQ(NULL, p); // NOLINT + ASSERT_EQ(NULL, p); // NOLINT + EXPECT_EQ(nullptr, p); + ASSERT_EQ(nullptr, p); +} + +struct ConvertToAll { + template <typename T> + operator T() const { // NOLINT + return T(); + } +}; + +struct ConvertToPointer { + template <class T> + operator T*() const { // NOLINT + return nullptr; + } +}; + +struct ConvertToAllButNoPointers { + template <typename T, + typename std::enable_if<!std::is_pointer<T>::value, int>::type = 0> + operator T() const { // NOLINT + return T(); + } +}; + +struct MyType {}; +inline bool operator==(MyType const&, MyType const&) { return true; } + +TEST(NullLiteralTest, ImplicitConversion) { + EXPECT_EQ(ConvertToPointer{}, static_cast<void*>(nullptr)); +#if !defined(__GNUC__) || defined(__clang__) + // Disabled due to GCC bug gcc.gnu.org/PR89580 + EXPECT_EQ(ConvertToAll{}, static_cast<void*>(nullptr)); +#endif + EXPECT_EQ(ConvertToAll{}, MyType{}); + EXPECT_EQ(ConvertToAllButNoPointers{}, MyType{}); +} + +#ifdef __clang__ +#pragma clang diagnostic push +#if __has_warning("-Wzero-as-null-pointer-constant") +#pragma clang diagnostic error "-Wzero-as-null-pointer-constant" +#endif +#endif + +TEST(NullLiteralTest, NoConversionNoWarning) { + // Test that gtests detection and handling of null pointer constants + // doesn't trigger a warning when '0' isn't actually used as null. + EXPECT_EQ(0, 0); + ASSERT_EQ(0, 0); +} + +#ifdef __clang__ +#pragma clang diagnostic pop +#endif + +# ifdef __BORLANDC__ +// Restores warnings after previous "#pragma option push" suppressed them. +# pragma option pop +# endif + +// +// Tests CodePointToUtf8(). + +// Tests that the NUL character L'\0' is encoded correctly. +TEST(CodePointToUtf8Test, CanEncodeNul) { + EXPECT_EQ("", CodePointToUtf8(L'\0')); +} + +// Tests that ASCII characters are encoded correctly. +TEST(CodePointToUtf8Test, CanEncodeAscii) { + EXPECT_EQ("a", CodePointToUtf8(L'a')); + EXPECT_EQ("Z", CodePointToUtf8(L'Z')); + EXPECT_EQ("&", CodePointToUtf8(L'&')); + EXPECT_EQ("\x7F", CodePointToUtf8(L'\x7F')); +} + +// Tests that Unicode code-points that have 8 to 11 bits are encoded +// as 110xxxxx 10xxxxxx. +TEST(CodePointToUtf8Test, CanEncode8To11Bits) { + // 000 1101 0011 => 110-00011 10-010011 + EXPECT_EQ("\xC3\x93", CodePointToUtf8(L'\xD3')); + + // 101 0111 0110 => 110-10101 10-110110 + // Some compilers (e.g., GCC on MinGW) cannot handle non-ASCII codepoints + // in wide strings and wide chars. In order to accommodate them, we have to + // introduce such character constants as integers. + EXPECT_EQ("\xD5\xB6", + CodePointToUtf8(static_cast<wchar_t>(0x576))); +} + +// Tests that Unicode code-points that have 12 to 16 bits are encoded +// as 1110xxxx 10xxxxxx 10xxxxxx. 
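The shift-and-mask arithmetic behind the bit patterns quoted in these comments can be checked standalone. EncodeUtf8 below is a hypothetical helper, not the CodePointToUtf8 under test; the asserted byte sequences are the same ones expected by the CanEncode8To11Bits test above and by the CanEncode12To16Bits and CanEncode17To21Bits tests that follow this sketch.

#include <cassert>
#include <cstdint>
#include <string>

// Packs one code point into 1-4 bytes using the quoted bit patterns.
std::string EncodeUtf8(uint32_t cp) {
  std::string out;
  if (cp <= 0x7F) {           // 0xxxxxxx
    out += static_cast<char>(cp);
  } else if (cp <= 0x7FF) {   // 110xxxxx 10xxxxxx
    out += static_cast<char>(0xC0 | (cp >> 6));
    out += static_cast<char>(0x80 | (cp & 0x3F));
  } else if (cp <= 0xFFFF) {  // 1110xxxx 10xxxxxx 10xxxxxx
    out += static_cast<char>(0xE0 | (cp >> 12));
    out += static_cast<char>(0x80 | ((cp >> 6) & 0x3F));
    out += static_cast<char>(0x80 | (cp & 0x3F));
  } else {                    // 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx
    out += static_cast<char>(0xF0 | (cp >> 18));
    out += static_cast<char>(0x80 | ((cp >> 12) & 0x3F));
    out += static_cast<char>(0x80 | ((cp >> 6) & 0x3F));
    out += static_cast<char>(0x80 | (cp & 0x3F));
  }
  return out;
}

int main() {
  assert(EncodeUtf8(0xD3) == "\xC3\x93");             // 000 1101 0011, 2 bytes
  assert(EncodeUtf8(0x8D3) == "\xE0\xA3\x93");        // 0000 1000 1101 0011, 3 bytes
  assert(EncodeUtf8(0xC74D) == "\xEC\x9D\x8D");       // 1100 0111 0100 1101, 3 bytes
  assert(EncodeUtf8(0x10400) == "\xF0\x90\x90\x80");  // 17-21 bit case, 4 bytes
  return 0;
}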
+TEST(CodePointToUtf8Test, CanEncode12To16Bits) { + // 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011 + EXPECT_EQ("\xE0\xA3\x93", + CodePointToUtf8(static_cast<wchar_t>(0x8D3))); + + // 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101 + EXPECT_EQ("\xEC\x9D\x8D", + CodePointToUtf8(static_cast<wchar_t>(0xC74D))); +} + +#if !GTEST_WIDE_STRING_USES_UTF16_ +// Tests in this group require a wchar_t to hold > 16 bits, and thus +// are skipped on Windows, and Cygwin, where a wchar_t is +// 16-bit wide. This code may not compile on those systems. + +// Tests that Unicode code-points that have 17 to 21 bits are encoded +// as 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx. +TEST(CodePointToUtf8Test, CanEncode17To21Bits) { + // 0 0001 0000 1000 1101 0011 => 11110-000 10-010000 10-100011 10-010011 + EXPECT_EQ("\xF0\x90\xA3\x93", CodePointToUtf8(L'\x108D3')); + + // 0 0001 0000 0100 0000 0000 => 11110-000 10-010000 10-010000 10-000000 + EXPECT_EQ("\xF0\x90\x90\x80", CodePointToUtf8(L'\x10400')); + + // 1 0000 1000 0110 0011 0100 => 11110-100 10-001000 10-011000 10-110100 + EXPECT_EQ("\xF4\x88\x98\xB4", CodePointToUtf8(L'\x108634')); +} + +// Tests that encoding an invalid code-point generates the expected result. +TEST(CodePointToUtf8Test, CanEncodeInvalidCodePoint) { + EXPECT_EQ("(Invalid Unicode 0x1234ABCD)", CodePointToUtf8(L'\x1234ABCD')); +} + +#endif // !GTEST_WIDE_STRING_USES_UTF16_ + +// Tests WideStringToUtf8(). + +// Tests that the NUL character L'\0' is encoded correctly. +TEST(WideStringToUtf8Test, CanEncodeNul) { + EXPECT_STREQ("", WideStringToUtf8(L"", 0).c_str()); + EXPECT_STREQ("", WideStringToUtf8(L"", -1).c_str()); +} + +// Tests that ASCII strings are encoded correctly. +TEST(WideStringToUtf8Test, CanEncodeAscii) { + EXPECT_STREQ("a", WideStringToUtf8(L"a", 1).c_str()); + EXPECT_STREQ("ab", WideStringToUtf8(L"ab", 2).c_str()); + EXPECT_STREQ("a", WideStringToUtf8(L"a", -1).c_str()); + EXPECT_STREQ("ab", WideStringToUtf8(L"ab", -1).c_str()); +} + +// Tests that Unicode code-points that have 8 to 11 bits are encoded +// as 110xxxxx 10xxxxxx. +TEST(WideStringToUtf8Test, CanEncode8To11Bits) { + // 000 1101 0011 => 110-00011 10-010011 + EXPECT_STREQ("\xC3\x93", WideStringToUtf8(L"\xD3", 1).c_str()); + EXPECT_STREQ("\xC3\x93", WideStringToUtf8(L"\xD3", -1).c_str()); + + // 101 0111 0110 => 110-10101 10-110110 + const wchar_t s[] = { 0x576, '\0' }; + EXPECT_STREQ("\xD5\xB6", WideStringToUtf8(s, 1).c_str()); + EXPECT_STREQ("\xD5\xB6", WideStringToUtf8(s, -1).c_str()); +} + +// Tests that Unicode code-points that have 12 to 16 bits are encoded +// as 1110xxxx 10xxxxxx 10xxxxxx. +TEST(WideStringToUtf8Test, CanEncode12To16Bits) { + // 0000 1000 1101 0011 => 1110-0000 10-100011 10-010011 + const wchar_t s1[] = { 0x8D3, '\0' }; + EXPECT_STREQ("\xE0\xA3\x93", WideStringToUtf8(s1, 1).c_str()); + EXPECT_STREQ("\xE0\xA3\x93", WideStringToUtf8(s1, -1).c_str()); + + // 1100 0111 0100 1101 => 1110-1100 10-011101 10-001101 + const wchar_t s2[] = { 0xC74D, '\0' }; + EXPECT_STREQ("\xEC\x9D\x8D", WideStringToUtf8(s2, 1).c_str()); + EXPECT_STREQ("\xEC\x9D\x8D", WideStringToUtf8(s2, -1).c_str()); +} + +// Tests that the conversion stops when the function encounters \0 character. +TEST(WideStringToUtf8Test, StopsOnNulCharacter) { + EXPECT_STREQ("ABC", WideStringToUtf8(L"ABC\0XYZ", 100).c_str()); +} + +// Tests that the conversion stops when the function reaches the limit +// specified by the 'length' parameter. 
+TEST(WideStringToUtf8Test, StopsWhenLengthLimitReached) { + EXPECT_STREQ("ABC", WideStringToUtf8(L"ABCDEF", 3).c_str()); +} + +#if !GTEST_WIDE_STRING_USES_UTF16_ +// Tests that Unicode code-points that have 17 to 21 bits are encoded +// as 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx. This code may not compile +// on the systems using UTF-16 encoding. +TEST(WideStringToUtf8Test, CanEncode17To21Bits) { + // 0 0001 0000 1000 1101 0011 => 11110-000 10-010000 10-100011 10-010011 + EXPECT_STREQ("\xF0\x90\xA3\x93", WideStringToUtf8(L"\x108D3", 1).c_str()); + EXPECT_STREQ("\xF0\x90\xA3\x93", WideStringToUtf8(L"\x108D3", -1).c_str()); + + // 1 0000 1000 0110 0011 0100 => 11110-100 10-001000 10-011000 10-110100 + EXPECT_STREQ("\xF4\x88\x98\xB4", WideStringToUtf8(L"\x108634", 1).c_str()); + EXPECT_STREQ("\xF4\x88\x98\xB4", WideStringToUtf8(L"\x108634", -1).c_str()); +} + +// Tests that encoding an invalid code-point generates the expected result. +TEST(WideStringToUtf8Test, CanEncodeInvalidCodePoint) { + EXPECT_STREQ("(Invalid Unicode 0xABCDFF)", + WideStringToUtf8(L"\xABCDFF", -1).c_str()); +} +#else // !GTEST_WIDE_STRING_USES_UTF16_ +// Tests that surrogate pairs are encoded correctly on the systems using +// UTF-16 encoding in the wide strings. +TEST(WideStringToUtf8Test, CanEncodeValidUtf16SUrrogatePairs) { + const wchar_t s[] = { 0xD801, 0xDC00, '\0' }; + EXPECT_STREQ("\xF0\x90\x90\x80", WideStringToUtf8(s, -1).c_str()); +} + +// Tests that encoding an invalid UTF-16 surrogate pair +// generates the expected result. +TEST(WideStringToUtf8Test, CanEncodeInvalidUtf16SurrogatePair) { + // Leading surrogate is at the end of the string. + const wchar_t s1[] = { 0xD800, '\0' }; + EXPECT_STREQ("\xED\xA0\x80", WideStringToUtf8(s1, -1).c_str()); + // Leading surrogate is not followed by the trailing surrogate. + const wchar_t s2[] = { 0xD800, 'M', '\0' }; + EXPECT_STREQ("\xED\xA0\x80M", WideStringToUtf8(s2, -1).c_str()); + // Trailing surrogate appearas without a leading surrogate. + const wchar_t s3[] = { 0xDC00, 'P', 'Q', 'R', '\0' }; + EXPECT_STREQ("\xED\xB0\x80PQR", WideStringToUtf8(s3, -1).c_str()); +} +#endif // !GTEST_WIDE_STRING_USES_UTF16_ + +// Tests that codepoint concatenation works correctly. +#if !GTEST_WIDE_STRING_USES_UTF16_ +TEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) { + const wchar_t s[] = { 0x108634, 0xC74D, '\n', 0x576, 0x8D3, 0x108634, '\0'}; + EXPECT_STREQ( + "\xF4\x88\x98\xB4" + "\xEC\x9D\x8D" + "\n" + "\xD5\xB6" + "\xE0\xA3\x93" + "\xF4\x88\x98\xB4", + WideStringToUtf8(s, -1).c_str()); +} +#else +TEST(WideStringToUtf8Test, ConcatenatesCodepointsCorrectly) { + const wchar_t s[] = { 0xC74D, '\n', 0x576, 0x8D3, '\0'}; + EXPECT_STREQ( + "\xEC\x9D\x8D" "\n" "\xD5\xB6" "\xE0\xA3\x93", + WideStringToUtf8(s, -1).c_str()); +} +#endif // !GTEST_WIDE_STRING_USES_UTF16_ + +// Tests the Random class. 
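+
+// For orientation: the tests below exercise a small PRNG that offers a seed
+// constructor, Generate(range) returning values in [0, range), and Reseed().
+// A minimal stand-in with the same interface -- a plain linear congruential
+// generator with illustrative constants, not necessarily gtest's actual
+// implementation -- could look like this:
+class ToyRandom {
+ public:
+  explicit ToyRandom(uint32_t seed) : state_(seed) {}
+  void Reseed(uint32_t seed) { state_ = seed; }
+  // Returns a pseudo-random number in [0, range); callers must pass a
+  // positive range no larger than 2^31, the limit the death test checks.
+  uint32_t Generate(uint32_t range) {
+    state_ = static_cast<uint32_t>(1103515245ULL * state_ + 12345ULL) & 0x7FFFFFFFu;
+    return state_ % range;
+  }
+ private:
+  uint32_t state_;
+};
+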
+ +TEST(RandomDeathTest, GeneratesCrashesOnInvalidRange) { + testing::internal::Random random(42); + EXPECT_DEATH_IF_SUPPORTED( + random.Generate(0), + "Cannot generate a number in the range \\[0, 0\\)"); + EXPECT_DEATH_IF_SUPPORTED( + random.Generate(testing::internal::Random::kMaxRange + 1), + "Generation of a number in \\[0, 2147483649\\) was requested, " + "but this can only generate numbers in \\[0, 2147483648\\)"); +} + +TEST(RandomTest, GeneratesNumbersWithinRange) { + constexpr uint32_t kRange = 10000; + testing::internal::Random random(12345); + for (int i = 0; i < 10; i++) { + EXPECT_LT(random.Generate(kRange), kRange) << " for iteration " << i; + } + + testing::internal::Random random2(testing::internal::Random::kMaxRange); + for (int i = 0; i < 10; i++) { + EXPECT_LT(random2.Generate(kRange), kRange) << " for iteration " << i; + } +} + +TEST(RandomTest, RepeatsWhenReseeded) { + constexpr int kSeed = 123; + constexpr int kArraySize = 10; + constexpr uint32_t kRange = 10000; + uint32_t values[kArraySize]; + + testing::internal::Random random(kSeed); + for (int i = 0; i < kArraySize; i++) { + values[i] = random.Generate(kRange); + } + + random.Reseed(kSeed); + for (int i = 0; i < kArraySize; i++) { + EXPECT_EQ(values[i], random.Generate(kRange)) << " for iteration " << i; + } +} + +// Tests STL container utilities. + +// Tests CountIf(). + +static bool IsPositive(int n) { return n > 0; } + +TEST(ContainerUtilityTest, CountIf) { + std::vector<int> v; + EXPECT_EQ(0, CountIf(v, IsPositive)); // Works for an empty container. + + v.push_back(-1); + v.push_back(0); + EXPECT_EQ(0, CountIf(v, IsPositive)); // Works when no value satisfies. + + v.push_back(2); + v.push_back(-10); + v.push_back(10); + EXPECT_EQ(2, CountIf(v, IsPositive)); +} + +// Tests ForEach(). + +static int g_sum = 0; +static void Accumulate(int n) { g_sum += n; } + +TEST(ContainerUtilityTest, ForEach) { + std::vector<int> v; + g_sum = 0; + ForEach(v, Accumulate); + EXPECT_EQ(0, g_sum); // Works for an empty container; + + g_sum = 0; + v.push_back(1); + ForEach(v, Accumulate); + EXPECT_EQ(1, g_sum); // Works for a container with one element. + + g_sum = 0; + v.push_back(20); + v.push_back(300); + ForEach(v, Accumulate); + EXPECT_EQ(321, g_sum); +} + +// Tests GetElementOr(). 
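+
+// Conceptually, GetElementOr(v, i, default_value) returns v[i] when i is a
+// valid index and the fallback value otherwise; the test below also probes a
+// negative index.  A hypothetical free function with that contract, shown
+// here only for illustration:
+template <typename E>
+E GetElementOrSketch(const std::vector<E>& v, int i, E default_value) {
+  // Out-of-range indices, including negative ones, fall back to the default.
+  if (i < 0 || i >= static_cast<int>(v.size())) return default_value;
+  return v[static_cast<size_t>(i)];
+}
+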
+TEST(ContainerUtilityTest, GetElementOr) { + std::vector<char> a; + EXPECT_EQ('x', GetElementOr(a, 0, 'x')); + + a.push_back('a'); + a.push_back('b'); + EXPECT_EQ('a', GetElementOr(a, 0, 'x')); + EXPECT_EQ('b', GetElementOr(a, 1, 'x')); + EXPECT_EQ('x', GetElementOr(a, -2, 'x')); + EXPECT_EQ('x', GetElementOr(a, 2, 'x')); +} + +TEST(ContainerUtilityDeathTest, ShuffleRange) { + std::vector<int> a; + a.push_back(0); + a.push_back(1); + a.push_back(2); + testing::internal::Random random(1); + + EXPECT_DEATH_IF_SUPPORTED( + ShuffleRange(&random, -1, 1, &a), + "Invalid shuffle range start -1: must be in range \\[0, 3\\]"); + EXPECT_DEATH_IF_SUPPORTED( + ShuffleRange(&random, 4, 4, &a), + "Invalid shuffle range start 4: must be in range \\[0, 3\\]"); + EXPECT_DEATH_IF_SUPPORTED( + ShuffleRange(&random, 3, 2, &a), + "Invalid shuffle range finish 2: must be in range \\[3, 3\\]"); + EXPECT_DEATH_IF_SUPPORTED( + ShuffleRange(&random, 3, 4, &a), + "Invalid shuffle range finish 4: must be in range \\[3, 3\\]"); +} + +class VectorShuffleTest : public Test { + protected: + static const size_t kVectorSize = 20; + + VectorShuffleTest() : random_(1) { + for (int i = 0; i < static_cast<int>(kVectorSize); i++) { + vector_.push_back(i); + } + } + + static bool VectorIsCorrupt(const TestingVector& vector) { + if (kVectorSize != vector.size()) { + return true; + } + + bool found_in_vector[kVectorSize] = { false }; + for (size_t i = 0; i < vector.size(); i++) { + const int e = vector[i]; + if (e < 0 || e >= static_cast<int>(kVectorSize) || found_in_vector[e]) { + return true; + } + found_in_vector[e] = true; + } + + // Vector size is correct, elements' range is correct, no + // duplicate elements. Therefore no corruption has occurred. + return false; + } + + static bool VectorIsNotCorrupt(const TestingVector& vector) { + return !VectorIsCorrupt(vector); + } + + static bool RangeIsShuffled(const TestingVector& vector, int begin, int end) { + for (int i = begin; i < end; i++) { + if (i != vector[static_cast<size_t>(i)]) { + return true; + } + } + return false; + } + + static bool RangeIsUnshuffled( + const TestingVector& vector, int begin, int end) { + return !RangeIsShuffled(vector, begin, end); + } + + static bool VectorIsShuffled(const TestingVector& vector) { + return RangeIsShuffled(vector, 0, static_cast<int>(vector.size())); + } + + static bool VectorIsUnshuffled(const TestingVector& vector) { + return !VectorIsShuffled(vector); + } + + testing::internal::Random random_; + TestingVector vector_; +}; // class VectorShuffleTest + +const size_t VectorShuffleTest::kVectorSize; + +TEST_F(VectorShuffleTest, HandlesEmptyRange) { + // Tests an empty range at the beginning... + ShuffleRange(&random_, 0, 0, &vector_); + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + ASSERT_PRED1(VectorIsUnshuffled, vector_); + + // ...in the middle... + ShuffleRange(&random_, kVectorSize/2, kVectorSize/2, &vector_); + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + ASSERT_PRED1(VectorIsUnshuffled, vector_); + + // ...at the end... + ShuffleRange(&random_, kVectorSize - 1, kVectorSize - 1, &vector_); + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + ASSERT_PRED1(VectorIsUnshuffled, vector_); + + // ...and past the end. + ShuffleRange(&random_, kVectorSize, kVectorSize, &vector_); + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + ASSERT_PRED1(VectorIsUnshuffled, vector_); +} + +TEST_F(VectorShuffleTest, HandlesRangeOfSizeOne) { + // Tests a size one range at the beginning... 
+ ShuffleRange(&random_, 0, 1, &vector_); + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + ASSERT_PRED1(VectorIsUnshuffled, vector_); + + // ...in the middle... + ShuffleRange(&random_, kVectorSize/2, kVectorSize/2 + 1, &vector_); + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + ASSERT_PRED1(VectorIsUnshuffled, vector_); + + // ...and at the end. + ShuffleRange(&random_, kVectorSize - 1, kVectorSize, &vector_); + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + ASSERT_PRED1(VectorIsUnshuffled, vector_); +} + +// Because we use our own random number generator and a fixed seed, +// we can guarantee that the following "random" tests will succeed. + +TEST_F(VectorShuffleTest, ShufflesEntireVector) { + Shuffle(&random_, &vector_); + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + EXPECT_FALSE(VectorIsUnshuffled(vector_)) << vector_; + + // Tests the first and last elements in particular to ensure that + // there are no off-by-one problems in our shuffle algorithm. + EXPECT_NE(0, vector_[0]); + EXPECT_NE(static_cast<int>(kVectorSize - 1), vector_[kVectorSize - 1]); +} + +TEST_F(VectorShuffleTest, ShufflesStartOfVector) { + const int kRangeSize = kVectorSize/2; + + ShuffleRange(&random_, 0, kRangeSize, &vector_); + + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + EXPECT_PRED3(RangeIsShuffled, vector_, 0, kRangeSize); + EXPECT_PRED3(RangeIsUnshuffled, vector_, kRangeSize, + static_cast<int>(kVectorSize)); +} + +TEST_F(VectorShuffleTest, ShufflesEndOfVector) { + const int kRangeSize = kVectorSize / 2; + ShuffleRange(&random_, kRangeSize, kVectorSize, &vector_); + + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize); + EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, + static_cast<int>(kVectorSize)); +} + +TEST_F(VectorShuffleTest, ShufflesMiddleOfVector) { + const int kRangeSize = static_cast<int>(kVectorSize) / 3; + ShuffleRange(&random_, kRangeSize, 2*kRangeSize, &vector_); + + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + EXPECT_PRED3(RangeIsUnshuffled, vector_, 0, kRangeSize); + EXPECT_PRED3(RangeIsShuffled, vector_, kRangeSize, 2*kRangeSize); + EXPECT_PRED3(RangeIsUnshuffled, vector_, 2 * kRangeSize, + static_cast<int>(kVectorSize)); +} + +TEST_F(VectorShuffleTest, ShufflesRepeatably) { + TestingVector vector2; + for (size_t i = 0; i < kVectorSize; i++) { + vector2.push_back(static_cast<int>(i)); + } + + random_.Reseed(1234); + Shuffle(&random_, &vector_); + random_.Reseed(1234); + Shuffle(&random_, &vector2); + + ASSERT_PRED1(VectorIsNotCorrupt, vector_); + ASSERT_PRED1(VectorIsNotCorrupt, vector2); + + for (size_t i = 0; i < kVectorSize; i++) { + EXPECT_EQ(vector_[i], vector2[i]) << " where i is " << i; + } +} + +// Tests the size of the AssertHelper class. + +TEST(AssertHelperTest, AssertHelperIsSmall) { + // To avoid breaking clients that use lots of assertions in one + // function, we cannot grow the size of AssertHelper. + EXPECT_LE(sizeof(testing::internal::AssertHelper), sizeof(void*)); +} + +// Tests String::EndsWithCaseInsensitive(). 
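+
+// Contract exercised below: EndsWithCaseInsensitive(str, suffix) ignores
+// ASCII case and treats the empty string as a suffix of everything.  A
+// hypothetical standalone helper with that behaviour (illustration only, not
+// the String class implementation):
+inline bool EndsWithCaseInsensitiveSketch(const std::string& str,
+                                          const std::string& suffix) {
+  if (suffix.size() > str.size()) return false;
+  for (size_t i = 0; i < suffix.size(); ++i) {
+    char a = str[str.size() - suffix.size() + i];
+    char b = suffix[i];
+    // Fold ASCII upper case to lower case before comparing.
+    if (a >= 'A' && a <= 'Z') a = static_cast<char>(a - 'A' + 'a');
+    if (b >= 'A' && b <= 'Z') b = static_cast<char>(b - 'A' + 'a');
+    if (a != b) return false;
+  }
+  return true;
+}
+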
+TEST(StringTest, EndsWithCaseInsensitive) { + EXPECT_TRUE(String::EndsWithCaseInsensitive("foobar", "BAR")); + EXPECT_TRUE(String::EndsWithCaseInsensitive("foobaR", "bar")); + EXPECT_TRUE(String::EndsWithCaseInsensitive("foobar", "")); + EXPECT_TRUE(String::EndsWithCaseInsensitive("", "")); + + EXPECT_FALSE(String::EndsWithCaseInsensitive("Foobar", "foo")); + EXPECT_FALSE(String::EndsWithCaseInsensitive("foobar", "Foo")); + EXPECT_FALSE(String::EndsWithCaseInsensitive("", "foo")); +} + +// C++Builder's preprocessor is buggy; it fails to expand macros that +// appear in macro parameters after wide char literals. Provide an alias +// for NULL as a workaround. +static const wchar_t* const kNull = nullptr; + +// Tests String::CaseInsensitiveWideCStringEquals +TEST(StringTest, CaseInsensitiveWideCStringEquals) { + EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(nullptr, nullptr)); + EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(kNull, L"")); + EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(L"", kNull)); + EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(kNull, L"foobar")); + EXPECT_FALSE(String::CaseInsensitiveWideCStringEquals(L"foobar", kNull)); + EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"foobar", L"foobar")); + EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"foobar", L"FOOBAR")); + EXPECT_TRUE(String::CaseInsensitiveWideCStringEquals(L"FOOBAR", L"foobar")); +} + +#if GTEST_OS_WINDOWS + +// Tests String::ShowWideCString(). +TEST(StringTest, ShowWideCString) { + EXPECT_STREQ("(null)", + String::ShowWideCString(NULL).c_str()); + EXPECT_STREQ("", String::ShowWideCString(L"").c_str()); + EXPECT_STREQ("foo", String::ShowWideCString(L"foo").c_str()); +} + +# if GTEST_OS_WINDOWS_MOBILE +TEST(StringTest, AnsiAndUtf16Null) { + EXPECT_EQ(NULL, String::AnsiToUtf16(NULL)); + EXPECT_EQ(NULL, String::Utf16ToAnsi(NULL)); +} + +TEST(StringTest, AnsiAndUtf16ConvertBasic) { + const char* ansi = String::Utf16ToAnsi(L"str"); + EXPECT_STREQ("str", ansi); + delete [] ansi; + const WCHAR* utf16 = String::AnsiToUtf16("str"); + EXPECT_EQ(0, wcsncmp(L"str", utf16, 3)); + delete [] utf16; +} + +TEST(StringTest, AnsiAndUtf16ConvertPathChars) { + const char* ansi = String::Utf16ToAnsi(L".:\\ \"*?"); + EXPECT_STREQ(".:\\ \"*?", ansi); + delete [] ansi; + const WCHAR* utf16 = String::AnsiToUtf16(".:\\ \"*?"); + EXPECT_EQ(0, wcsncmp(L".:\\ \"*?", utf16, 3)); + delete [] utf16; +} +# endif // GTEST_OS_WINDOWS_MOBILE + +#endif // GTEST_OS_WINDOWS + +// Tests TestProperty construction. +TEST(TestPropertyTest, StringValue) { + TestProperty property("key", "1"); + EXPECT_STREQ("key", property.key()); + EXPECT_STREQ("1", property.value()); +} + +// Tests TestProperty replacing a value. +TEST(TestPropertyTest, ReplaceStringValue) { + TestProperty property("key", "1"); + EXPECT_STREQ("1", property.value()); + property.SetValue("2"); + EXPECT_STREQ("2", property.value()); +} + +// AddFatalFailure() and AddNonfatalFailure() must be stand-alone +// functions (i.e. their definitions cannot be inlined at the call +// sites), or C++Builder won't compile the code. +static void AddFatalFailure() { + FAIL() << "Expected fatal failure."; +} + +static void AddNonfatalFailure() { + ADD_FAILURE() << "Expected non-fatal failure."; +} + +class ScopedFakeTestPartResultReporterTest : public Test { + public: // Must be public and not protected due to a bug in g++ 3.4.2. 
+ enum FailureMode { + FATAL_FAILURE, + NONFATAL_FAILURE + }; + static void AddFailure(FailureMode failure) { + if (failure == FATAL_FAILURE) { + AddFatalFailure(); + } else { + AddNonfatalFailure(); + } + } +}; + +// Tests that ScopedFakeTestPartResultReporter intercepts test +// failures. +TEST_F(ScopedFakeTestPartResultReporterTest, InterceptsTestFailures) { + TestPartResultArray results; + { + ScopedFakeTestPartResultReporter reporter( + ScopedFakeTestPartResultReporter::INTERCEPT_ONLY_CURRENT_THREAD, + &results); + AddFailure(NONFATAL_FAILURE); + AddFailure(FATAL_FAILURE); + } + + EXPECT_EQ(2, results.size()); + EXPECT_TRUE(results.GetTestPartResult(0).nonfatally_failed()); + EXPECT_TRUE(results.GetTestPartResult(1).fatally_failed()); +} + +TEST_F(ScopedFakeTestPartResultReporterTest, DeprecatedConstructor) { + TestPartResultArray results; + { + // Tests, that the deprecated constructor still works. + ScopedFakeTestPartResultReporter reporter(&results); + AddFailure(NONFATAL_FAILURE); + } + EXPECT_EQ(1, results.size()); +} + +#if GTEST_IS_THREADSAFE + +class ScopedFakeTestPartResultReporterWithThreadsTest + : public ScopedFakeTestPartResultReporterTest { + protected: + static void AddFailureInOtherThread(FailureMode failure) { + ThreadWithParam<FailureMode> thread(&AddFailure, failure, nullptr); + thread.Join(); + } +}; + +TEST_F(ScopedFakeTestPartResultReporterWithThreadsTest, + InterceptsTestFailuresInAllThreads) { + TestPartResultArray results; + { + ScopedFakeTestPartResultReporter reporter( + ScopedFakeTestPartResultReporter::INTERCEPT_ALL_THREADS, &results); + AddFailure(NONFATAL_FAILURE); + AddFailure(FATAL_FAILURE); + AddFailureInOtherThread(NONFATAL_FAILURE); + AddFailureInOtherThread(FATAL_FAILURE); + } + + EXPECT_EQ(4, results.size()); + EXPECT_TRUE(results.GetTestPartResult(0).nonfatally_failed()); + EXPECT_TRUE(results.GetTestPartResult(1).fatally_failed()); + EXPECT_TRUE(results.GetTestPartResult(2).nonfatally_failed()); + EXPECT_TRUE(results.GetTestPartResult(3).fatally_failed()); +} + +#endif // GTEST_IS_THREADSAFE + +// Tests EXPECT_FATAL_FAILURE{,ON_ALL_THREADS}. Makes sure that they +// work even if the failure is generated in a called function rather than +// the current context. + +typedef ScopedFakeTestPartResultReporterTest ExpectFatalFailureTest; + +TEST_F(ExpectFatalFailureTest, CatchesFatalFaliure) { + EXPECT_FATAL_FAILURE(AddFatalFailure(), "Expected fatal failure."); +} + +TEST_F(ExpectFatalFailureTest, AcceptsStdStringObject) { + EXPECT_FATAL_FAILURE(AddFatalFailure(), + ::std::string("Expected fatal failure.")); +} + +TEST_F(ExpectFatalFailureTest, CatchesFatalFailureOnAllThreads) { + // We have another test below to verify that the macro catches fatal + // failures generated on another thread. + EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFatalFailure(), + "Expected fatal failure."); +} + +#ifdef __BORLANDC__ +// Silences warnings: "Condition is always true" +# pragma option push -w-ccc +#endif + +// Tests that EXPECT_FATAL_FAILURE() can be used in a non-void +// function even when the statement in it contains ASSERT_*. + +int NonVoidFunction() { + EXPECT_FATAL_FAILURE(ASSERT_TRUE(false), ""); + EXPECT_FATAL_FAILURE_ON_ALL_THREADS(FAIL(), ""); + return 0; +} + +TEST_F(ExpectFatalFailureTest, CanBeUsedInNonVoidFunction) { + NonVoidFunction(); +} + +// Tests that EXPECT_FATAL_FAILURE(statement, ...) doesn't abort the +// current function even though 'statement' generates a fatal failure. 
+ +void DoesNotAbortHelper(bool* aborted) { + EXPECT_FATAL_FAILURE(ASSERT_TRUE(false), ""); + EXPECT_FATAL_FAILURE_ON_ALL_THREADS(FAIL(), ""); + + *aborted = false; +} + +#ifdef __BORLANDC__ +// Restores warnings after previous "#pragma option push" suppressed them. +# pragma option pop +#endif + +TEST_F(ExpectFatalFailureTest, DoesNotAbort) { + bool aborted = true; + DoesNotAbortHelper(&aborted); + EXPECT_FALSE(aborted); +} + +// Tests that the EXPECT_FATAL_FAILURE{,_ON_ALL_THREADS} accepts a +// statement that contains a macro which expands to code containing an +// unprotected comma. + +static int global_var = 0; +#define GTEST_USE_UNPROTECTED_COMMA_ global_var++, global_var++ + +TEST_F(ExpectFatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) { +#ifndef __BORLANDC__ + // ICE's in C++Builder. + EXPECT_FATAL_FAILURE({ + GTEST_USE_UNPROTECTED_COMMA_; + AddFatalFailure(); + }, ""); +#endif + + EXPECT_FATAL_FAILURE_ON_ALL_THREADS({ + GTEST_USE_UNPROTECTED_COMMA_; + AddFatalFailure(); + }, ""); +} + +// Tests EXPECT_NONFATAL_FAILURE{,ON_ALL_THREADS}. + +typedef ScopedFakeTestPartResultReporterTest ExpectNonfatalFailureTest; + +TEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailure) { + EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(), + "Expected non-fatal failure."); +} + +TEST_F(ExpectNonfatalFailureTest, AcceptsStdStringObject) { + EXPECT_NONFATAL_FAILURE(AddNonfatalFailure(), + ::std::string("Expected non-fatal failure.")); +} + +TEST_F(ExpectNonfatalFailureTest, CatchesNonfatalFailureOnAllThreads) { + // We have another test below to verify that the macro catches + // non-fatal failures generated on another thread. + EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS(AddNonfatalFailure(), + "Expected non-fatal failure."); +} + +// Tests that the EXPECT_NONFATAL_FAILURE{,_ON_ALL_THREADS} accepts a +// statement that contains a macro which expands to code containing an +// unprotected comma. +TEST_F(ExpectNonfatalFailureTest, AcceptsMacroThatExpandsToUnprotectedComma) { + EXPECT_NONFATAL_FAILURE({ + GTEST_USE_UNPROTECTED_COMMA_; + AddNonfatalFailure(); + }, ""); + + EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS({ + GTEST_USE_UNPROTECTED_COMMA_; + AddNonfatalFailure(); + }, ""); +} + +#if GTEST_IS_THREADSAFE + +typedef ScopedFakeTestPartResultReporterWithThreadsTest + ExpectFailureWithThreadsTest; + +TEST_F(ExpectFailureWithThreadsTest, ExpectFatalFailureOnAllThreads) { + EXPECT_FATAL_FAILURE_ON_ALL_THREADS(AddFailureInOtherThread(FATAL_FAILURE), + "Expected fatal failure."); +} + +TEST_F(ExpectFailureWithThreadsTest, ExpectNonFatalFailureOnAllThreads) { + EXPECT_NONFATAL_FAILURE_ON_ALL_THREADS( + AddFailureInOtherThread(NONFATAL_FAILURE), "Expected non-fatal failure."); +} + +#endif // GTEST_IS_THREADSAFE + +// Tests the TestProperty class. + +TEST(TestPropertyTest, ConstructorWorks) { + const TestProperty property("key", "value"); + EXPECT_STREQ("key", property.key()); + EXPECT_STREQ("value", property.value()); +} + +TEST(TestPropertyTest, SetValue) { + TestProperty property("key", "value_1"); + EXPECT_STREQ("key", property.key()); + property.SetValue("value_2"); + EXPECT_STREQ("key", property.key()); + EXPECT_STREQ("value_2", property.value()); +} + +// Tests the TestResult class + +// The test fixture for testing TestResult. +class TestResultTest : public Test { + protected: + typedef std::vector<TestPartResult> TPRVector; + + // We make use of 2 TestPartResult objects, + TestPartResult * pr1, * pr2; + + // ... and 3 TestResult objects. 
+ TestResult * r0, * r1, * r2; + + void SetUp() override { + // pr1 is for success. + pr1 = new TestPartResult(TestPartResult::kSuccess, + "foo/bar.cc", + 10, + "Success!"); + + // pr2 is for fatal failure. + pr2 = new TestPartResult(TestPartResult::kFatalFailure, + "foo/bar.cc", + -1, // This line number means "unknown" + "Failure!"); + + // Creates the TestResult objects. + r0 = new TestResult(); + r1 = new TestResult(); + r2 = new TestResult(); + + // In order to test TestResult, we need to modify its internal + // state, in particular the TestPartResult vector it holds. + // test_part_results() returns a const reference to this vector. + // We cast it to a non-const object s.t. it can be modified + TPRVector* results1 = const_cast<TPRVector*>( + &TestResultAccessor::test_part_results(*r1)); + TPRVector* results2 = const_cast<TPRVector*>( + &TestResultAccessor::test_part_results(*r2)); + + // r0 is an empty TestResult. + + // r1 contains a single SUCCESS TestPartResult. + results1->push_back(*pr1); + + // r2 contains a SUCCESS, and a FAILURE. + results2->push_back(*pr1); + results2->push_back(*pr2); + } + + void TearDown() override { + delete pr1; + delete pr2; + + delete r0; + delete r1; + delete r2; + } + + // Helper that compares two TestPartResults. + static void CompareTestPartResult(const TestPartResult& expected, + const TestPartResult& actual) { + EXPECT_EQ(expected.type(), actual.type()); + EXPECT_STREQ(expected.file_name(), actual.file_name()); + EXPECT_EQ(expected.line_number(), actual.line_number()); + EXPECT_STREQ(expected.summary(), actual.summary()); + EXPECT_STREQ(expected.message(), actual.message()); + EXPECT_EQ(expected.passed(), actual.passed()); + EXPECT_EQ(expected.failed(), actual.failed()); + EXPECT_EQ(expected.nonfatally_failed(), actual.nonfatally_failed()); + EXPECT_EQ(expected.fatally_failed(), actual.fatally_failed()); + } +}; + +// Tests TestResult::total_part_count(). +TEST_F(TestResultTest, total_part_count) { + ASSERT_EQ(0, r0->total_part_count()); + ASSERT_EQ(1, r1->total_part_count()); + ASSERT_EQ(2, r2->total_part_count()); +} + +// Tests TestResult::Passed(). +TEST_F(TestResultTest, Passed) { + ASSERT_TRUE(r0->Passed()); + ASSERT_TRUE(r1->Passed()); + ASSERT_FALSE(r2->Passed()); +} + +// Tests TestResult::Failed(). +TEST_F(TestResultTest, Failed) { + ASSERT_FALSE(r0->Failed()); + ASSERT_FALSE(r1->Failed()); + ASSERT_TRUE(r2->Failed()); +} + +// Tests TestResult::GetTestPartResult(). + +typedef TestResultTest TestResultDeathTest; + +TEST_F(TestResultDeathTest, GetTestPartResult) { + CompareTestPartResult(*pr1, r2->GetTestPartResult(0)); + CompareTestPartResult(*pr2, r2->GetTestPartResult(1)); + EXPECT_DEATH_IF_SUPPORTED(r2->GetTestPartResult(2), ""); + EXPECT_DEATH_IF_SUPPORTED(r2->GetTestPartResult(-1), ""); +} + +// Tests TestResult has no properties when none are added. +TEST(TestResultPropertyTest, NoPropertiesFoundWhenNoneAreAdded) { + TestResult test_result; + ASSERT_EQ(0, test_result.test_property_count()); +} + +// Tests TestResult has the expected property when added. +TEST(TestResultPropertyTest, OnePropertyFoundWhenAdded) { + TestResult test_result; + TestProperty property("key_1", "1"); + TestResultAccessor::RecordProperty(&test_result, "testcase", property); + ASSERT_EQ(1, test_result.test_property_count()); + const TestProperty& actual_property = test_result.GetTestProperty(0); + EXPECT_STREQ("key_1", actual_property.key()); + EXPECT_STREQ("1", actual_property.value()); +} + +// Tests TestResult has multiple properties when added. 
+TEST(TestResultPropertyTest, MultiplePropertiesFoundWhenAdded) { + TestResult test_result; + TestProperty property_1("key_1", "1"); + TestProperty property_2("key_2", "2"); + TestResultAccessor::RecordProperty(&test_result, "testcase", property_1); + TestResultAccessor::RecordProperty(&test_result, "testcase", property_2); + ASSERT_EQ(2, test_result.test_property_count()); + const TestProperty& actual_property_1 = test_result.GetTestProperty(0); + EXPECT_STREQ("key_1", actual_property_1.key()); + EXPECT_STREQ("1", actual_property_1.value()); + + const TestProperty& actual_property_2 = test_result.GetTestProperty(1); + EXPECT_STREQ("key_2", actual_property_2.key()); + EXPECT_STREQ("2", actual_property_2.value()); +} + +// Tests TestResult::RecordProperty() overrides values for duplicate keys. +TEST(TestResultPropertyTest, OverridesValuesForDuplicateKeys) { + TestResult test_result; + TestProperty property_1_1("key_1", "1"); + TestProperty property_2_1("key_2", "2"); + TestProperty property_1_2("key_1", "12"); + TestProperty property_2_2("key_2", "22"); + TestResultAccessor::RecordProperty(&test_result, "testcase", property_1_1); + TestResultAccessor::RecordProperty(&test_result, "testcase", property_2_1); + TestResultAccessor::RecordProperty(&test_result, "testcase", property_1_2); + TestResultAccessor::RecordProperty(&test_result, "testcase", property_2_2); + + ASSERT_EQ(2, test_result.test_property_count()); + const TestProperty& actual_property_1 = test_result.GetTestProperty(0); + EXPECT_STREQ("key_1", actual_property_1.key()); + EXPECT_STREQ("12", actual_property_1.value()); + + const TestProperty& actual_property_2 = test_result.GetTestProperty(1); + EXPECT_STREQ("key_2", actual_property_2.key()); + EXPECT_STREQ("22", actual_property_2.value()); +} + +// Tests TestResult::GetTestProperty(). +TEST(TestResultPropertyTest, GetTestProperty) { + TestResult test_result; + TestProperty property_1("key_1", "1"); + TestProperty property_2("key_2", "2"); + TestProperty property_3("key_3", "3"); + TestResultAccessor::RecordProperty(&test_result, "testcase", property_1); + TestResultAccessor::RecordProperty(&test_result, "testcase", property_2); + TestResultAccessor::RecordProperty(&test_result, "testcase", property_3); + + const TestProperty& fetched_property_1 = test_result.GetTestProperty(0); + const TestProperty& fetched_property_2 = test_result.GetTestProperty(1); + const TestProperty& fetched_property_3 = test_result.GetTestProperty(2); + + EXPECT_STREQ("key_1", fetched_property_1.key()); + EXPECT_STREQ("1", fetched_property_1.value()); + + EXPECT_STREQ("key_2", fetched_property_2.key()); + EXPECT_STREQ("2", fetched_property_2.value()); + + EXPECT_STREQ("key_3", fetched_property_3.key()); + EXPECT_STREQ("3", fetched_property_3.value()); + + EXPECT_DEATH_IF_SUPPORTED(test_result.GetTestProperty(3), ""); + EXPECT_DEATH_IF_SUPPORTED(test_result.GetTestProperty(-1), ""); +} + +// Tests the Test class. +// +// It's difficult to test every public method of this class (we are +// already stretching the limit of Google Test by using it to test itself!). +// Fortunately, we don't have to do that, as we are already testing +// the functionalities of the Test class extensively by using Google Test +// alone. +// +// Therefore, this section only contains one test. + +// Tests that GTestFlagSaver works on Windows and Mac. + +class GTestFlagSaverTest : public Test { + protected: + // Saves the Google Test flags such that we can restore them later, and + // then sets them to their default values. 
This will be called + // before the first test in this test case is run. + static void SetUpTestSuite() { + saver_ = new GTestFlagSaver; + + GTEST_FLAG(also_run_disabled_tests) = false; + GTEST_FLAG(break_on_failure) = false; + GTEST_FLAG(catch_exceptions) = false; + GTEST_FLAG(death_test_use_fork) = false; + GTEST_FLAG(color) = "auto"; + GTEST_FLAG(fail_fast) = false; + GTEST_FLAG(filter) = ""; + GTEST_FLAG(list_tests) = false; + GTEST_FLAG(output) = ""; + GTEST_FLAG(brief) = false; + GTEST_FLAG(print_time) = true; + GTEST_FLAG(random_seed) = 0; + GTEST_FLAG(repeat) = 1; + GTEST_FLAG(shuffle) = false; + GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth; + GTEST_FLAG(stream_result_to) = ""; + GTEST_FLAG(throw_on_failure) = false; + } + + // Restores the Google Test flags that the tests have modified. This will + // be called after the last test in this test case is run. + static void TearDownTestSuite() { + delete saver_; + saver_ = nullptr; + } + + // Verifies that the Google Test flags have their default values, and then + // modifies each of them. + void VerifyAndModifyFlags() { + EXPECT_FALSE(GTEST_FLAG(also_run_disabled_tests)); + EXPECT_FALSE(GTEST_FLAG(break_on_failure)); + EXPECT_FALSE(GTEST_FLAG(catch_exceptions)); + EXPECT_STREQ("auto", GTEST_FLAG(color).c_str()); + EXPECT_FALSE(GTEST_FLAG(death_test_use_fork)); + EXPECT_FALSE(GTEST_FLAG(fail_fast)); + EXPECT_STREQ("", GTEST_FLAG(filter).c_str()); + EXPECT_FALSE(GTEST_FLAG(list_tests)); + EXPECT_STREQ("", GTEST_FLAG(output).c_str()); + EXPECT_FALSE(GTEST_FLAG(brief)); + EXPECT_TRUE(GTEST_FLAG(print_time)); + EXPECT_EQ(0, GTEST_FLAG(random_seed)); + EXPECT_EQ(1, GTEST_FLAG(repeat)); + EXPECT_FALSE(GTEST_FLAG(shuffle)); + EXPECT_EQ(kMaxStackTraceDepth, GTEST_FLAG(stack_trace_depth)); + EXPECT_STREQ("", GTEST_FLAG(stream_result_to).c_str()); + EXPECT_FALSE(GTEST_FLAG(throw_on_failure)); + + GTEST_FLAG(also_run_disabled_tests) = true; + GTEST_FLAG(break_on_failure) = true; + GTEST_FLAG(catch_exceptions) = true; + GTEST_FLAG(color) = "no"; + GTEST_FLAG(death_test_use_fork) = true; + GTEST_FLAG(fail_fast) = true; + GTEST_FLAG(filter) = "abc"; + GTEST_FLAG(list_tests) = true; + GTEST_FLAG(output) = "xml:foo.xml"; + GTEST_FLAG(brief) = true; + GTEST_FLAG(print_time) = false; + GTEST_FLAG(random_seed) = 1; + GTEST_FLAG(repeat) = 100; + GTEST_FLAG(shuffle) = true; + GTEST_FLAG(stack_trace_depth) = 1; + GTEST_FLAG(stream_result_to) = "localhost:1234"; + GTEST_FLAG(throw_on_failure) = true; + } + + private: + // For saving Google Test flags during this test case. + static GTestFlagSaver* saver_; +}; + +GTestFlagSaver* GTestFlagSaverTest::saver_ = nullptr; + +// Google Test doesn't guarantee the order of tests. The following two +// tests are designed to work regardless of their order. + +// Modifies the Google Test flags in the test body. +TEST_F(GTestFlagSaverTest, ModifyGTestFlags) { + VerifyAndModifyFlags(); +} + +// Verifies that the Google Test flags in the body of the previous test were +// restored to their original values. +TEST_F(GTestFlagSaverTest, VerifyGTestFlags) { + VerifyAndModifyFlags(); +} + +// Sets an environment variable with the given name to the given +// value. If the value argument is "", unsets the environment +// variable. The caller must ensure that both arguments are not NULL. +static void SetEnv(const char* name, const char* value) { +#if GTEST_OS_WINDOWS_MOBILE + // Environment variables are not supported on Windows CE. 
+ return; +#elif defined(__BORLANDC__) || defined(__SunOS_5_8) || defined(__SunOS_5_9) + // C++Builder's putenv only stores a pointer to its parameter; we have to + // ensure that the string remains valid as long as it might be needed. + // We use an std::map to do so. + static std::map<std::string, std::string*> added_env; + + // Because putenv stores a pointer to the string buffer, we can't delete the + // previous string (if present) until after it's replaced. + std::string *prev_env = NULL; + if (added_env.find(name) != added_env.end()) { + prev_env = added_env[name]; + } + added_env[name] = new std::string( + (Message() << name << "=" << value).GetString()); + + // The standard signature of putenv accepts a 'char*' argument. Other + // implementations, like C++Builder's, accept a 'const char*'. + // We cast away the 'const' since that would work for both variants. + putenv(const_cast<char*>(added_env[name]->c_str())); + delete prev_env; +#elif GTEST_OS_WINDOWS // If we are on Windows proper. + _putenv((Message() << name << "=" << value).GetString().c_str()); +#else + if (*value == '\0') { + unsetenv(name); + } else { + setenv(name, value, 1); + } +#endif // GTEST_OS_WINDOWS_MOBILE +} + +#if !GTEST_OS_WINDOWS_MOBILE +// Environment variables are not supported on Windows CE. + +using testing::internal::Int32FromGTestEnv; + +// Tests Int32FromGTestEnv(). + +// Tests that Int32FromGTestEnv() returns the default value when the +// environment variable is not set. +TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenVariableIsNotSet) { + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", ""); + EXPECT_EQ(10, Int32FromGTestEnv("temp", 10)); +} + +# if !defined(GTEST_GET_INT32_FROM_ENV_) + +// Tests that Int32FromGTestEnv() returns the default value when the +// environment variable overflows as an Int32. +TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenValueOverflows) { + printf("(expecting 2 warnings)\n"); + + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "12345678987654321"); + EXPECT_EQ(20, Int32FromGTestEnv("temp", 20)); + + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "-12345678987654321"); + EXPECT_EQ(30, Int32FromGTestEnv("temp", 30)); +} + +// Tests that Int32FromGTestEnv() returns the default value when the +// environment variable does not represent a valid decimal integer. +TEST(Int32FromGTestEnvTest, ReturnsDefaultWhenValueIsInvalid) { + printf("(expecting 2 warnings)\n"); + + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "A1"); + EXPECT_EQ(40, Int32FromGTestEnv("temp", 40)); + + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "12X"); + EXPECT_EQ(50, Int32FromGTestEnv("temp", 50)); +} + +# endif // !defined(GTEST_GET_INT32_FROM_ENV_) + +// Tests that Int32FromGTestEnv() parses and returns the value of the +// environment variable when it represents a valid decimal integer in +// the range of an Int32. +TEST(Int32FromGTestEnvTest, ParsesAndReturnsValidValue) { + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "123"); + EXPECT_EQ(123, Int32FromGTestEnv("temp", 0)); + + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "TEMP", "-321"); + EXPECT_EQ(-321, Int32FromGTestEnv("temp", 0)); +} +#endif // !GTEST_OS_WINDOWS_MOBILE + +// Tests ParseInt32Flag(). 
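+
+// Shape of the helper exercised below: ParseInt32Flag(str, name, &value)
+// succeeds only for a well-formed "--<flag>=<decimal int32>" argument naming
+// the expected flag, and leaves *value untouched otherwise.  A simplified
+// stand-in that skips gtest's flag-prefix handling (illustration only):
+inline bool ParseInt32FlagSketch(const std::string& str,
+                                 const std::string& name, int32_t* value) {
+  const std::string prefix = "--" + name + "=";
+  if (str.compare(0, prefix.size(), prefix) != 0) return false;  // wrong flag
+  size_t i = prefix.size();
+  const bool negative = (i < str.size() && str[i] == '-');
+  if (negative) ++i;
+  if (i >= str.size()) return false;  // no digits after '='
+  long long magnitude = 0;
+  for (; i < str.size(); ++i) {
+    if (str[i] < '0' || str[i] > '9') return false;  // not a decimal integer
+    magnitude = magnitude * 10 + (str[i] - '0');
+    if (magnitude > 2147483648LL) return false;  // overflows int32_t
+  }
+  if (magnitude > (negative ? 2147483648LL : 2147483647LL)) return false;
+  *value = static_cast<int32_t>(negative ? -magnitude : magnitude);
+  return true;
+}
+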
+ +// Tests that ParseInt32Flag() returns false and doesn't change the +// output value when the flag has wrong format +TEST(ParseInt32FlagTest, ReturnsFalseForInvalidFlag) { + int32_t value = 123; + EXPECT_FALSE(ParseInt32Flag("--a=100", "b", &value)); + EXPECT_EQ(123, value); + + EXPECT_FALSE(ParseInt32Flag("a=100", "a", &value)); + EXPECT_EQ(123, value); +} + +// Tests that ParseInt32Flag() returns false and doesn't change the +// output value when the flag overflows as an Int32. +TEST(ParseInt32FlagTest, ReturnsDefaultWhenValueOverflows) { + printf("(expecting 2 warnings)\n"); + + int32_t value = 123; + EXPECT_FALSE(ParseInt32Flag("--abc=12345678987654321", "abc", &value)); + EXPECT_EQ(123, value); + + EXPECT_FALSE(ParseInt32Flag("--abc=-12345678987654321", "abc", &value)); + EXPECT_EQ(123, value); +} + +// Tests that ParseInt32Flag() returns false and doesn't change the +// output value when the flag does not represent a valid decimal +// integer. +TEST(ParseInt32FlagTest, ReturnsDefaultWhenValueIsInvalid) { + printf("(expecting 2 warnings)\n"); + + int32_t value = 123; + EXPECT_FALSE(ParseInt32Flag("--abc=A1", "abc", &value)); + EXPECT_EQ(123, value); + + EXPECT_FALSE(ParseInt32Flag("--abc=12X", "abc", &value)); + EXPECT_EQ(123, value); +} + +// Tests that ParseInt32Flag() parses the value of the flag and +// returns true when the flag represents a valid decimal integer in +// the range of an Int32. +TEST(ParseInt32FlagTest, ParsesAndReturnsValidValue) { + int32_t value = 123; + EXPECT_TRUE(ParseInt32Flag("--" GTEST_FLAG_PREFIX_ "abc=456", "abc", &value)); + EXPECT_EQ(456, value); + + EXPECT_TRUE(ParseInt32Flag("--" GTEST_FLAG_PREFIX_ "abc=-789", + "abc", &value)); + EXPECT_EQ(-789, value); +} + +// Tests that Int32FromEnvOrDie() parses the value of the var or +// returns the correct default. +// Environment variables are not supported on Windows CE. +#if !GTEST_OS_WINDOWS_MOBILE +TEST(Int32FromEnvOrDieTest, ParsesAndReturnsValidValue) { + EXPECT_EQ(333, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333)); + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", "123"); + EXPECT_EQ(123, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333)); + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", "-123"); + EXPECT_EQ(-123, Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "UnsetVar", 333)); +} +#endif // !GTEST_OS_WINDOWS_MOBILE + +// Tests that Int32FromEnvOrDie() aborts with an error message +// if the variable is not an int32_t. +TEST(Int32FromEnvOrDieDeathTest, AbortsOnFailure) { + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "VAR", "xxx"); + EXPECT_DEATH_IF_SUPPORTED( + Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123), + ".*"); +} + +// Tests that Int32FromEnvOrDie() aborts with an error message +// if the variable cannot be represented by an int32_t. +TEST(Int32FromEnvOrDieDeathTest, AbortsOnInt32Overflow) { + SetEnv(GTEST_FLAG_PREFIX_UPPER_ "VAR", "1234567891234567891234"); + EXPECT_DEATH_IF_SUPPORTED( + Int32FromEnvOrDie(GTEST_FLAG_PREFIX_UPPER_ "VAR", 123), + ".*"); +} + +// Tests that ShouldRunTestOnShard() selects all tests +// where there is 1 shard. 
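+
+// The sharding properties checked in this group -- every test runs on exactly
+// one shard, and shards stay roughly balanced -- are satisfied by a simple
+// round-robin assignment.  A minimal sketch of such a policy (not necessarily
+// identical to what ShouldRunTestOnShard() actually does):
+inline bool ShouldRunTestOnShardSketch(int total_shards, int shard_index,
+                                       int test_id) {
+  // Assign test N to shard (N mod total_shards).
+  return (test_id % total_shards) == shard_index;
+}
+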
+TEST(ShouldRunTestOnShardTest, IsPartitionWhenThereIsOneShard) { + EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 0)); + EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 1)); + EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 2)); + EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 3)); + EXPECT_TRUE(ShouldRunTestOnShard(1, 0, 4)); +} + +class ShouldShardTest : public testing::Test { + protected: + void SetUp() override { + index_var_ = GTEST_FLAG_PREFIX_UPPER_ "INDEX"; + total_var_ = GTEST_FLAG_PREFIX_UPPER_ "TOTAL"; + } + + void TearDown() override { + SetEnv(index_var_, ""); + SetEnv(total_var_, ""); + } + + const char* index_var_; + const char* total_var_; +}; + +// Tests that sharding is disabled if neither of the environment variables +// are set. +TEST_F(ShouldShardTest, ReturnsFalseWhenNeitherEnvVarIsSet) { + SetEnv(index_var_, ""); + SetEnv(total_var_, ""); + + EXPECT_FALSE(ShouldShard(total_var_, index_var_, false)); + EXPECT_FALSE(ShouldShard(total_var_, index_var_, true)); +} + +// Tests that sharding is not enabled if total_shards == 1. +TEST_F(ShouldShardTest, ReturnsFalseWhenTotalShardIsOne) { + SetEnv(index_var_, "0"); + SetEnv(total_var_, "1"); + EXPECT_FALSE(ShouldShard(total_var_, index_var_, false)); + EXPECT_FALSE(ShouldShard(total_var_, index_var_, true)); +} + +// Tests that sharding is enabled if total_shards > 1 and +// we are not in a death test subprocess. +// Environment variables are not supported on Windows CE. +#if !GTEST_OS_WINDOWS_MOBILE +TEST_F(ShouldShardTest, WorksWhenShardEnvVarsAreValid) { + SetEnv(index_var_, "4"); + SetEnv(total_var_, "22"); + EXPECT_TRUE(ShouldShard(total_var_, index_var_, false)); + EXPECT_FALSE(ShouldShard(total_var_, index_var_, true)); + + SetEnv(index_var_, "8"); + SetEnv(total_var_, "9"); + EXPECT_TRUE(ShouldShard(total_var_, index_var_, false)); + EXPECT_FALSE(ShouldShard(total_var_, index_var_, true)); + + SetEnv(index_var_, "0"); + SetEnv(total_var_, "9"); + EXPECT_TRUE(ShouldShard(total_var_, index_var_, false)); + EXPECT_FALSE(ShouldShard(total_var_, index_var_, true)); +} +#endif // !GTEST_OS_WINDOWS_MOBILE + +// Tests that we exit in error if the sharding values are not valid. + +typedef ShouldShardTest ShouldShardDeathTest; + +TEST_F(ShouldShardDeathTest, AbortsWhenShardingEnvVarsAreInvalid) { + SetEnv(index_var_, "4"); + SetEnv(total_var_, "4"); + EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*"); + + SetEnv(index_var_, "4"); + SetEnv(total_var_, "-2"); + EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*"); + + SetEnv(index_var_, "5"); + SetEnv(total_var_, ""); + EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*"); + + SetEnv(index_var_, ""); + SetEnv(total_var_, "5"); + EXPECT_DEATH_IF_SUPPORTED(ShouldShard(total_var_, index_var_, false), ".*"); +} + +// Tests that ShouldRunTestOnShard is a partition when 5 +// shards are used. +TEST(ShouldRunTestOnShardTest, IsPartitionWhenThereAreFiveShards) { + // Choose an arbitrary number of tests and shards. + const int num_tests = 17; + const int num_shards = 5; + + // Check partitioning: each test should be on exactly 1 shard. 
+ for (int test_id = 0; test_id < num_tests; test_id++) { + int prev_selected_shard_index = -1; + for (int shard_index = 0; shard_index < num_shards; shard_index++) { + if (ShouldRunTestOnShard(num_shards, shard_index, test_id)) { + if (prev_selected_shard_index < 0) { + prev_selected_shard_index = shard_index; + } else { + ADD_FAILURE() << "Shard " << prev_selected_shard_index << " and " + << shard_index << " are both selected to run test " << test_id; + } + } + } + } + + // Check balance: This is not required by the sharding protocol, but is a + // desirable property for performance. + for (int shard_index = 0; shard_index < num_shards; shard_index++) { + int num_tests_on_shard = 0; + for (int test_id = 0; test_id < num_tests; test_id++) { + num_tests_on_shard += + ShouldRunTestOnShard(num_shards, shard_index, test_id); + } + EXPECT_GE(num_tests_on_shard, num_tests / num_shards); + } +} + +// For the same reason we are not explicitly testing everything in the +// Test class, there are no separate tests for the following classes +// (except for some trivial cases): +// +// TestSuite, UnitTest, UnitTestResultPrinter. +// +// Similarly, there are no separate tests for the following macros: +// +// TEST, TEST_F, RUN_ALL_TESTS + +TEST(UnitTestTest, CanGetOriginalWorkingDir) { + ASSERT_TRUE(UnitTest::GetInstance()->original_working_dir() != nullptr); + EXPECT_STRNE(UnitTest::GetInstance()->original_working_dir(), ""); +} + +TEST(UnitTestTest, ReturnsPlausibleTimestamp) { + EXPECT_LT(0, UnitTest::GetInstance()->start_timestamp()); + EXPECT_LE(UnitTest::GetInstance()->start_timestamp(), GetTimeInMillis()); +} + +// When a property using a reserved key is supplied to this function, it +// tests that a non-fatal failure is added, a fatal failure is not added, +// and that the property is not recorded. +void ExpectNonFatalFailureRecordingPropertyWithReservedKey( + const TestResult& test_result, const char* key) { + EXPECT_NONFATAL_FAILURE(Test::RecordProperty(key, "1"), "Reserved key"); + ASSERT_EQ(0, test_result.test_property_count()) << "Property for key '" << key + << "' recorded unexpectedly."; +} + +void ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest( + const char* key) { + const TestInfo* test_info = UnitTest::GetInstance()->current_test_info(); + ASSERT_TRUE(test_info != nullptr); + ExpectNonFatalFailureRecordingPropertyWithReservedKey(*test_info->result(), + key); +} + +void ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite( + const char* key) { + const testing::TestSuite* test_suite = + UnitTest::GetInstance()->current_test_suite(); + ASSERT_TRUE(test_suite != nullptr); + ExpectNonFatalFailureRecordingPropertyWithReservedKey( + test_suite->ad_hoc_test_result(), key); +} + +void ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite( + const char* key) { + ExpectNonFatalFailureRecordingPropertyWithReservedKey( + UnitTest::GetInstance()->ad_hoc_test_result(), key); +} + +// Tests that property recording functions in UnitTest outside of tests +// functions correcly. Creating a separate instance of UnitTest ensures it +// is in a state similar to the UnitTest's singleton's between tests. 
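+
+// For context, the user-facing entry point exercised throughout this group is
+// Test::RecordProperty(), which attaches a key/value pair to the report of
+// the current test (or, when called from SetUpTestSuite, of the test suite).
+// A hypothetical user-level test might call it like this:
+TEST(RecordPropertyUsageExample, AttachesKeyValuePairToReport) {
+  // "example_key" is an arbitrary, non-reserved key chosen for illustration.
+  ::testing::Test::RecordProperty("example_key", "example_value");
+  SUCCEED();
+}
+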
+class UnitTestRecordPropertyTest : + public testing::internal::UnitTestRecordPropertyTestHelper { + public: + static void SetUpTestSuite() { + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite( + "disabled"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite( + "errors"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite( + "failures"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite( + "name"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite( + "tests"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTestSuite( + "time"); + + Test::RecordProperty("test_case_key_1", "1"); + + const testing::TestSuite* test_suite = + UnitTest::GetInstance()->current_test_suite(); + + ASSERT_TRUE(test_suite != nullptr); + + ASSERT_EQ(1, test_suite->ad_hoc_test_result().test_property_count()); + EXPECT_STREQ("test_case_key_1", + test_suite->ad_hoc_test_result().GetTestProperty(0).key()); + EXPECT_STREQ("1", + test_suite->ad_hoc_test_result().GetTestProperty(0).value()); + } +}; + +// Tests TestResult has the expected property when added. +TEST_F(UnitTestRecordPropertyTest, OnePropertyFoundWhenAdded) { + UnitTestRecordProperty("key_1", "1"); + + ASSERT_EQ(1, unit_test_.ad_hoc_test_result().test_property_count()); + + EXPECT_STREQ("key_1", + unit_test_.ad_hoc_test_result().GetTestProperty(0).key()); + EXPECT_STREQ("1", + unit_test_.ad_hoc_test_result().GetTestProperty(0).value()); +} + +// Tests TestResult has multiple properties when added. +TEST_F(UnitTestRecordPropertyTest, MultiplePropertiesFoundWhenAdded) { + UnitTestRecordProperty("key_1", "1"); + UnitTestRecordProperty("key_2", "2"); + + ASSERT_EQ(2, unit_test_.ad_hoc_test_result().test_property_count()); + + EXPECT_STREQ("key_1", + unit_test_.ad_hoc_test_result().GetTestProperty(0).key()); + EXPECT_STREQ("1", unit_test_.ad_hoc_test_result().GetTestProperty(0).value()); + + EXPECT_STREQ("key_2", + unit_test_.ad_hoc_test_result().GetTestProperty(1).key()); + EXPECT_STREQ("2", unit_test_.ad_hoc_test_result().GetTestProperty(1).value()); +} + +// Tests TestResult::RecordProperty() overrides values for duplicate keys. 
+TEST_F(UnitTestRecordPropertyTest, OverridesValuesForDuplicateKeys) { + UnitTestRecordProperty("key_1", "1"); + UnitTestRecordProperty("key_2", "2"); + UnitTestRecordProperty("key_1", "12"); + UnitTestRecordProperty("key_2", "22"); + + ASSERT_EQ(2, unit_test_.ad_hoc_test_result().test_property_count()); + + EXPECT_STREQ("key_1", + unit_test_.ad_hoc_test_result().GetTestProperty(0).key()); + EXPECT_STREQ("12", + unit_test_.ad_hoc_test_result().GetTestProperty(0).value()); + + EXPECT_STREQ("key_2", + unit_test_.ad_hoc_test_result().GetTestProperty(1).key()); + EXPECT_STREQ("22", + unit_test_.ad_hoc_test_result().GetTestProperty(1).value()); +} + +TEST_F(UnitTestRecordPropertyTest, + AddFailureInsideTestsWhenUsingTestSuiteReservedKeys) { + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest( + "name"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest( + "value_param"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest( + "type_param"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest( + "status"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest( + "time"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyForCurrentTest( + "classname"); +} + +TEST_F(UnitTestRecordPropertyTest, + AddRecordWithReservedKeysGeneratesCorrectPropertyList) { + EXPECT_NONFATAL_FAILURE( + Test::RecordProperty("name", "1"), + "'classname', 'name', 'status', 'time', 'type_param', 'value_param'," + " 'file', and 'line' are reserved"); +} + +class UnitTestRecordPropertyTestEnvironment : public Environment { + public: + void TearDown() override { + ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite( + "tests"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite( + "failures"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite( + "disabled"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite( + "errors"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite( + "name"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite( + "timestamp"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite( + "time"); + ExpectNonFatalFailureRecordingPropertyWithReservedKeyOutsideOfTestSuite( + "random_seed"); + } +}; + +// This will test property recording outside of any test or test case. +static Environment* record_property_env GTEST_ATTRIBUTE_UNUSED_ = + AddGlobalTestEnvironment(new UnitTestRecordPropertyTestEnvironment); + +// This group of tests is for predicate assertions (ASSERT_PRED*, etc) +// of various arities. They do not attempt to be exhaustive. Rather, +// view them as smoke tests that can be easily reviewed and verified. +// A more complete set of tests for predicate assertions can be found +// in gtest_pred_impl_unittest.cc. + +// First, some predicates and predicate-formatters needed by the tests. + +// Returns true if and only if the argument is an even number. +bool IsEven(int n) { + return (n % 2) == 0; +} + +// A functor that returns true if and only if the argument is an even number. +struct IsEvenFunctor { + bool operator()(int n) { return IsEven(n); } +}; + +// A predicate-formatter function that asserts the argument is an even +// number. 
+AssertionResult AssertIsEven(const char* expr, int n) { + if (IsEven(n)) { + return AssertionSuccess(); + } + + Message msg; + msg << expr << " evaluates to " << n << ", which is not even."; + return AssertionFailure(msg); +} + +// A predicate function that returns AssertionResult for use in +// EXPECT/ASSERT_TRUE/FALSE. +AssertionResult ResultIsEven(int n) { + if (IsEven(n)) + return AssertionSuccess() << n << " is even"; + else + return AssertionFailure() << n << " is odd"; +} + +// A predicate function that returns AssertionResult but gives no +// explanation why it succeeds. Needed for testing that +// EXPECT/ASSERT_FALSE handles such functions correctly. +AssertionResult ResultIsEvenNoExplanation(int n) { + if (IsEven(n)) + return AssertionSuccess(); + else + return AssertionFailure() << n << " is odd"; +} + +// A predicate-formatter functor that asserts the argument is an even +// number. +struct AssertIsEvenFunctor { + AssertionResult operator()(const char* expr, int n) { + return AssertIsEven(expr, n); + } +}; + +// Returns true if and only if the sum of the arguments is an even number. +bool SumIsEven2(int n1, int n2) { + return IsEven(n1 + n2); +} + +// A functor that returns true if and only if the sum of the arguments is an +// even number. +struct SumIsEven3Functor { + bool operator()(int n1, int n2, int n3) { + return IsEven(n1 + n2 + n3); + } +}; + +// A predicate-formatter function that asserts the sum of the +// arguments is an even number. +AssertionResult AssertSumIsEven4( + const char* e1, const char* e2, const char* e3, const char* e4, + int n1, int n2, int n3, int n4) { + const int sum = n1 + n2 + n3 + n4; + if (IsEven(sum)) { + return AssertionSuccess(); + } + + Message msg; + msg << e1 << " + " << e2 << " + " << e3 << " + " << e4 + << " (" << n1 << " + " << n2 << " + " << n3 << " + " << n4 + << ") evaluates to " << sum << ", which is not even."; + return AssertionFailure(msg); +} + +// A predicate-formatter functor that asserts the sum of the arguments +// is an even number. +struct AssertSumIsEven5Functor { + AssertionResult operator()( + const char* e1, const char* e2, const char* e3, const char* e4, + const char* e5, int n1, int n2, int n3, int n4, int n5) { + const int sum = n1 + n2 + n3 + n4 + n5; + if (IsEven(sum)) { + return AssertionSuccess(); + } + + Message msg; + msg << e1 << " + " << e2 << " + " << e3 << " + " << e4 << " + " << e5 + << " (" + << n1 << " + " << n2 << " + " << n3 << " + " << n4 << " + " << n5 + << ") evaluates to " << sum << ", which is not even."; + return AssertionFailure(msg); + } +}; + + +// Tests unary predicate assertions. + +// Tests unary predicate assertions that don't use a custom formatter. +TEST(Pred1Test, WithoutFormat) { + // Success cases. + EXPECT_PRED1(IsEvenFunctor(), 2) << "This failure is UNEXPECTED!"; + ASSERT_PRED1(IsEven, 4); + + // Failure cases. + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED1(IsEven, 5) << "This failure is expected."; + }, "This failure is expected."); + EXPECT_FATAL_FAILURE(ASSERT_PRED1(IsEvenFunctor(), 5), + "evaluates to false"); +} + +// Tests unary predicate assertions that use a custom formatter. +TEST(Pred1Test, WithFormat) { + // Success cases. + EXPECT_PRED_FORMAT1(AssertIsEven, 2); + ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), 4) + << "This failure is UNEXPECTED!"; + + // Failure cases. 
+ const int n = 5; + EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT1(AssertIsEvenFunctor(), n), + "n evaluates to 5, which is not even."); + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT1(AssertIsEven, 5) << "This failure is expected."; + }, "This failure is expected."); +} + +// Tests that unary predicate assertions evaluates their arguments +// exactly once. +TEST(Pred1Test, SingleEvaluationOnFailure) { + // A success case. + static int n = 0; + EXPECT_PRED1(IsEven, n++); + EXPECT_EQ(1, n) << "The argument is not evaluated exactly once."; + + // A failure case. + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT1(AssertIsEvenFunctor(), n++) + << "This failure is expected."; + }, "This failure is expected."); + EXPECT_EQ(2, n) << "The argument is not evaluated exactly once."; +} + + +// Tests predicate assertions whose arity is >= 2. + +// Tests predicate assertions that don't use a custom formatter. +TEST(PredTest, WithoutFormat) { + // Success cases. + ASSERT_PRED2(SumIsEven2, 2, 4) << "This failure is UNEXPECTED!"; + EXPECT_PRED3(SumIsEven3Functor(), 4, 6, 8); + + // Failure cases. + const int n1 = 1; + const int n2 = 2; + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED2(SumIsEven2, n1, n2) << "This failure is expected."; + }, "This failure is expected."); + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED3(SumIsEven3Functor(), 1, 2, 4); + }, "evaluates to false"); +} + +// Tests predicate assertions that use a custom formatter. +TEST(PredTest, WithFormat) { + // Success cases. + ASSERT_PRED_FORMAT4(AssertSumIsEven4, 4, 6, 8, 10) << + "This failure is UNEXPECTED!"; + EXPECT_PRED_FORMAT5(AssertSumIsEven5Functor(), 2, 4, 6, 8, 10); + + // Failure cases. + const int n1 = 1; + const int n2 = 2; + const int n3 = 4; + const int n4 = 6; + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT4(AssertSumIsEven4, n1, n2, n3, n4); + }, "evaluates to 13, which is not even."); + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(), 1, 2, 4, 6, 8) + << "This failure is expected."; + }, "This failure is expected."); +} + +// Tests that predicate assertions evaluates their arguments +// exactly once. +TEST(PredTest, SingleEvaluationOnFailure) { + // A success case. + int n1 = 0; + int n2 = 0; + EXPECT_PRED2(SumIsEven2, n1++, n2++); + EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once."; + EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once."; + + // Another success case. + n1 = n2 = 0; + int n3 = 0; + int n4 = 0; + int n5 = 0; + ASSERT_PRED_FORMAT5(AssertSumIsEven5Functor(), + n1++, n2++, n3++, n4++, n5++) + << "This failure is UNEXPECTED!"; + EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once."; + EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once."; + EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once."; + EXPECT_EQ(1, n4) << "Argument 4 is not evaluated exactly once."; + EXPECT_EQ(1, n5) << "Argument 5 is not evaluated exactly once."; + + // A failure case. + n1 = n2 = n3 = 0; + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED3(SumIsEven3Functor(), ++n1, n2++, n3++) + << "This failure is expected."; + }, "This failure is expected."); + EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once."; + EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once."; + EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once."; + + // Another failure case. 
+ n1 = n2 = n3 = n4 = 0; + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT4(AssertSumIsEven4, ++n1, n2++, n3++, n4++); + }, "evaluates to 1, which is not even."); + EXPECT_EQ(1, n1) << "Argument 1 is not evaluated exactly once."; + EXPECT_EQ(1, n2) << "Argument 2 is not evaluated exactly once."; + EXPECT_EQ(1, n3) << "Argument 3 is not evaluated exactly once."; + EXPECT_EQ(1, n4) << "Argument 4 is not evaluated exactly once."; +} + +// Test predicate assertions for sets +TEST(PredTest, ExpectPredEvalFailure) { + std::set<int> set_a = {2, 1, 3, 4, 5}; + std::set<int> set_b = {0, 4, 8}; + const auto compare_sets = [] (std::set<int>, std::set<int>) { return false; }; + EXPECT_NONFATAL_FAILURE( + EXPECT_PRED2(compare_sets, set_a, set_b), + "compare_sets(set_a, set_b) evaluates to false, where\nset_a evaluates " + "to { 1, 2, 3, 4, 5 }\nset_b evaluates to { 0, 4, 8 }"); +} + +// Some helper functions for testing using overloaded/template +// functions with ASSERT_PREDn and EXPECT_PREDn. + +bool IsPositive(double x) { + return x > 0; +} + +template <typename T> +bool IsNegative(T x) { + return x < 0; +} + +template <typename T1, typename T2> +bool GreaterThan(T1 x1, T2 x2) { + return x1 > x2; +} + +// Tests that overloaded functions can be used in *_PRED* as long as +// their types are explicitly specified. +TEST(PredicateAssertionTest, AcceptsOverloadedFunction) { + // C++Builder requires C-style casts rather than static_cast. + EXPECT_PRED1((bool (*)(int))(IsPositive), 5); // NOLINT + ASSERT_PRED1((bool (*)(double))(IsPositive), 6.0); // NOLINT +} + +// Tests that template functions can be used in *_PRED* as long as +// their types are explicitly specified. +TEST(PredicateAssertionTest, AcceptsTemplateFunction) { + EXPECT_PRED1(IsNegative<int>, -5); + // Makes sure that we can handle templates with more than one + // parameter. + ASSERT_PRED2((GreaterThan<int, int>), 5, 0); +} + + +// Some helper functions for testing using overloaded/template +// functions with ASSERT_PRED_FORMATn and EXPECT_PRED_FORMATn. + +AssertionResult IsPositiveFormat(const char* /* expr */, int n) { + return n > 0 ? AssertionSuccess() : + AssertionFailure(Message() << "Failure"); +} + +AssertionResult IsPositiveFormat(const char* /* expr */, double x) { + return x > 0 ? AssertionSuccess() : + AssertionFailure(Message() << "Failure"); +} + +template <typename T> +AssertionResult IsNegativeFormat(const char* /* expr */, T x) { + return x < 0 ? AssertionSuccess() : + AssertionFailure(Message() << "Failure"); +} + +template <typename T1, typename T2> +AssertionResult EqualsFormat(const char* /* expr1 */, const char* /* expr2 */, + const T1& x1, const T2& x2) { + return x1 == x2 ? AssertionSuccess() : + AssertionFailure(Message() << "Failure"); +} + +// Tests that overloaded functions can be used in *_PRED_FORMAT* +// without explicitly specifying their types. +TEST(PredicateFormatAssertionTest, AcceptsOverloadedFunction) { + EXPECT_PRED_FORMAT1(IsPositiveFormat, 5); + ASSERT_PRED_FORMAT1(IsPositiveFormat, 6.0); +} + +// Tests that template functions can be used in *_PRED_FORMAT* without +// explicitly specifying their types. +TEST(PredicateFormatAssertionTest, AcceptsTemplateFunction) { + EXPECT_PRED_FORMAT1(IsNegativeFormat, -5); + ASSERT_PRED_FORMAT2(EqualsFormat, 3, 3); +} + + +// Tests string assertions. + +// Tests ASSERT_STREQ with non-NULL arguments. 
+TEST(StringAssertionTest, ASSERT_STREQ) { + const char * const p1 = "good"; + ASSERT_STREQ(p1, p1); + + // Let p2 have the same content as p1, but be at a different address. + const char p2[] = "good"; + ASSERT_STREQ(p1, p2); + + EXPECT_FATAL_FAILURE(ASSERT_STREQ("bad", "good"), + " \"bad\"\n \"good\""); +} + +// Tests ASSERT_STREQ with NULL arguments. +TEST(StringAssertionTest, ASSERT_STREQ_Null) { + ASSERT_STREQ(static_cast<const char*>(nullptr), nullptr); + EXPECT_FATAL_FAILURE(ASSERT_STREQ(nullptr, "non-null"), "non-null"); +} + +// Tests ASSERT_STREQ with NULL arguments. +TEST(StringAssertionTest, ASSERT_STREQ_Null2) { + EXPECT_FATAL_FAILURE(ASSERT_STREQ("non-null", nullptr), "non-null"); +} + +// Tests ASSERT_STRNE. +TEST(StringAssertionTest, ASSERT_STRNE) { + ASSERT_STRNE("hi", "Hi"); + ASSERT_STRNE("Hi", nullptr); + ASSERT_STRNE(nullptr, "Hi"); + ASSERT_STRNE("", nullptr); + ASSERT_STRNE(nullptr, ""); + ASSERT_STRNE("", "Hi"); + ASSERT_STRNE("Hi", ""); + EXPECT_FATAL_FAILURE(ASSERT_STRNE("Hi", "Hi"), + "\"Hi\" vs \"Hi\""); +} + +// Tests ASSERT_STRCASEEQ. +TEST(StringAssertionTest, ASSERT_STRCASEEQ) { + ASSERT_STRCASEEQ("hi", "Hi"); + ASSERT_STRCASEEQ(static_cast<const char*>(nullptr), nullptr); + + ASSERT_STRCASEEQ("", ""); + EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ("Hi", "hi2"), + "Ignoring case"); +} + +// Tests ASSERT_STRCASENE. +TEST(StringAssertionTest, ASSERT_STRCASENE) { + ASSERT_STRCASENE("hi1", "Hi2"); + ASSERT_STRCASENE("Hi", nullptr); + ASSERT_STRCASENE(nullptr, "Hi"); + ASSERT_STRCASENE("", nullptr); + ASSERT_STRCASENE(nullptr, ""); + ASSERT_STRCASENE("", "Hi"); + ASSERT_STRCASENE("Hi", ""); + EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("Hi", "hi"), + "(ignoring case)"); +} + +// Tests *_STREQ on wide strings. +TEST(StringAssertionTest, STREQ_Wide) { + // NULL strings. + ASSERT_STREQ(static_cast<const wchar_t*>(nullptr), nullptr); + + // Empty strings. + ASSERT_STREQ(L"", L""); + + // Non-null vs NULL. + EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"non-null", nullptr), "non-null"); + + // Equal strings. + EXPECT_STREQ(L"Hi", L"Hi"); + + // Unequal strings. + EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc", L"Abc"), + "Abc"); + + // Strings containing wide characters. + EXPECT_NONFATAL_FAILURE(EXPECT_STREQ(L"abc\x8119", L"abc\x8120"), + "abc"); + + // The streaming variation. + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_STREQ(L"abc\x8119", L"abc\x8121") << "Expected failure"; + }, "Expected failure"); +} + +// Tests *_STRNE on wide strings. +TEST(StringAssertionTest, STRNE_Wide) { + // NULL strings. + EXPECT_NONFATAL_FAILURE( + { // NOLINT + EXPECT_STRNE(static_cast<const wchar_t*>(nullptr), nullptr); + }, + ""); + + // Empty strings. + EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"", L""), + "L\"\""); + + // Non-null vs NULL. + ASSERT_STRNE(L"non-null", nullptr); + + // Equal strings. + EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"Hi", L"Hi"), + "L\"Hi\""); + + // Unequal strings. + EXPECT_STRNE(L"abc", L"Abc"); + + // Strings containing wide characters. + EXPECT_NONFATAL_FAILURE(EXPECT_STRNE(L"abc\x8119", L"abc\x8119"), + "abc"); + + // The streaming variation. + ASSERT_STRNE(L"abc\x8119", L"abc\x8120") << "This shouldn't happen"; +} + +// Tests for ::testing::IsSubstring(). + +// Tests that IsSubstring() returns the correct result when the input +// argument type is const char*. 
+TEST(IsSubstringTest, ReturnsCorrectResultForCString) { + EXPECT_FALSE(IsSubstring("", "", nullptr, "a")); + EXPECT_FALSE(IsSubstring("", "", "b", nullptr)); + EXPECT_FALSE(IsSubstring("", "", "needle", "haystack")); + + EXPECT_TRUE(IsSubstring("", "", static_cast<const char*>(nullptr), nullptr)); + EXPECT_TRUE(IsSubstring("", "", "needle", "two needles")); +} + +// Tests that IsSubstring() returns the correct result when the input +// argument type is const wchar_t*. +TEST(IsSubstringTest, ReturnsCorrectResultForWideCString) { + EXPECT_FALSE(IsSubstring("", "", kNull, L"a")); + EXPECT_FALSE(IsSubstring("", "", L"b", kNull)); + EXPECT_FALSE(IsSubstring("", "", L"needle", L"haystack")); + + EXPECT_TRUE( + IsSubstring("", "", static_cast<const wchar_t*>(nullptr), nullptr)); + EXPECT_TRUE(IsSubstring("", "", L"needle", L"two needles")); +} + +// Tests that IsSubstring() generates the correct message when the input +// argument type is const char*. +TEST(IsSubstringTest, GeneratesCorrectMessageForCString) { + EXPECT_STREQ("Value of: needle_expr\n" + " Actual: \"needle\"\n" + "Expected: a substring of haystack_expr\n" + "Which is: \"haystack\"", + IsSubstring("needle_expr", "haystack_expr", + "needle", "haystack").failure_message()); +} + +// Tests that IsSubstring returns the correct result when the input +// argument type is ::std::string. +TEST(IsSubstringTest, ReturnsCorrectResultsForStdString) { + EXPECT_TRUE(IsSubstring("", "", std::string("hello"), "ahellob")); + EXPECT_FALSE(IsSubstring("", "", "hello", std::string("world"))); +} + +#if GTEST_HAS_STD_WSTRING +// Tests that IsSubstring returns the correct result when the input +// argument type is ::std::wstring. +TEST(IsSubstringTest, ReturnsCorrectResultForStdWstring) { + EXPECT_TRUE(IsSubstring("", "", ::std::wstring(L"needle"), L"two needles")); + EXPECT_FALSE(IsSubstring("", "", L"needle", ::std::wstring(L"haystack"))); +} + +// Tests that IsSubstring() generates the correct message when the input +// argument type is ::std::wstring. +TEST(IsSubstringTest, GeneratesCorrectMessageForWstring) { + EXPECT_STREQ("Value of: needle_expr\n" + " Actual: L\"needle\"\n" + "Expected: a substring of haystack_expr\n" + "Which is: L\"haystack\"", + IsSubstring( + "needle_expr", "haystack_expr", + ::std::wstring(L"needle"), L"haystack").failure_message()); +} + +#endif // GTEST_HAS_STD_WSTRING + +// Tests for ::testing::IsNotSubstring(). + +// Tests that IsNotSubstring() returns the correct result when the input +// argument type is const char*. +TEST(IsNotSubstringTest, ReturnsCorrectResultForCString) { + EXPECT_TRUE(IsNotSubstring("", "", "needle", "haystack")); + EXPECT_FALSE(IsNotSubstring("", "", "needle", "two needles")); +} + +// Tests that IsNotSubstring() returns the correct result when the input +// argument type is const wchar_t*. +TEST(IsNotSubstringTest, ReturnsCorrectResultForWideCString) { + EXPECT_TRUE(IsNotSubstring("", "", L"needle", L"haystack")); + EXPECT_FALSE(IsNotSubstring("", "", L"needle", L"two needles")); +} + +// Tests that IsNotSubstring() generates the correct message when the input +// argument type is const wchar_t*. 
+TEST(IsNotSubstringTest, GeneratesCorrectMessageForWideCString) { + EXPECT_STREQ("Value of: needle_expr\n" + " Actual: L\"needle\"\n" + "Expected: not a substring of haystack_expr\n" + "Which is: L\"two needles\"", + IsNotSubstring( + "needle_expr", "haystack_expr", + L"needle", L"two needles").failure_message()); +} + +// Tests that IsNotSubstring returns the correct result when the input +// argument type is ::std::string. +TEST(IsNotSubstringTest, ReturnsCorrectResultsForStdString) { + EXPECT_FALSE(IsNotSubstring("", "", std::string("hello"), "ahellob")); + EXPECT_TRUE(IsNotSubstring("", "", "hello", std::string("world"))); +} + +// Tests that IsNotSubstring() generates the correct message when the input +// argument type is ::std::string. +TEST(IsNotSubstringTest, GeneratesCorrectMessageForStdString) { + EXPECT_STREQ("Value of: needle_expr\n" + " Actual: \"needle\"\n" + "Expected: not a substring of haystack_expr\n" + "Which is: \"two needles\"", + IsNotSubstring( + "needle_expr", "haystack_expr", + ::std::string("needle"), "two needles").failure_message()); +} + +#if GTEST_HAS_STD_WSTRING + +// Tests that IsNotSubstring returns the correct result when the input +// argument type is ::std::wstring. +TEST(IsNotSubstringTest, ReturnsCorrectResultForStdWstring) { + EXPECT_FALSE( + IsNotSubstring("", "", ::std::wstring(L"needle"), L"two needles")); + EXPECT_TRUE(IsNotSubstring("", "", L"needle", ::std::wstring(L"haystack"))); +} + +#endif // GTEST_HAS_STD_WSTRING + +// Tests floating-point assertions. + +template <typename RawType> +class FloatingPointTest : public Test { + protected: + // Pre-calculated numbers to be used by the tests. + struct TestValues { + RawType close_to_positive_zero; + RawType close_to_negative_zero; + RawType further_from_negative_zero; + + RawType close_to_one; + RawType further_from_one; + + RawType infinity; + RawType close_to_infinity; + RawType further_from_infinity; + + RawType nan1; + RawType nan2; + }; + + typedef typename testing::internal::FloatingPoint<RawType> Floating; + typedef typename Floating::Bits Bits; + + void SetUp() override { + const uint32_t max_ulps = Floating::kMaxUlps; + + // The bits that represent 0.0. + const Bits zero_bits = Floating(0).bits(); + + // Makes some numbers close to 0.0. + values_.close_to_positive_zero = Floating::ReinterpretBits( + zero_bits + max_ulps/2); + values_.close_to_negative_zero = -Floating::ReinterpretBits( + zero_bits + max_ulps - max_ulps/2); + values_.further_from_negative_zero = -Floating::ReinterpretBits( + zero_bits + max_ulps + 1 - max_ulps/2); + + // The bits that represent 1.0. + const Bits one_bits = Floating(1).bits(); + + // Makes some numbers close to 1.0. + values_.close_to_one = Floating::ReinterpretBits(one_bits + max_ulps); + values_.further_from_one = Floating::ReinterpretBits( + one_bits + max_ulps + 1); + + // +infinity. + values_.infinity = Floating::Infinity(); + + // The bits that represent +infinity. + const Bits infinity_bits = Floating(values_.infinity).bits(); + + // Makes some numbers close to infinity. + values_.close_to_infinity = Floating::ReinterpretBits( + infinity_bits - max_ulps); + values_.further_from_infinity = Floating::ReinterpretBits( + infinity_bits - max_ulps - 1); + + // Makes some NAN's. Sets the most significant bit of the fraction so that + // our NaN's are quiet; trying to process a signaling NaN would raise an + // exception if our environment enables floating point exceptions. 
+ values_.nan1 = Floating::ReinterpretBits(Floating::kExponentBitMask + | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 1); + values_.nan2 = Floating::ReinterpretBits(Floating::kExponentBitMask + | (static_cast<Bits>(1) << (Floating::kFractionBitCount - 1)) | 200); + } + + void TestSize() { + EXPECT_EQ(sizeof(RawType), sizeof(Bits)); + } + + static TestValues values_; +}; + +template <typename RawType> +typename FloatingPointTest<RawType>::TestValues + FloatingPointTest<RawType>::values_; + +// Instantiates FloatingPointTest for testing *_FLOAT_EQ. +typedef FloatingPointTest<float> FloatTest; + +// Tests that the size of Float::Bits matches the size of float. +TEST_F(FloatTest, Size) { + TestSize(); +} + +// Tests comparing with +0 and -0. +TEST_F(FloatTest, Zeros) { + EXPECT_FLOAT_EQ(0.0, -0.0); + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(-0.0, 1.0), + "1.0"); + EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.5), + "1.5"); +} + +// Tests comparing numbers close to 0. +// +// This ensures that *_FLOAT_EQ handles the sign correctly and no +// overflow occurs when comparing numbers whose absolute value is very +// small. +TEST_F(FloatTest, AlmostZeros) { + // In C++Builder, names within local classes (such as used by + // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the + // scoping class. Use a static local alias as a workaround. + // We use the assignment syntax since some compilers, like Sun Studio, + // don't allow initializing references using construction syntax + // (parentheses). + static const FloatTest::TestValues& v = this->values_; + + EXPECT_FLOAT_EQ(0.0, v.close_to_positive_zero); + EXPECT_FLOAT_EQ(-0.0, v.close_to_negative_zero); + EXPECT_FLOAT_EQ(v.close_to_positive_zero, v.close_to_negative_zero); + + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_FLOAT_EQ(v.close_to_positive_zero, + v.further_from_negative_zero); + }, "v.further_from_negative_zero"); +} + +// Tests comparing numbers close to each other. +TEST_F(FloatTest, SmallDiff) { + EXPECT_FLOAT_EQ(1.0, values_.close_to_one); + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, values_.further_from_one), + "values_.further_from_one"); +} + +// Tests comparing numbers far apart. +TEST_F(FloatTest, LargeDiff) { + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(2.5, 3.0), + "3.0"); +} + +// Tests comparing with infinity. +// +// This ensures that no overflow occurs when comparing numbers whose +// absolute value is very large. +TEST_F(FloatTest, Infinity) { + EXPECT_FLOAT_EQ(values_.infinity, values_.close_to_infinity); + EXPECT_FLOAT_EQ(-values_.infinity, -values_.close_to_infinity); + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, -values_.infinity), + "-values_.infinity"); + + // This is interesting as the representations of infinity and nan1 + // are only 1 DLP apart. + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.infinity, values_.nan1), + "values_.nan1"); +} + +// Tests that comparing with NAN always returns false. +TEST_F(FloatTest, NaN) { + // In C++Builder, names within local classes (such as used by + // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the + // scoping class. Use a static local alias as a workaround. + // We use the assignment syntax since some compilers, like Sun Studio, + // don't allow initializing references using construction syntax + // (parentheses). 
+ static const FloatTest::TestValues& v = this->values_; + + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan1), + "v.nan1"); + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(v.nan1, v.nan2), + "v.nan2"); + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(1.0, v.nan1), + "v.nan1"); + + EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(v.nan1, v.infinity), + "v.infinity"); +} + +// Tests that *_FLOAT_EQ are reflexive. +TEST_F(FloatTest, Reflexive) { + EXPECT_FLOAT_EQ(0.0, 0.0); + EXPECT_FLOAT_EQ(1.0, 1.0); + ASSERT_FLOAT_EQ(values_.infinity, values_.infinity); +} + +// Tests that *_FLOAT_EQ are commutative. +TEST_F(FloatTest, Commutative) { + // We already tested EXPECT_FLOAT_EQ(1.0, values_.close_to_one). + EXPECT_FLOAT_EQ(values_.close_to_one, 1.0); + + // We already tested EXPECT_FLOAT_EQ(1.0, values_.further_from_one). + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(values_.further_from_one, 1.0), + "1.0"); +} + +// Tests EXPECT_NEAR. +TEST_F(FloatTest, EXPECT_NEAR) { + EXPECT_NEAR(-1.0f, -1.1f, 0.2f); + EXPECT_NEAR(2.0f, 3.0f, 1.0f); + EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0f, 1.5f, 0.25f), // NOLINT + "The difference between 1.0f and 1.5f is 0.5, " + "which exceeds 0.25f"); +} + +// Tests ASSERT_NEAR. +TEST_F(FloatTest, ASSERT_NEAR) { + ASSERT_NEAR(-1.0f, -1.1f, 0.2f); + ASSERT_NEAR(2.0f, 3.0f, 1.0f); + EXPECT_FATAL_FAILURE(ASSERT_NEAR(1.0f, 1.5f, 0.25f), // NOLINT + "The difference between 1.0f and 1.5f is 0.5, " + "which exceeds 0.25f"); +} + +// Tests the cases where FloatLE() should succeed. +TEST_F(FloatTest, FloatLESucceeds) { + EXPECT_PRED_FORMAT2(FloatLE, 1.0f, 2.0f); // When val1 < val2, + ASSERT_PRED_FORMAT2(FloatLE, 1.0f, 1.0f); // val1 == val2, + + // or when val1 is greater than, but almost equals to, val2. + EXPECT_PRED_FORMAT2(FloatLE, values_.close_to_positive_zero, 0.0f); +} + +// Tests the cases where FloatLE() should fail. +TEST_F(FloatTest, FloatLEFails) { + // When val1 is greater than val2 by a large margin, + EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT2(FloatLE, 2.0f, 1.0f), + "(2.0f) <= (1.0f)"); + + // or by a small yet non-negligible margin, + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(FloatLE, values_.further_from_one, 1.0f); + }, "(values_.further_from_one) <= (1.0f)"); + + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(FloatLE, values_.nan1, values_.infinity); + }, "(values_.nan1) <= (values_.infinity)"); + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(FloatLE, -values_.infinity, values_.nan1); + }, "(-values_.infinity) <= (values_.nan1)"); + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT2(FloatLE, values_.nan1, values_.nan1); + }, "(values_.nan1) <= (values_.nan1)"); +} + +// Instantiates FloatingPointTest for testing *_DOUBLE_EQ. +typedef FloatingPointTest<double> DoubleTest; + +// Tests that the size of Double::Bits matches the size of double. +TEST_F(DoubleTest, Size) { + TestSize(); +} + +// Tests comparing with +0 and -0. +TEST_F(DoubleTest, Zeros) { + EXPECT_DOUBLE_EQ(0.0, -0.0); + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(-0.0, 1.0), + "1.0"); + EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(0.0, 1.0), + "1.0"); +} + +// Tests comparing numbers close to 0. +// +// This ensures that *_DOUBLE_EQ handles the sign correctly and no +// overflow occurs when comparing numbers whose absolute value is very +// small. +TEST_F(DoubleTest, AlmostZeros) { + // In C++Builder, names within local classes (such as used by + // EXPECT_FATAL_FAILURE) cannot be resolved against static members of the + // scoping class. Use a static local alias as a workaround. 
+ // We use the assignment syntax since some compilers, like Sun Studio, + // don't allow initializing references using construction syntax + // (parentheses). + static const DoubleTest::TestValues& v = this->values_; + + EXPECT_DOUBLE_EQ(0.0, v.close_to_positive_zero); + EXPECT_DOUBLE_EQ(-0.0, v.close_to_negative_zero); + EXPECT_DOUBLE_EQ(v.close_to_positive_zero, v.close_to_negative_zero); + + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_DOUBLE_EQ(v.close_to_positive_zero, + v.further_from_negative_zero); + }, "v.further_from_negative_zero"); +} + +// Tests comparing numbers close to each other. +TEST_F(DoubleTest, SmallDiff) { + EXPECT_DOUBLE_EQ(1.0, values_.close_to_one); + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, values_.further_from_one), + "values_.further_from_one"); +} + +// Tests comparing numbers far apart. +TEST_F(DoubleTest, LargeDiff) { + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(2.0, 3.0), + "3.0"); +} + +// Tests comparing with infinity. +// +// This ensures that no overflow occurs when comparing numbers whose +// absolute value is very large. +TEST_F(DoubleTest, Infinity) { + EXPECT_DOUBLE_EQ(values_.infinity, values_.close_to_infinity); + EXPECT_DOUBLE_EQ(-values_.infinity, -values_.close_to_infinity); + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, -values_.infinity), + "-values_.infinity"); + + // This is interesting as the representations of infinity_ and nan1_ + // are only 1 DLP apart. + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.infinity, values_.nan1), + "values_.nan1"); +} + +// Tests that comparing with NAN always returns false. +TEST_F(DoubleTest, NaN) { + static const DoubleTest::TestValues& v = this->values_; + + // Nokia's STLport crashes if we try to output infinity or NaN. + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan1), + "v.nan1"); + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(v.nan1, v.nan2), "v.nan2"); + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1.0, v.nan1), "v.nan1"); + EXPECT_FATAL_FAILURE(ASSERT_DOUBLE_EQ(v.nan1, v.infinity), + "v.infinity"); +} + +// Tests that *_DOUBLE_EQ are reflexive. +TEST_F(DoubleTest, Reflexive) { + EXPECT_DOUBLE_EQ(0.0, 0.0); + EXPECT_DOUBLE_EQ(1.0, 1.0); + ASSERT_DOUBLE_EQ(values_.infinity, values_.infinity); +} + +// Tests that *_DOUBLE_EQ are commutative. +TEST_F(DoubleTest, Commutative) { + // We already tested EXPECT_DOUBLE_EQ(1.0, values_.close_to_one). + EXPECT_DOUBLE_EQ(values_.close_to_one, 1.0); + + // We already tested EXPECT_DOUBLE_EQ(1.0, values_.further_from_one). + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(values_.further_from_one, 1.0), + "1.0"); +} + +// Tests EXPECT_NEAR. +TEST_F(DoubleTest, EXPECT_NEAR) { + EXPECT_NEAR(-1.0, -1.1, 0.2); + EXPECT_NEAR(2.0, 3.0, 1.0); + EXPECT_NONFATAL_FAILURE(EXPECT_NEAR(1.0, 1.5, 0.25), // NOLINT + "The difference between 1.0 and 1.5 is 0.5, " + "which exceeds 0.25"); + // At this magnitude adjacent doubles are 512.0 apart, so this triggers a + // slightly different failure reporting path. + EXPECT_NONFATAL_FAILURE( + EXPECT_NEAR(4.2934311416234112e+18, 4.2934311416234107e+18, 1.0), + "The abs_error parameter 1.0 evaluates to 1 which is smaller than the " + "minimum distance between doubles for numbers of this magnitude which is " + "512"); +} + +// Tests ASSERT_NEAR. 
+TEST_F(DoubleTest, ASSERT_NEAR) { + ASSERT_NEAR(-1.0, -1.1, 0.2); + ASSERT_NEAR(2.0, 3.0, 1.0); + EXPECT_FATAL_FAILURE(ASSERT_NEAR(1.0, 1.5, 0.25), // NOLINT + "The difference between 1.0 and 1.5 is 0.5, " + "which exceeds 0.25"); +} + +// Tests the cases where DoubleLE() should succeed. +TEST_F(DoubleTest, DoubleLESucceeds) { + EXPECT_PRED_FORMAT2(DoubleLE, 1.0, 2.0); // When val1 < val2, + ASSERT_PRED_FORMAT2(DoubleLE, 1.0, 1.0); // val1 == val2, + + // or when val1 is greater than, but almost equals to, val2. + EXPECT_PRED_FORMAT2(DoubleLE, values_.close_to_positive_zero, 0.0); +} + +// Tests the cases where DoubleLE() should fail. +TEST_F(DoubleTest, DoubleLEFails) { + // When val1 is greater than val2 by a large margin, + EXPECT_NONFATAL_FAILURE(EXPECT_PRED_FORMAT2(DoubleLE, 2.0, 1.0), + "(2.0) <= (1.0)"); + + // or by a small yet non-negligible margin, + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(DoubleLE, values_.further_from_one, 1.0); + }, "(values_.further_from_one) <= (1.0)"); + + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.infinity); + }, "(values_.nan1) <= (values_.infinity)"); + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_PRED_FORMAT2(DoubleLE, -values_.infinity, values_.nan1); + }, " (-values_.infinity) <= (values_.nan1)"); + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_PRED_FORMAT2(DoubleLE, values_.nan1, values_.nan1); + }, "(values_.nan1) <= (values_.nan1)"); +} + + +// Verifies that a test or test case whose name starts with DISABLED_ is +// not run. + +// A test whose name starts with DISABLED_. +// Should not run. +TEST(DisabledTest, DISABLED_TestShouldNotRun) { + FAIL() << "Unexpected failure: Disabled test should not be run."; +} + +// A test whose name does not start with DISABLED_. +// Should run. +TEST(DisabledTest, NotDISABLED_TestShouldRun) { + EXPECT_EQ(1, 1); +} + +// A test case whose name starts with DISABLED_. +// Should not run. +TEST(DISABLED_TestSuite, TestShouldNotRun) { + FAIL() << "Unexpected failure: Test in disabled test case should not be run."; +} + +// A test case and test whose names start with DISABLED_. +// Should not run. +TEST(DISABLED_TestSuite, DISABLED_TestShouldNotRun) { + FAIL() << "Unexpected failure: Test in disabled test case should not be run."; +} + +// Check that when all tests in a test case are disabled, SetUpTestSuite() and +// TearDownTestSuite() are not called. +class DisabledTestsTest : public Test { + protected: + static void SetUpTestSuite() { + FAIL() << "Unexpected failure: All tests disabled in test case. " + "SetUpTestSuite() should not be called."; + } + + static void TearDownTestSuite() { + FAIL() << "Unexpected failure: All tests disabled in test case. " + "TearDownTestSuite() should not be called."; + } +}; + +TEST_F(DisabledTestsTest, DISABLED_TestShouldNotRun_1) { + FAIL() << "Unexpected failure: Disabled test should not be run."; +} + +TEST_F(DisabledTestsTest, DISABLED_TestShouldNotRun_2) { + FAIL() << "Unexpected failure: Disabled test should not be run."; +} + +// Tests that disabled typed tests aren't run. 
+ +template <typename T> +class TypedTest : public Test { +}; + +typedef testing::Types<int, double> NumericTypes; +TYPED_TEST_SUITE(TypedTest, NumericTypes); + +TYPED_TEST(TypedTest, DISABLED_ShouldNotRun) { + FAIL() << "Unexpected failure: Disabled typed test should not run."; +} + +template <typename T> +class DISABLED_TypedTest : public Test { +}; + +TYPED_TEST_SUITE(DISABLED_TypedTest, NumericTypes); + +TYPED_TEST(DISABLED_TypedTest, ShouldNotRun) { + FAIL() << "Unexpected failure: Disabled typed test should not run."; +} + +// Tests that disabled type-parameterized tests aren't run. + +template <typename T> +class TypedTestP : public Test { +}; + +TYPED_TEST_SUITE_P(TypedTestP); + +TYPED_TEST_P(TypedTestP, DISABLED_ShouldNotRun) { + FAIL() << "Unexpected failure: " + << "Disabled type-parameterized test should not run."; +} + +REGISTER_TYPED_TEST_SUITE_P(TypedTestP, DISABLED_ShouldNotRun); + +INSTANTIATE_TYPED_TEST_SUITE_P(My, TypedTestP, NumericTypes); + +template <typename T> +class DISABLED_TypedTestP : public Test { +}; + +TYPED_TEST_SUITE_P(DISABLED_TypedTestP); + +TYPED_TEST_P(DISABLED_TypedTestP, ShouldNotRun) { + FAIL() << "Unexpected failure: " + << "Disabled type-parameterized test should not run."; +} + +REGISTER_TYPED_TEST_SUITE_P(DISABLED_TypedTestP, ShouldNotRun); + +INSTANTIATE_TYPED_TEST_SUITE_P(My, DISABLED_TypedTestP, NumericTypes); + +// Tests that assertion macros evaluate their arguments exactly once. + +class SingleEvaluationTest : public Test { + public: // Must be public and not protected due to a bug in g++ 3.4.2. + // This helper function is needed by the FailedASSERT_STREQ test + // below. It's public to work around C++Builder's bug with scoping local + // classes. + static void CompareAndIncrementCharPtrs() { + ASSERT_STREQ(p1_++, p2_++); + } + + // This helper function is needed by the FailedASSERT_NE test below. It's + // public to work around C++Builder's bug with scoping local classes. + static void CompareAndIncrementInts() { + ASSERT_NE(a_++, b_++); + } + + protected: + SingleEvaluationTest() { + p1_ = s1_; + p2_ = s2_; + a_ = 0; + b_ = 0; + } + + static const char* const s1_; + static const char* const s2_; + static const char* p1_; + static const char* p2_; + + static int a_; + static int b_; +}; + +const char* const SingleEvaluationTest::s1_ = "01234"; +const char* const SingleEvaluationTest::s2_ = "abcde"; +const char* SingleEvaluationTest::p1_; +const char* SingleEvaluationTest::p2_; +int SingleEvaluationTest::a_; +int SingleEvaluationTest::b_; + +// Tests that when ASSERT_STREQ fails, it evaluates its arguments +// exactly once. +TEST_F(SingleEvaluationTest, FailedASSERT_STREQ) { + EXPECT_FATAL_FAILURE(SingleEvaluationTest::CompareAndIncrementCharPtrs(), + "p2_++"); + EXPECT_EQ(s1_ + 1, p1_); + EXPECT_EQ(s2_ + 1, p2_); +} + +// Tests that string assertion arguments are evaluated exactly once. +TEST_F(SingleEvaluationTest, ASSERT_STR) { + // successful EXPECT_STRNE + EXPECT_STRNE(p1_++, p2_++); + EXPECT_EQ(s1_ + 1, p1_); + EXPECT_EQ(s2_ + 1, p2_); + + // failed EXPECT_STRCASEEQ + EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ(p1_++, p2_++), + "Ignoring case"); + EXPECT_EQ(s1_ + 2, p1_); + EXPECT_EQ(s2_ + 2, p2_); +} + +// Tests that when ASSERT_NE fails, it evaluates its arguments exactly +// once. +TEST_F(SingleEvaluationTest, FailedASSERT_NE) { + EXPECT_FATAL_FAILURE(SingleEvaluationTest::CompareAndIncrementInts(), + "(a_++) != (b_++)"); + EXPECT_EQ(1, a_); + EXPECT_EQ(1, b_); +} + +// Tests that assertion arguments are evaluated exactly once. 
+TEST_F(SingleEvaluationTest, OtherCases) { + // successful EXPECT_TRUE + EXPECT_TRUE(0 == a_++); // NOLINT + EXPECT_EQ(1, a_); + + // failed EXPECT_TRUE + EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(-1 == a_++), "-1 == a_++"); + EXPECT_EQ(2, a_); + + // successful EXPECT_GT + EXPECT_GT(a_++, b_++); + EXPECT_EQ(3, a_); + EXPECT_EQ(1, b_); + + // failed EXPECT_LT + EXPECT_NONFATAL_FAILURE(EXPECT_LT(a_++, b_++), "(a_++) < (b_++)"); + EXPECT_EQ(4, a_); + EXPECT_EQ(2, b_); + + // successful ASSERT_TRUE + ASSERT_TRUE(0 < a_++); // NOLINT + EXPECT_EQ(5, a_); + + // successful ASSERT_GT + ASSERT_GT(a_++, b_++); + EXPECT_EQ(6, a_); + EXPECT_EQ(3, b_); +} + +#if GTEST_HAS_EXCEPTIONS + +#if GTEST_HAS_RTTI + +#ifdef _MSC_VER +#define ERROR_DESC "class std::runtime_error" +#else +#define ERROR_DESC "std::runtime_error" +#endif + +#else // GTEST_HAS_RTTI + +#define ERROR_DESC "an std::exception-derived error" + +#endif // GTEST_HAS_RTTI + +void ThrowAnInteger() { + throw 1; +} +void ThrowRuntimeError(const char* what) { + throw std::runtime_error(what); +} + +// Tests that assertion arguments are evaluated exactly once. +TEST_F(SingleEvaluationTest, ExceptionTests) { + // successful EXPECT_THROW + EXPECT_THROW({ // NOLINT + a_++; + ThrowAnInteger(); + }, int); + EXPECT_EQ(1, a_); + + // failed EXPECT_THROW, throws different + EXPECT_NONFATAL_FAILURE(EXPECT_THROW({ // NOLINT + a_++; + ThrowAnInteger(); + }, bool), "throws a different type"); + EXPECT_EQ(2, a_); + + // failed EXPECT_THROW, throws runtime error + EXPECT_NONFATAL_FAILURE(EXPECT_THROW({ // NOLINT + a_++; + ThrowRuntimeError("A description"); + }, bool), "throws " ERROR_DESC " with description \"A description\""); + EXPECT_EQ(3, a_); + + // failed EXPECT_THROW, throws nothing + EXPECT_NONFATAL_FAILURE(EXPECT_THROW(a_++, bool), "throws nothing"); + EXPECT_EQ(4, a_); + + // successful EXPECT_NO_THROW + EXPECT_NO_THROW(a_++); + EXPECT_EQ(5, a_); + + // failed EXPECT_NO_THROW + EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW({ // NOLINT + a_++; + ThrowAnInteger(); + }), "it throws"); + EXPECT_EQ(6, a_); + + // successful EXPECT_ANY_THROW + EXPECT_ANY_THROW({ // NOLINT + a_++; + ThrowAnInteger(); + }); + EXPECT_EQ(7, a_); + + // failed EXPECT_ANY_THROW + EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(a_++), "it doesn't"); + EXPECT_EQ(8, a_); +} + +#endif // GTEST_HAS_EXCEPTIONS + +// Tests {ASSERT|EXPECT}_NO_FATAL_FAILURE. 
+class NoFatalFailureTest : public Test {
+ protected:
+ void Succeeds() {}
+ void FailsNonFatal() {
+ ADD_FAILURE() << "some non-fatal failure";
+ }
+ void Fails() {
+ FAIL() << "some fatal failure";
+ }
+
+ void DoAssertNoFatalFailureOnFails() {
+ ASSERT_NO_FATAL_FAILURE(Fails());
+ ADD_FAILURE() << "should not reach here.";
+ }
+
+ void DoExpectNoFatalFailureOnFails() {
+ EXPECT_NO_FATAL_FAILURE(Fails());
+ ADD_FAILURE() << "other failure";
+ }
+};
+
+TEST_F(NoFatalFailureTest, NoFailure) {
+ EXPECT_NO_FATAL_FAILURE(Succeeds());
+ ASSERT_NO_FATAL_FAILURE(Succeeds());
+}
+
+TEST_F(NoFatalFailureTest, NonFatalIsNoFailure) {
+ EXPECT_NONFATAL_FAILURE(
+ EXPECT_NO_FATAL_FAILURE(FailsNonFatal()),
+ "some non-fatal failure");
+ EXPECT_NONFATAL_FAILURE(
+ ASSERT_NO_FATAL_FAILURE(FailsNonFatal()),
+ "some non-fatal failure");
+}
+
+TEST_F(NoFatalFailureTest, AssertNoFatalFailureOnFatalFailure) {
+ TestPartResultArray gtest_failures;
+ {
+ ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
+ DoAssertNoFatalFailureOnFails();
+ }
+ ASSERT_EQ(2, gtest_failures.size());
+ EXPECT_EQ(TestPartResult::kFatalFailure,
+ gtest_failures.GetTestPartResult(0).type());
+ EXPECT_EQ(TestPartResult::kFatalFailure,
+ gtest_failures.GetTestPartResult(1).type());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "some fatal failure",
+ gtest_failures.GetTestPartResult(0).message());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "it does",
+ gtest_failures.GetTestPartResult(1).message());
+}
+
+TEST_F(NoFatalFailureTest, ExpectNoFatalFailureOnFatalFailure) {
+ TestPartResultArray gtest_failures;
+ {
+ ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
+ DoExpectNoFatalFailureOnFails();
+ }
+ ASSERT_EQ(3, gtest_failures.size());
+ EXPECT_EQ(TestPartResult::kFatalFailure,
+ gtest_failures.GetTestPartResult(0).type());
+ EXPECT_EQ(TestPartResult::kNonFatalFailure,
+ gtest_failures.GetTestPartResult(1).type());
+ EXPECT_EQ(TestPartResult::kNonFatalFailure,
+ gtest_failures.GetTestPartResult(2).type());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "some fatal failure",
+ gtest_failures.GetTestPartResult(0).message());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "it does",
+ gtest_failures.GetTestPartResult(1).message());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "other failure",
+ gtest_failures.GetTestPartResult(2).message());
+}
+
+TEST_F(NoFatalFailureTest, MessageIsStreamable) {
+ TestPartResultArray gtest_failures;
+ {
+ ScopedFakeTestPartResultReporter gtest_reporter(&gtest_failures);
+ EXPECT_NO_FATAL_FAILURE(FAIL() << "foo") << "my message";
+ }
+ ASSERT_EQ(2, gtest_failures.size());
+ EXPECT_EQ(TestPartResult::kNonFatalFailure,
+ gtest_failures.GetTestPartResult(0).type());
+ EXPECT_EQ(TestPartResult::kNonFatalFailure,
+ gtest_failures.GetTestPartResult(1).type());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "foo",
+ gtest_failures.GetTestPartResult(0).message());
+ EXPECT_PRED_FORMAT2(testing::IsSubstring, "my message",
+ gtest_failures.GetTestPartResult(1).message());
+}
+
+// Tests non-string assertions.
+ +std::string EditsToString(const std::vector<EditType>& edits) { + std::string out; + for (size_t i = 0; i < edits.size(); ++i) { + static const char kEdits[] = " +-/"; + out.append(1, kEdits[edits[i]]); + } + return out; +} + +std::vector<size_t> CharsToIndices(const std::string& str) { + std::vector<size_t> out; + for (size_t i = 0; i < str.size(); ++i) { + out.push_back(static_cast<size_t>(str[i])); + } + return out; +} + +std::vector<std::string> CharsToLines(const std::string& str) { + std::vector<std::string> out; + for (size_t i = 0; i < str.size(); ++i) { + out.push_back(str.substr(i, 1)); + } + return out; +} + +TEST(EditDistance, TestSuites) { + struct Case { + int line; + const char* left; + const char* right; + const char* expected_edits; + const char* expected_diff; + }; + static const Case kCases[] = { + // No change. + {__LINE__, "A", "A", " ", ""}, + {__LINE__, "ABCDE", "ABCDE", " ", ""}, + // Simple adds. + {__LINE__, "X", "XA", " +", "@@ +1,2 @@\n X\n+A\n"}, + {__LINE__, "X", "XABCD", " ++++", "@@ +1,5 @@\n X\n+A\n+B\n+C\n+D\n"}, + // Simple removes. + {__LINE__, "XA", "X", " -", "@@ -1,2 @@\n X\n-A\n"}, + {__LINE__, "XABCD", "X", " ----", "@@ -1,5 @@\n X\n-A\n-B\n-C\n-D\n"}, + // Simple replaces. + {__LINE__, "A", "a", "/", "@@ -1,1 +1,1 @@\n-A\n+a\n"}, + {__LINE__, "ABCD", "abcd", "////", + "@@ -1,4 +1,4 @@\n-A\n-B\n-C\n-D\n+a\n+b\n+c\n+d\n"}, + // Path finding. + {__LINE__, "ABCDEFGH", "ABXEGH1", " -/ - +", + "@@ -1,8 +1,7 @@\n A\n B\n-C\n-D\n+X\n E\n-F\n G\n H\n+1\n"}, + {__LINE__, "AAAABCCCC", "ABABCDCDC", "- / + / ", + "@@ -1,9 +1,9 @@\n-A\n A\n-A\n+B\n A\n B\n C\n+D\n C\n-C\n+D\n C\n"}, + {__LINE__, "ABCDE", "BCDCD", "- +/", + "@@ -1,5 +1,5 @@\n-A\n B\n C\n D\n-E\n+C\n+D\n"}, + {__LINE__, "ABCDEFGHIJKL", "BCDCDEFGJKLJK", "- ++ -- ++", + "@@ -1,4 +1,5 @@\n-A\n B\n+C\n+D\n C\n D\n" + "@@ -6,7 +7,7 @@\n F\n G\n-H\n-I\n J\n K\n L\n+J\n+K\n"}, + {}}; + for (const Case* c = kCases; c->left; ++c) { + EXPECT_TRUE(c->expected_edits == + EditsToString(CalculateOptimalEdits(CharsToIndices(c->left), + CharsToIndices(c->right)))) + << "Left <" << c->left << "> Right <" << c->right << "> Edits <" + << EditsToString(CalculateOptimalEdits( + CharsToIndices(c->left), CharsToIndices(c->right))) << ">"; + EXPECT_TRUE(c->expected_diff == CreateUnifiedDiff(CharsToLines(c->left), + CharsToLines(c->right))) + << "Left <" << c->left << "> Right <" << c->right << "> Diff <" + << CreateUnifiedDiff(CharsToLines(c->left), CharsToLines(c->right)) + << ">"; + } +} + +// Tests EqFailure(), used for implementing *EQ* assertions. 
+TEST(AssertionTest, EqFailure) { + const std::string foo_val("5"), bar_val("6"); + const std::string msg1( + EqFailure("foo", "bar", foo_val, bar_val, false) + .failure_message()); + EXPECT_STREQ( + "Expected equality of these values:\n" + " foo\n" + " Which is: 5\n" + " bar\n" + " Which is: 6", + msg1.c_str()); + + const std::string msg2( + EqFailure("foo", "6", foo_val, bar_val, false) + .failure_message()); + EXPECT_STREQ( + "Expected equality of these values:\n" + " foo\n" + " Which is: 5\n" + " 6", + msg2.c_str()); + + const std::string msg3( + EqFailure("5", "bar", foo_val, bar_val, false) + .failure_message()); + EXPECT_STREQ( + "Expected equality of these values:\n" + " 5\n" + " bar\n" + " Which is: 6", + msg3.c_str()); + + const std::string msg4( + EqFailure("5", "6", foo_val, bar_val, false).failure_message()); + EXPECT_STREQ( + "Expected equality of these values:\n" + " 5\n" + " 6", + msg4.c_str()); + + const std::string msg5( + EqFailure("foo", "bar", + std::string("\"x\""), std::string("\"y\""), + true).failure_message()); + EXPECT_STREQ( + "Expected equality of these values:\n" + " foo\n" + " Which is: \"x\"\n" + " bar\n" + " Which is: \"y\"\n" + "Ignoring case", + msg5.c_str()); +} + +TEST(AssertionTest, EqFailureWithDiff) { + const std::string left( + "1\\n2XXX\\n3\\n5\\n6\\n7\\n8\\n9\\n10\\n11\\n12XXX\\n13\\n14\\n15"); + const std::string right( + "1\\n2\\n3\\n4\\n5\\n6\\n7\\n8\\n9\\n11\\n12\\n13\\n14"); + const std::string msg1( + EqFailure("left", "right", left, right, false).failure_message()); + EXPECT_STREQ( + "Expected equality of these values:\n" + " left\n" + " Which is: " + "1\\n2XXX\\n3\\n5\\n6\\n7\\n8\\n9\\n10\\n11\\n12XXX\\n13\\n14\\n15\n" + " right\n" + " Which is: 1\\n2\\n3\\n4\\n5\\n6\\n7\\n8\\n9\\n11\\n12\\n13\\n14\n" + "With diff:\n@@ -1,5 +1,6 @@\n 1\n-2XXX\n+2\n 3\n+4\n 5\n 6\n" + "@@ -7,8 +8,6 @@\n 8\n 9\n-10\n 11\n-12XXX\n+12\n 13\n 14\n-15\n", + msg1.c_str()); +} + +// Tests AppendUserMessage(), used for implementing the *EQ* macros. +TEST(AssertionTest, AppendUserMessage) { + const std::string foo("foo"); + + Message msg; + EXPECT_STREQ("foo", + AppendUserMessage(foo, msg).c_str()); + + msg << "bar"; + EXPECT_STREQ("foo\nbar", + AppendUserMessage(foo, msg).c_str()); +} + +#ifdef __BORLANDC__ +// Silences warnings: "Condition is always true", "Unreachable code" +# pragma option push -w-ccc -w-rch +#endif + +// Tests ASSERT_TRUE. +TEST(AssertionTest, ASSERT_TRUE) { + ASSERT_TRUE(2 > 1); // NOLINT + EXPECT_FATAL_FAILURE(ASSERT_TRUE(2 < 1), + "2 < 1"); +} + +// Tests ASSERT_TRUE(predicate) for predicates returning AssertionResult. +TEST(AssertionTest, AssertTrueWithAssertionResult) { + ASSERT_TRUE(ResultIsEven(2)); +#ifndef __BORLANDC__ + // ICE's in C++Builder. + EXPECT_FATAL_FAILURE(ASSERT_TRUE(ResultIsEven(3)), + "Value of: ResultIsEven(3)\n" + " Actual: false (3 is odd)\n" + "Expected: true"); +#endif + ASSERT_TRUE(ResultIsEvenNoExplanation(2)); + EXPECT_FATAL_FAILURE(ASSERT_TRUE(ResultIsEvenNoExplanation(3)), + "Value of: ResultIsEvenNoExplanation(3)\n" + " Actual: false (3 is odd)\n" + "Expected: true"); +} + +// Tests ASSERT_FALSE. +TEST(AssertionTest, ASSERT_FALSE) { + ASSERT_FALSE(2 < 1); // NOLINT + EXPECT_FATAL_FAILURE(ASSERT_FALSE(2 > 1), + "Value of: 2 > 1\n" + " Actual: true\n" + "Expected: false"); +} + +// Tests ASSERT_FALSE(predicate) for predicates returning AssertionResult. +TEST(AssertionTest, AssertFalseWithAssertionResult) { + ASSERT_FALSE(ResultIsEven(3)); +#ifndef __BORLANDC__ + // ICE's in C++Builder. 
+ EXPECT_FATAL_FAILURE(ASSERT_FALSE(ResultIsEven(2)), + "Value of: ResultIsEven(2)\n" + " Actual: true (2 is even)\n" + "Expected: false"); +#endif + ASSERT_FALSE(ResultIsEvenNoExplanation(3)); + EXPECT_FATAL_FAILURE(ASSERT_FALSE(ResultIsEvenNoExplanation(2)), + "Value of: ResultIsEvenNoExplanation(2)\n" + " Actual: true\n" + "Expected: false"); +} + +#ifdef __BORLANDC__ +// Restores warnings after previous "#pragma option push" suppressed them +# pragma option pop +#endif + +// Tests using ASSERT_EQ on double values. The purpose is to make +// sure that the specialization we did for integer and anonymous enums +// isn't used for double arguments. +TEST(ExpectTest, ASSERT_EQ_Double) { + // A success. + ASSERT_EQ(5.6, 5.6); + + // A failure. + EXPECT_FATAL_FAILURE(ASSERT_EQ(5.1, 5.2), + "5.1"); +} + +// Tests ASSERT_EQ. +TEST(AssertionTest, ASSERT_EQ) { + ASSERT_EQ(5, 2 + 3); + EXPECT_FATAL_FAILURE(ASSERT_EQ(5, 2*3), + "Expected equality of these values:\n" + " 5\n" + " 2*3\n" + " Which is: 6"); +} + +// Tests ASSERT_EQ(NULL, pointer). +TEST(AssertionTest, ASSERT_EQ_NULL) { + // A success. + const char* p = nullptr; + ASSERT_EQ(nullptr, p); + + // A failure. + static int n = 0; + EXPECT_FATAL_FAILURE(ASSERT_EQ(nullptr, &n), " &n\n Which is:"); +} + +// Tests ASSERT_EQ(0, non_pointer). Since the literal 0 can be +// treated as a null pointer by the compiler, we need to make sure +// that ASSERT_EQ(0, non_pointer) isn't interpreted by Google Test as +// ASSERT_EQ(static_cast<void*>(NULL), non_pointer). +TEST(ExpectTest, ASSERT_EQ_0) { + int n = 0; + + // A success. + ASSERT_EQ(0, n); + + // A failure. + EXPECT_FATAL_FAILURE(ASSERT_EQ(0, 5.6), + " 0\n 5.6"); +} + +// Tests ASSERT_NE. +TEST(AssertionTest, ASSERT_NE) { + ASSERT_NE(6, 7); + EXPECT_FATAL_FAILURE(ASSERT_NE('a', 'a'), + "Expected: ('a') != ('a'), " + "actual: 'a' (97, 0x61) vs 'a' (97, 0x61)"); +} + +// Tests ASSERT_LE. +TEST(AssertionTest, ASSERT_LE) { + ASSERT_LE(2, 3); + ASSERT_LE(2, 2); + EXPECT_FATAL_FAILURE(ASSERT_LE(2, 0), + "Expected: (2) <= (0), actual: 2 vs 0"); +} + +// Tests ASSERT_LT. +TEST(AssertionTest, ASSERT_LT) { + ASSERT_LT(2, 3); + EXPECT_FATAL_FAILURE(ASSERT_LT(2, 2), + "Expected: (2) < (2), actual: 2 vs 2"); +} + +// Tests ASSERT_GE. +TEST(AssertionTest, ASSERT_GE) { + ASSERT_GE(2, 1); + ASSERT_GE(2, 2); + EXPECT_FATAL_FAILURE(ASSERT_GE(2, 3), + "Expected: (2) >= (3), actual: 2 vs 3"); +} + +// Tests ASSERT_GT. +TEST(AssertionTest, ASSERT_GT) { + ASSERT_GT(2, 1); + EXPECT_FATAL_FAILURE(ASSERT_GT(2, 2), + "Expected: (2) > (2), actual: 2 vs 2"); +} + +#if GTEST_HAS_EXCEPTIONS + +void ThrowNothing() {} + +// Tests ASSERT_THROW. +TEST(AssertionTest, ASSERT_THROW) { + ASSERT_THROW(ThrowAnInteger(), int); + +# ifndef __BORLANDC__ + + // ICE's in C++Builder 2007 and 2009. + EXPECT_FATAL_FAILURE( + ASSERT_THROW(ThrowAnInteger(), bool), + "Expected: ThrowAnInteger() throws an exception of type bool.\n" + " Actual: it throws a different type."); + EXPECT_FATAL_FAILURE( + ASSERT_THROW(ThrowRuntimeError("A description"), std::logic_error), + "Expected: ThrowRuntimeError(\"A description\") " + "throws an exception of type std::logic_error.\n " + "Actual: it throws " ERROR_DESC " " + "with description \"A description\"."); +# endif + + EXPECT_FATAL_FAILURE( + ASSERT_THROW(ThrowNothing(), bool), + "Expected: ThrowNothing() throws an exception of type bool.\n" + " Actual: it throws nothing."); +} + +// Tests ASSERT_NO_THROW. 
+TEST(AssertionTest, ASSERT_NO_THROW) { + ASSERT_NO_THROW(ThrowNothing()); + EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()), + "Expected: ThrowAnInteger() doesn't throw an exception." + "\n Actual: it throws."); + EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowRuntimeError("A description")), + "Expected: ThrowRuntimeError(\"A description\") " + "doesn't throw an exception.\n " + "Actual: it throws " ERROR_DESC " " + "with description \"A description\"."); +} + +// Tests ASSERT_ANY_THROW. +TEST(AssertionTest, ASSERT_ANY_THROW) { + ASSERT_ANY_THROW(ThrowAnInteger()); + EXPECT_FATAL_FAILURE( + ASSERT_ANY_THROW(ThrowNothing()), + "Expected: ThrowNothing() throws an exception.\n" + " Actual: it doesn't."); +} + +#endif // GTEST_HAS_EXCEPTIONS + +// Makes sure we deal with the precedence of <<. This test should +// compile. +TEST(AssertionTest, AssertPrecedence) { + ASSERT_EQ(1 < 2, true); + bool false_value = false; + ASSERT_EQ(true && false_value, false); +} + +// A subroutine used by the following test. +void TestEq1(int x) { + ASSERT_EQ(1, x); +} + +// Tests calling a test subroutine that's not part of a fixture. +TEST(AssertionTest, NonFixtureSubroutine) { + EXPECT_FATAL_FAILURE(TestEq1(2), + " x\n Which is: 2"); +} + +// An uncopyable class. +class Uncopyable { + public: + explicit Uncopyable(int a_value) : value_(a_value) {} + + int value() const { return value_; } + bool operator==(const Uncopyable& rhs) const { + return value() == rhs.value(); + } + private: + // This constructor deliberately has no implementation, as we don't + // want this class to be copyable. + Uncopyable(const Uncopyable&); // NOLINT + + int value_; +}; + +::std::ostream& operator<<(::std::ostream& os, const Uncopyable& value) { + return os << value.value(); +} + + +bool IsPositiveUncopyable(const Uncopyable& x) { + return x.value() > 0; +} + +// A subroutine used by the following test. +void TestAssertNonPositive() { + Uncopyable y(-1); + ASSERT_PRED1(IsPositiveUncopyable, y); +} +// A subroutine used by the following test. +void TestAssertEqualsUncopyable() { + Uncopyable x(5); + Uncopyable y(-1); + ASSERT_EQ(x, y); +} + +// Tests that uncopyable objects can be used in assertions. +TEST(AssertionTest, AssertWorksWithUncopyableObject) { + Uncopyable x(5); + ASSERT_PRED1(IsPositiveUncopyable, x); + ASSERT_EQ(x, x); + EXPECT_FATAL_FAILURE(TestAssertNonPositive(), + "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1"); + EXPECT_FATAL_FAILURE(TestAssertEqualsUncopyable(), + "Expected equality of these values:\n" + " x\n Which is: 5\n y\n Which is: -1"); +} + +// Tests that uncopyable objects can be used in expects. +TEST(AssertionTest, ExpectWorksWithUncopyableObject) { + Uncopyable x(5); + EXPECT_PRED1(IsPositiveUncopyable, x); + Uncopyable y(-1); + EXPECT_NONFATAL_FAILURE(EXPECT_PRED1(IsPositiveUncopyable, y), + "IsPositiveUncopyable(y) evaluates to false, where\ny evaluates to -1"); + EXPECT_EQ(x, x); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y), + "Expected equality of these values:\n" + " x\n Which is: 5\n y\n Which is: -1"); +} + +enum NamedEnum { + kE1 = 0, + kE2 = 1 +}; + +TEST(AssertionTest, NamedEnum) { + EXPECT_EQ(kE1, kE1); + EXPECT_LT(kE1, kE2); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(kE1, kE2), "Which is: 0"); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(kE1, kE2), "Which is: 1"); +} + +// Sun Studio and HP aCC2reject this code. +#if !defined(__SUNPRO_CC) && !defined(__HP_aCC) + +// Tests using assertions with anonymous enums. 
+enum { + kCaseA = -1, + +# if GTEST_OS_LINUX + + // We want to test the case where the size of the anonymous enum is + // larger than sizeof(int), to make sure our implementation of the + // assertions doesn't truncate the enums. However, MSVC + // (incorrectly) doesn't allow an enum value to exceed the range of + // an int, so this has to be conditionally compiled. + // + // On Linux, kCaseB and kCaseA have the same value when truncated to + // int size. We want to test whether this will confuse the + // assertions. + kCaseB = testing::internal::kMaxBiggestInt, + +# else + + kCaseB = INT_MAX, + +# endif // GTEST_OS_LINUX + + kCaseC = 42 +}; + +TEST(AssertionTest, AnonymousEnum) { +# if GTEST_OS_LINUX + + EXPECT_EQ(static_cast<int>(kCaseA), static_cast<int>(kCaseB)); + +# endif // GTEST_OS_LINUX + + EXPECT_EQ(kCaseA, kCaseA); + EXPECT_NE(kCaseA, kCaseB); + EXPECT_LT(kCaseA, kCaseB); + EXPECT_LE(kCaseA, kCaseB); + EXPECT_GT(kCaseB, kCaseA); + EXPECT_GE(kCaseA, kCaseA); + EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseB), + "(kCaseA) >= (kCaseB)"); + EXPECT_NONFATAL_FAILURE(EXPECT_GE(kCaseA, kCaseC), + "-1 vs 42"); + + ASSERT_EQ(kCaseA, kCaseA); + ASSERT_NE(kCaseA, kCaseB); + ASSERT_LT(kCaseA, kCaseB); + ASSERT_LE(kCaseA, kCaseB); + ASSERT_GT(kCaseB, kCaseA); + ASSERT_GE(kCaseA, kCaseA); + +# ifndef __BORLANDC__ + + // ICE's in C++Builder. + EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseB), + " kCaseB\n Which is: "); + EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC), + "\n Which is: 42"); +# endif + + EXPECT_FATAL_FAILURE(ASSERT_EQ(kCaseA, kCaseC), + "\n Which is: -1"); +} + +#endif // !GTEST_OS_MAC && !defined(__SUNPRO_CC) + +#if GTEST_OS_WINDOWS + +static HRESULT UnexpectedHRESULTFailure() { + return E_UNEXPECTED; +} + +static HRESULT OkHRESULTSuccess() { + return S_OK; +} + +static HRESULT FalseHRESULTSuccess() { + return S_FALSE; +} + +// HRESULT assertion tests test both zero and non-zero +// success codes as well as failure message for each. +// +// Windows CE doesn't support message texts. +TEST(HRESULTAssertionTest, EXPECT_HRESULT_SUCCEEDED) { + EXPECT_HRESULT_SUCCEEDED(S_OK); + EXPECT_HRESULT_SUCCEEDED(S_FALSE); + + EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()), + "Expected: (UnexpectedHRESULTFailure()) succeeds.\n" + " Actual: 0x8000FFFF"); +} + +TEST(HRESULTAssertionTest, ASSERT_HRESULT_SUCCEEDED) { + ASSERT_HRESULT_SUCCEEDED(S_OK); + ASSERT_HRESULT_SUCCEEDED(S_FALSE); + + EXPECT_FATAL_FAILURE(ASSERT_HRESULT_SUCCEEDED(UnexpectedHRESULTFailure()), + "Expected: (UnexpectedHRESULTFailure()) succeeds.\n" + " Actual: 0x8000FFFF"); +} + +TEST(HRESULTAssertionTest, EXPECT_HRESULT_FAILED) { + EXPECT_HRESULT_FAILED(E_UNEXPECTED); + + EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(OkHRESULTSuccess()), + "Expected: (OkHRESULTSuccess()) fails.\n" + " Actual: 0x0"); + EXPECT_NONFATAL_FAILURE(EXPECT_HRESULT_FAILED(FalseHRESULTSuccess()), + "Expected: (FalseHRESULTSuccess()) fails.\n" + " Actual: 0x1"); +} + +TEST(HRESULTAssertionTest, ASSERT_HRESULT_FAILED) { + ASSERT_HRESULT_FAILED(E_UNEXPECTED); + +# ifndef __BORLANDC__ + + // ICE's in C++Builder 2007 and 2009. + EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(OkHRESULTSuccess()), + "Expected: (OkHRESULTSuccess()) fails.\n" + " Actual: 0x0"); +# endif + + EXPECT_FATAL_FAILURE(ASSERT_HRESULT_FAILED(FalseHRESULTSuccess()), + "Expected: (FalseHRESULTSuccess()) fails.\n" + " Actual: 0x1"); +} + +// Tests that streaming to the HRESULT macros works. 
+TEST(HRESULTAssertionTest, Streaming) { + EXPECT_HRESULT_SUCCEEDED(S_OK) << "unexpected failure"; + ASSERT_HRESULT_SUCCEEDED(S_OK) << "unexpected failure"; + EXPECT_HRESULT_FAILED(E_UNEXPECTED) << "unexpected failure"; + ASSERT_HRESULT_FAILED(E_UNEXPECTED) << "unexpected failure"; + + EXPECT_NONFATAL_FAILURE( + EXPECT_HRESULT_SUCCEEDED(E_UNEXPECTED) << "expected failure", + "expected failure"); + +# ifndef __BORLANDC__ + + // ICE's in C++Builder 2007 and 2009. + EXPECT_FATAL_FAILURE( + ASSERT_HRESULT_SUCCEEDED(E_UNEXPECTED) << "expected failure", + "expected failure"); +# endif + + EXPECT_NONFATAL_FAILURE( + EXPECT_HRESULT_FAILED(S_OK) << "expected failure", + "expected failure"); + + EXPECT_FATAL_FAILURE( + ASSERT_HRESULT_FAILED(S_OK) << "expected failure", + "expected failure"); +} + +#endif // GTEST_OS_WINDOWS + +// The following code intentionally tests a suboptimal syntax. +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdangling-else" +#pragma GCC diagnostic ignored "-Wempty-body" +#pragma GCC diagnostic ignored "-Wpragmas" +#endif +// Tests that the assertion macros behave like single statements. +TEST(AssertionSyntaxTest, BasicAssertionsBehavesLikeSingleStatement) { + if (AlwaysFalse()) + ASSERT_TRUE(false) << "This should never be executed; " + "It's a compilation test only."; + + if (AlwaysTrue()) + EXPECT_FALSE(false); + else + ; // NOLINT + + if (AlwaysFalse()) + ASSERT_LT(1, 3); + + if (AlwaysFalse()) + ; // NOLINT + else + EXPECT_GT(3, 2) << ""; +} +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + +#if GTEST_HAS_EXCEPTIONS +// Tests that the compiler will not complain about unreachable code in the +// EXPECT_THROW/EXPECT_ANY_THROW/EXPECT_NO_THROW macros. +TEST(ExpectThrowTest, DoesNotGenerateUnreachableCodeWarning) { + int n = 0; + + EXPECT_THROW(throw 1, int); + EXPECT_NONFATAL_FAILURE(EXPECT_THROW(n++, int), ""); + EXPECT_NONFATAL_FAILURE(EXPECT_THROW(throw 1, const char*), ""); + EXPECT_NO_THROW(n++); + EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(throw 1), ""); + EXPECT_ANY_THROW(throw 1); + EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(n++), ""); +} + +TEST(ExpectThrowTest, DoesNotGenerateDuplicateCatchClauseWarning) { + EXPECT_THROW(throw std::exception(), std::exception); +} + +// The following code intentionally tests a suboptimal syntax. +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdangling-else" +#pragma GCC diagnostic ignored "-Wempty-body" +#pragma GCC diagnostic ignored "-Wpragmas" +#endif +TEST(AssertionSyntaxTest, ExceptionAssertionsBehavesLikeSingleStatement) { + if (AlwaysFalse()) + EXPECT_THROW(ThrowNothing(), bool); + + if (AlwaysTrue()) + EXPECT_THROW(ThrowAnInteger(), int); + else + ; // NOLINT + + if (AlwaysFalse()) + EXPECT_NO_THROW(ThrowAnInteger()); + + if (AlwaysTrue()) + EXPECT_NO_THROW(ThrowNothing()); + else + ; // NOLINT + + if (AlwaysFalse()) + EXPECT_ANY_THROW(ThrowNothing()); + + if (AlwaysTrue()) + EXPECT_ANY_THROW(ThrowAnInteger()); + else + ; // NOLINT +} +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + +#endif // GTEST_HAS_EXCEPTIONS + +// The following code intentionally tests a suboptimal syntax. +#ifdef __GNUC__ +#pragma GCC diagnostic push +#pragma GCC diagnostic ignored "-Wdangling-else" +#pragma GCC diagnostic ignored "-Wempty-body" +#pragma GCC diagnostic ignored "-Wpragmas" +#endif +TEST(AssertionSyntaxTest, NoFatalFailureAssertionsBehavesLikeSingleStatement) { + if (AlwaysFalse()) + EXPECT_NO_FATAL_FAILURE(FAIL()) << "This should never be executed. 
" + << "It's a compilation test only."; + else + ; // NOLINT + + if (AlwaysFalse()) + ASSERT_NO_FATAL_FAILURE(FAIL()) << ""; + else + ; // NOLINT + + if (AlwaysTrue()) + EXPECT_NO_FATAL_FAILURE(SUCCEED()); + else + ; // NOLINT + + if (AlwaysFalse()) + ; // NOLINT + else + ASSERT_NO_FATAL_FAILURE(SUCCEED()); +} +#ifdef __GNUC__ +#pragma GCC diagnostic pop +#endif + +// Tests that the assertion macros work well with switch statements. +TEST(AssertionSyntaxTest, WorksWithSwitch) { + switch (0) { + case 1: + break; + default: + ASSERT_TRUE(true); + } + + switch (0) + case 0: + EXPECT_FALSE(false) << "EXPECT_FALSE failed in switch case"; + + // Binary assertions are implemented using a different code path + // than the Boolean assertions. Hence we test them separately. + switch (0) { + case 1: + default: + ASSERT_EQ(1, 1) << "ASSERT_EQ failed in default switch handler"; + } + + switch (0) + case 0: + EXPECT_NE(1, 2); +} + +#if GTEST_HAS_EXCEPTIONS + +void ThrowAString() { + throw "std::string"; +} + +// Test that the exception assertion macros compile and work with const +// type qualifier. +TEST(AssertionSyntaxTest, WorksWithConst) { + ASSERT_THROW(ThrowAString(), const char*); + + EXPECT_THROW(ThrowAString(), const char*); +} + +#endif // GTEST_HAS_EXCEPTIONS + +} // namespace + +namespace testing { + +// Tests that Google Test tracks SUCCEED*. +TEST(SuccessfulAssertionTest, SUCCEED) { + SUCCEED(); + SUCCEED() << "OK"; + EXPECT_EQ(2, GetUnitTestImpl()->current_test_result()->total_part_count()); +} + +// Tests that Google Test doesn't track successful EXPECT_*. +TEST(SuccessfulAssertionTest, EXPECT) { + EXPECT_TRUE(true); + EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count()); +} + +// Tests that Google Test doesn't track successful EXPECT_STR*. +TEST(SuccessfulAssertionTest, EXPECT_STR) { + EXPECT_STREQ("", ""); + EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count()); +} + +// Tests that Google Test doesn't track successful ASSERT_*. +TEST(SuccessfulAssertionTest, ASSERT) { + ASSERT_TRUE(true); + EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count()); +} + +// Tests that Google Test doesn't track successful ASSERT_STR*. +TEST(SuccessfulAssertionTest, ASSERT_STR) { + ASSERT_STREQ("", ""); + EXPECT_EQ(0, GetUnitTestImpl()->current_test_result()->total_part_count()); +} + +} // namespace testing + +namespace { + +// Tests the message streaming variation of assertions. 
+ +TEST(AssertionWithMessageTest, EXPECT) { + EXPECT_EQ(1, 1) << "This should succeed."; + EXPECT_NONFATAL_FAILURE(EXPECT_NE(1, 1) << "Expected failure #1.", + "Expected failure #1"); + EXPECT_LE(1, 2) << "This should succeed."; + EXPECT_NONFATAL_FAILURE(EXPECT_LT(1, 0) << "Expected failure #2.", + "Expected failure #2."); + EXPECT_GE(1, 0) << "This should succeed."; + EXPECT_NONFATAL_FAILURE(EXPECT_GT(1, 2) << "Expected failure #3.", + "Expected failure #3."); + + EXPECT_STREQ("1", "1") << "This should succeed."; + EXPECT_NONFATAL_FAILURE(EXPECT_STRNE("1", "1") << "Expected failure #4.", + "Expected failure #4."); + EXPECT_STRCASEEQ("a", "A") << "This should succeed."; + EXPECT_NONFATAL_FAILURE(EXPECT_STRCASENE("a", "A") << "Expected failure #5.", + "Expected failure #5."); + + EXPECT_FLOAT_EQ(1, 1) << "This should succeed."; + EXPECT_NONFATAL_FAILURE(EXPECT_DOUBLE_EQ(1, 1.2) << "Expected failure #6.", + "Expected failure #6."); + EXPECT_NEAR(1, 1.1, 0.2) << "This should succeed."; +} + +TEST(AssertionWithMessageTest, ASSERT) { + ASSERT_EQ(1, 1) << "This should succeed."; + ASSERT_NE(1, 2) << "This should succeed."; + ASSERT_LE(1, 2) << "This should succeed."; + ASSERT_LT(1, 2) << "This should succeed."; + ASSERT_GE(1, 0) << "This should succeed."; + EXPECT_FATAL_FAILURE(ASSERT_GT(1, 2) << "Expected failure.", + "Expected failure."); +} + +TEST(AssertionWithMessageTest, ASSERT_STR) { + ASSERT_STREQ("1", "1") << "This should succeed."; + ASSERT_STRNE("1", "2") << "This should succeed."; + ASSERT_STRCASEEQ("a", "A") << "This should succeed."; + EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("a", "A") << "Expected failure.", + "Expected failure."); +} + +TEST(AssertionWithMessageTest, ASSERT_FLOATING) { + ASSERT_FLOAT_EQ(1, 1) << "This should succeed."; + ASSERT_DOUBLE_EQ(1, 1) << "This should succeed."; + EXPECT_FATAL_FAILURE(ASSERT_NEAR(1, 1.2, 0.1) << "Expect failure.", // NOLINT + "Expect failure."); +} + +// Tests using ASSERT_FALSE with a streamed message. +TEST(AssertionWithMessageTest, ASSERT_FALSE) { + ASSERT_FALSE(false) << "This shouldn't fail."; + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_FALSE(true) << "Expected failure: " << 2 << " > " << 1 + << " evaluates to " << true; + }, "Expected failure"); +} + +// Tests using FAIL with a streamed message. +TEST(AssertionWithMessageTest, FAIL) { + EXPECT_FATAL_FAILURE(FAIL() << 0, + "0"); +} + +// Tests using SUCCEED with a streamed message. +TEST(AssertionWithMessageTest, SUCCEED) { + SUCCEED() << "Success == " << 1; +} + +// Tests using ASSERT_TRUE with a streamed message. +TEST(AssertionWithMessageTest, ASSERT_TRUE) { + ASSERT_TRUE(true) << "This should succeed."; + ASSERT_TRUE(true) << true; + EXPECT_FATAL_FAILURE( + { // NOLINT + ASSERT_TRUE(false) << static_cast<const char*>(nullptr) + << static_cast<char*>(nullptr); + }, + "(null)(null)"); +} + +#if GTEST_OS_WINDOWS +// Tests using wide strings in assertion messages. +TEST(AssertionWithMessageTest, WideStringMessage) { + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_TRUE(false) << L"This failure is expected.\x8119"; + }, "This failure is expected."); + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_EQ(1, 2) << "This failure is " + << L"expected too.\x8120"; + }, "This failure is expected too."); +} +#endif // GTEST_OS_WINDOWS + +// Tests EXPECT_TRUE. 
+TEST(ExpectTest, EXPECT_TRUE) { + EXPECT_TRUE(true) << "Intentional success"; + EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << "Intentional failure #1.", + "Intentional failure #1."); + EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << "Intentional failure #2.", + "Intentional failure #2."); + EXPECT_TRUE(2 > 1); // NOLINT + EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 < 1), + "Value of: 2 < 1\n" + " Actual: false\n" + "Expected: true"); + EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(2 > 3), + "2 > 3"); +} + +// Tests EXPECT_TRUE(predicate) for predicates returning AssertionResult. +TEST(ExpectTest, ExpectTrueWithAssertionResult) { + EXPECT_TRUE(ResultIsEven(2)); + EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(ResultIsEven(3)), + "Value of: ResultIsEven(3)\n" + " Actual: false (3 is odd)\n" + "Expected: true"); + EXPECT_TRUE(ResultIsEvenNoExplanation(2)); + EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(ResultIsEvenNoExplanation(3)), + "Value of: ResultIsEvenNoExplanation(3)\n" + " Actual: false (3 is odd)\n" + "Expected: true"); +} + +// Tests EXPECT_FALSE with a streamed message. +TEST(ExpectTest, EXPECT_FALSE) { + EXPECT_FALSE(2 < 1); // NOLINT + EXPECT_FALSE(false) << "Intentional success"; + EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << "Intentional failure #1.", + "Intentional failure #1."); + EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << "Intentional failure #2.", + "Intentional failure #2."); + EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 > 1), + "Value of: 2 > 1\n" + " Actual: true\n" + "Expected: false"); + EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(2 < 3), + "2 < 3"); +} + +// Tests EXPECT_FALSE(predicate) for predicates returning AssertionResult. +TEST(ExpectTest, ExpectFalseWithAssertionResult) { + EXPECT_FALSE(ResultIsEven(3)); + EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(ResultIsEven(2)), + "Value of: ResultIsEven(2)\n" + " Actual: true (2 is even)\n" + "Expected: false"); + EXPECT_FALSE(ResultIsEvenNoExplanation(3)); + EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(ResultIsEvenNoExplanation(2)), + "Value of: ResultIsEvenNoExplanation(2)\n" + " Actual: true\n" + "Expected: false"); +} + +#ifdef __BORLANDC__ +// Restores warnings after previous "#pragma option push" suppressed them +# pragma option pop +#endif + +// Tests EXPECT_EQ. +TEST(ExpectTest, EXPECT_EQ) { + EXPECT_EQ(5, 2 + 3); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2*3), + "Expected equality of these values:\n" + " 5\n" + " 2*3\n" + " Which is: 6"); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5, 2 - 3), + "2 - 3"); +} + +// Tests using EXPECT_EQ on double values. The purpose is to make +// sure that the specialization we did for integer and anonymous enums +// isn't used for double arguments. +TEST(ExpectTest, EXPECT_EQ_Double) { + // A success. + EXPECT_EQ(5.6, 5.6); + + // A failure. + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(5.1, 5.2), + "5.1"); +} + +// Tests EXPECT_EQ(NULL, pointer). +TEST(ExpectTest, EXPECT_EQ_NULL) { + // A success. + const char* p = nullptr; + EXPECT_EQ(nullptr, p); + + // A failure. + int n = 0; + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(nullptr, &n), " &n\n Which is:"); +} + +// Tests EXPECT_EQ(0, non_pointer). Since the literal 0 can be +// treated as a null pointer by the compiler, we need to make sure +// that EXPECT_EQ(0, non_pointer) isn't interpreted by Google Test as +// EXPECT_EQ(static_cast<void*>(NULL), non_pointer). +TEST(ExpectTest, EXPECT_EQ_0) { + int n = 0; + + // A success. + EXPECT_EQ(0, n); + + // A failure. + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(0, 5.6), + " 0\n 5.6"); +} + +// Tests EXPECT_NE. 
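The EXPECT_EQ_NULL and EXPECT_EQ_0 tests above separate pointer comparisons from integer comparisons. A small sketch of how that distinction reads in user code (illustrative only, nothing beyond gtest.h assumed):

    #include "gtest/gtest.h"

    TEST(NullComparisonSketch, PointerVersusInteger) {
      const char* p = nullptr;
      int n = 0;
      EXPECT_EQ(nullptr, p);  // compared as pointers
      EXPECT_EQ(0, n);        // compared as integers; the literal 0 is not
                              // silently treated as a null pointer here
    }
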
+TEST(ExpectTest, EXPECT_NE) { + EXPECT_NE(6, 7); + + EXPECT_NONFATAL_FAILURE(EXPECT_NE('a', 'a'), + "Expected: ('a') != ('a'), " + "actual: 'a' (97, 0x61) vs 'a' (97, 0x61)"); + EXPECT_NONFATAL_FAILURE(EXPECT_NE(2, 2), + "2"); + char* const p0 = nullptr; + EXPECT_NONFATAL_FAILURE(EXPECT_NE(p0, p0), + "p0"); + // Only way to get the Nokia compiler to compile the cast + // is to have a separate void* variable first. Putting + // the two casts on the same line doesn't work, neither does + // a direct C-style to char*. + void* pv1 = (void*)0x1234; // NOLINT + char* const p1 = reinterpret_cast<char*>(pv1); + EXPECT_NONFATAL_FAILURE(EXPECT_NE(p1, p1), + "p1"); +} + +// Tests EXPECT_LE. +TEST(ExpectTest, EXPECT_LE) { + EXPECT_LE(2, 3); + EXPECT_LE(2, 2); + EXPECT_NONFATAL_FAILURE(EXPECT_LE(2, 0), + "Expected: (2) <= (0), actual: 2 vs 0"); + EXPECT_NONFATAL_FAILURE(EXPECT_LE(1.1, 0.9), + "(1.1) <= (0.9)"); +} + +// Tests EXPECT_LT. +TEST(ExpectTest, EXPECT_LT) { + EXPECT_LT(2, 3); + EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 2), + "Expected: (2) < (2), actual: 2 vs 2"); + EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1), + "(2) < (1)"); +} + +// Tests EXPECT_GE. +TEST(ExpectTest, EXPECT_GE) { + EXPECT_GE(2, 1); + EXPECT_GE(2, 2); + EXPECT_NONFATAL_FAILURE(EXPECT_GE(2, 3), + "Expected: (2) >= (3), actual: 2 vs 3"); + EXPECT_NONFATAL_FAILURE(EXPECT_GE(0.9, 1.1), + "(0.9) >= (1.1)"); +} + +// Tests EXPECT_GT. +TEST(ExpectTest, EXPECT_GT) { + EXPECT_GT(2, 1); + EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 2), + "Expected: (2) > (2), actual: 2 vs 2"); + EXPECT_NONFATAL_FAILURE(EXPECT_GT(2, 3), + "(2) > (3)"); +} + +#if GTEST_HAS_EXCEPTIONS + +// Tests EXPECT_THROW. +TEST(ExpectTest, EXPECT_THROW) { + EXPECT_THROW(ThrowAnInteger(), int); + EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool), + "Expected: ThrowAnInteger() throws an exception of " + "type bool.\n Actual: it throws a different type."); + EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowRuntimeError("A description"), + std::logic_error), + "Expected: ThrowRuntimeError(\"A description\") " + "throws an exception of type std::logic_error.\n " + "Actual: it throws " ERROR_DESC " " + "with description \"A description\"."); + EXPECT_NONFATAL_FAILURE( + EXPECT_THROW(ThrowNothing(), bool), + "Expected: ThrowNothing() throws an exception of type bool.\n" + " Actual: it throws nothing."); +} + +// Tests EXPECT_NO_THROW. +TEST(ExpectTest, EXPECT_NO_THROW) { + EXPECT_NO_THROW(ThrowNothing()); + EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger()), + "Expected: ThrowAnInteger() doesn't throw an " + "exception.\n Actual: it throws."); + EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowRuntimeError("A description")), + "Expected: ThrowRuntimeError(\"A description\") " + "doesn't throw an exception.\n " + "Actual: it throws " ERROR_DESC " " + "with description \"A description\"."); +} + +// Tests EXPECT_ANY_THROW. +TEST(ExpectTest, EXPECT_ANY_THROW) { + EXPECT_ANY_THROW(ThrowAnInteger()); + EXPECT_NONFATAL_FAILURE( + EXPECT_ANY_THROW(ThrowNothing()), + "Expected: ThrowNothing() throws an exception.\n" + " Actual: it doesn't."); +} + +#endif // GTEST_HAS_EXCEPTIONS + +// Make sure we deal with the precedence of <<. +TEST(ExpectTest, ExpectPrecedence) { + EXPECT_EQ(1 < 2, true); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(true, true && false), + " true && false\n Which is: false"); +} + + +// Tests the StreamableToString() function. + +// Tests using StreamableToString() on a scalar. 
+TEST(StreamableToStringTest, Scalar) { + EXPECT_STREQ("5", StreamableToString(5).c_str()); +} + +// Tests using StreamableToString() on a non-char pointer. +TEST(StreamableToStringTest, Pointer) { + int n = 0; + int* p = &n; + EXPECT_STRNE("(null)", StreamableToString(p).c_str()); +} + +// Tests using StreamableToString() on a NULL non-char pointer. +TEST(StreamableToStringTest, NullPointer) { + int* p = nullptr; + EXPECT_STREQ("(null)", StreamableToString(p).c_str()); +} + +// Tests using StreamableToString() on a C string. +TEST(StreamableToStringTest, CString) { + EXPECT_STREQ("Foo", StreamableToString("Foo").c_str()); +} + +// Tests using StreamableToString() on a NULL C string. +TEST(StreamableToStringTest, NullCString) { + char* p = nullptr; + EXPECT_STREQ("(null)", StreamableToString(p).c_str()); +} + +// Tests using streamable values as assertion messages. + +// Tests using std::string as an assertion message. +TEST(StreamableTest, string) { + static const std::string str( + "This failure message is a std::string, and is expected."); + EXPECT_FATAL_FAILURE(FAIL() << str, + str.c_str()); +} + +// Tests that we can output strings containing embedded NULs. +// Limited to Linux because we can only do this with std::string's. +TEST(StreamableTest, stringWithEmbeddedNUL) { + static const char char_array_with_nul[] = + "Here's a NUL\0 and some more string"; + static const std::string string_with_nul(char_array_with_nul, + sizeof(char_array_with_nul) + - 1); // drops the trailing NUL + EXPECT_FATAL_FAILURE(FAIL() << string_with_nul, + "Here's a NUL\\0 and some more string"); +} + +// Tests that we can output a NUL char. +TEST(StreamableTest, NULChar) { + EXPECT_FATAL_FAILURE({ // NOLINT + FAIL() << "A NUL" << '\0' << " and some more string"; + }, "A NUL\\0 and some more string"); +} + +// Tests using int as an assertion message. +TEST(StreamableTest, int) { + EXPECT_FATAL_FAILURE(FAIL() << 900913, + "900913"); +} + +// Tests using NULL char pointer as an assertion message. +// +// In MSVC, streaming a NULL char * causes access violation. Google Test +// implemented a workaround (substituting "(null)" for NULL). This +// tests whether the workaround works. +TEST(StreamableTest, NullCharPtr) { + EXPECT_FATAL_FAILURE(FAIL() << static_cast<const char*>(nullptr), "(null)"); +} + +// Tests that basic IO manipulators (endl, ends, and flush) can be +// streamed to testing::Message. +TEST(StreamableTest, BasicIoManip) { + EXPECT_FATAL_FAILURE({ // NOLINT + FAIL() << "Line 1." << std::endl + << "A NUL char " << std::ends << std::flush << " in line 2."; + }, "Line 1.\nA NUL char \\0 in line 2."); +} + +// Tests the macros that haven't been covered so far. + +void AddFailureHelper(bool* aborted) { + *aborted = true; + ADD_FAILURE() << "Intentional failure."; + *aborted = false; +} + +// Tests ADD_FAILURE. +TEST(MacroTest, ADD_FAILURE) { + bool aborted = true; + EXPECT_NONFATAL_FAILURE(AddFailureHelper(&aborted), + "Intentional failure."); + EXPECT_FALSE(aborted); +} + +// Tests ADD_FAILURE_AT. +TEST(MacroTest, ADD_FAILURE_AT) { + // Verifies that ADD_FAILURE_AT does generate a nonfatal failure and + // the failure message contains the user-streamed part. + EXPECT_NONFATAL_FAILURE(ADD_FAILURE_AT("foo.cc", 42) << "Wrong!", "Wrong!"); + + // Verifies that the user-streamed part is optional. 
+ EXPECT_NONFATAL_FAILURE(ADD_FAILURE_AT("foo.cc", 42), "Failed"); + + // Unfortunately, we cannot verify that the failure message contains + // the right file path and line number the same way, as + // EXPECT_NONFATAL_FAILURE() doesn't get to see the file path and + // line number. Instead, we do that in googletest-output-test_.cc. +} + +// Tests FAIL. +TEST(MacroTest, FAIL) { + EXPECT_FATAL_FAILURE(FAIL(), + "Failed"); + EXPECT_FATAL_FAILURE(FAIL() << "Intentional failure.", + "Intentional failure."); +} + +// Tests GTEST_FAIL_AT. +TEST(MacroTest, GTEST_FAIL_AT) { + // Verifies that GTEST_FAIL_AT does generate a fatal failure and + // the failure message contains the user-streamed part. + EXPECT_FATAL_FAILURE(GTEST_FAIL_AT("foo.cc", 42) << "Wrong!", "Wrong!"); + + // Verifies that the user-streamed part is optional. + EXPECT_FATAL_FAILURE(GTEST_FAIL_AT("foo.cc", 42), "Failed"); + + // See the ADD_FAIL_AT test above to see how we test that the failure message + // contains the right filename and line number -- the same applies here. +} + +// Tests SUCCEED +TEST(MacroTest, SUCCEED) { + SUCCEED(); + SUCCEED() << "Explicit success."; +} + +// Tests for EXPECT_EQ() and ASSERT_EQ(). +// +// These tests fail *intentionally*, s.t. the failure messages can be +// generated and tested. +// +// We have different tests for different argument types. + +// Tests using bool values in {EXPECT|ASSERT}_EQ. +TEST(EqAssertionTest, Bool) { + EXPECT_EQ(true, true); + EXPECT_FATAL_FAILURE({ + bool false_value = false; + ASSERT_EQ(false_value, true); + }, " false_value\n Which is: false\n true"); +} + +// Tests using int values in {EXPECT|ASSERT}_EQ. +TEST(EqAssertionTest, Int) { + ASSERT_EQ(32, 32); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(32, 33), + " 32\n 33"); +} + +// Tests using time_t values in {EXPECT|ASSERT}_EQ. +TEST(EqAssertionTest, Time_T) { + EXPECT_EQ(static_cast<time_t>(0), + static_cast<time_t>(0)); + EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<time_t>(0), + static_cast<time_t>(1234)), + "1234"); +} + +// Tests using char values in {EXPECT|ASSERT}_EQ. +TEST(EqAssertionTest, Char) { + ASSERT_EQ('z', 'z'); + const char ch = 'b'; + EXPECT_NONFATAL_FAILURE(EXPECT_EQ('\0', ch), + " ch\n Which is: 'b'"); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ('a', ch), + " ch\n Which is: 'b'"); +} + +// Tests using wchar_t values in {EXPECT|ASSERT}_EQ. +TEST(EqAssertionTest, WideChar) { + EXPECT_EQ(L'b', L'b'); + + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'\0', L'x'), + "Expected equality of these values:\n" + " L'\0'\n" + " Which is: L'\0' (0, 0x0)\n" + " L'x'\n" + " Which is: L'x' (120, 0x78)"); + + static wchar_t wchar; + wchar = L'b'; + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(L'a', wchar), + "wchar"); + wchar = 0x8119; + EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<wchar_t>(0x8120), wchar), + " wchar\n Which is: L'"); +} + +// Tests using ::std::string values in {EXPECT|ASSERT}_EQ. +TEST(EqAssertionTest, StdString) { + // Compares a const char* to an std::string that has identical + // content. + ASSERT_EQ("Test", ::std::string("Test")); + + // Compares two identical std::strings. + static const ::std::string str1("A * in the middle"); + static const ::std::string str2(str1); + EXPECT_EQ(str1, str2); + + // Compares a const char* to an std::string that has different + // content + EXPECT_NONFATAL_FAILURE(EXPECT_EQ("Test", ::std::string("test")), + "\"test\""); + + // Compares an std::string to a char* that has different content. 
+ char* const p1 = const_cast<char*>("foo"); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(::std::string("bar"), p1), + "p1"); + + // Compares two std::strings that have different contents, one of + // which having a NUL character in the middle. This should fail. + static ::std::string str3(str1); + str3.at(2) = '\0'; + EXPECT_FATAL_FAILURE(ASSERT_EQ(str1, str3), + " str3\n Which is: \"A \\0 in the middle\""); +} + +#if GTEST_HAS_STD_WSTRING + +// Tests using ::std::wstring values in {EXPECT|ASSERT}_EQ. +TEST(EqAssertionTest, StdWideString) { + // Compares two identical std::wstrings. + const ::std::wstring wstr1(L"A * in the middle"); + const ::std::wstring wstr2(wstr1); + ASSERT_EQ(wstr1, wstr2); + + // Compares an std::wstring to a const wchar_t* that has identical + // content. + const wchar_t kTestX8119[] = { 'T', 'e', 's', 't', 0x8119, '\0' }; + EXPECT_EQ(::std::wstring(kTestX8119), kTestX8119); + + // Compares an std::wstring to a const wchar_t* that has different + // content. + const wchar_t kTestX8120[] = { 'T', 'e', 's', 't', 0x8120, '\0' }; + EXPECT_NONFATAL_FAILURE({ // NOLINT + EXPECT_EQ(::std::wstring(kTestX8119), kTestX8120); + }, "kTestX8120"); + + // Compares two std::wstrings that have different contents, one of + // which having a NUL character in the middle. + ::std::wstring wstr3(wstr1); + wstr3.at(2) = L'\0'; + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(wstr1, wstr3), + "wstr3"); + + // Compares a wchar_t* to an std::wstring that has different + // content. + EXPECT_FATAL_FAILURE({ // NOLINT + ASSERT_EQ(const_cast<wchar_t*>(L"foo"), ::std::wstring(L"bar")); + }, ""); +} + +#endif // GTEST_HAS_STD_WSTRING + +// Tests using char pointers in {EXPECT|ASSERT}_EQ. +TEST(EqAssertionTest, CharPointer) { + char* const p0 = nullptr; + // Only way to get the Nokia compiler to compile the cast + // is to have a separate void* variable first. Putting + // the two casts on the same line doesn't work, neither does + // a direct C-style to char*. + void* pv1 = (void*)0x1234; // NOLINT + void* pv2 = (void*)0xABC0; // NOLINT + char* const p1 = reinterpret_cast<char*>(pv1); + char* const p2 = reinterpret_cast<char*>(pv2); + ASSERT_EQ(p1, p1); + + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2), + " p2\n Which is:"); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2), + " p2\n Which is:"); + EXPECT_FATAL_FAILURE(ASSERT_EQ(reinterpret_cast<char*>(0x1234), + reinterpret_cast<char*>(0xABC0)), + "ABC0"); +} + +// Tests using wchar_t pointers in {EXPECT|ASSERT}_EQ. +TEST(EqAssertionTest, WideCharPointer) { + wchar_t* const p0 = nullptr; + // Only way to get the Nokia compiler to compile the cast + // is to have a separate void* variable first. Putting + // the two casts on the same line doesn't work, neither does + // a direct C-style to char*. + void* pv1 = (void*)0x1234; // NOLINT + void* pv2 = (void*)0xABC0; // NOLINT + wchar_t* const p1 = reinterpret_cast<wchar_t*>(pv1); + wchar_t* const p2 = reinterpret_cast<wchar_t*>(pv2); + EXPECT_EQ(p0, p0); + + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p0, p2), + " p2\n Which is:"); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p1, p2), + " p2\n Which is:"); + void* pv3 = (void*)0x1234; // NOLINT + void* pv4 = (void*)0xABC0; // NOLINT + const wchar_t* p3 = reinterpret_cast<const wchar_t*>(pv3); + const wchar_t* p4 = reinterpret_cast<const wchar_t*>(pv4); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(p3, p4), + "p4"); +} + +// Tests using other types of pointers in {EXPECT|ASSERT}_EQ. 
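The EqAssertionTest cases above compare std::string and std::wstring values by content but raw char pointers by address. A short sketch of that user-facing distinction (illustrative only):

    #include "gtest/gtest.h"

    TEST(CStringComparisonSketch, ContentVersusAddress) {
      const char lhs[] = "hello";
      const char rhs[] = "hello";
      EXPECT_STREQ(lhs, rhs);  // compares the characters
      // EXPECT_EQ(lhs, rhs) would compare the decayed pointer values instead,
      // which is the behaviour the CharPointer tests above exercise.
    }
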
+TEST(EqAssertionTest, OtherPointer) { + ASSERT_EQ(static_cast<const int*>(nullptr), static_cast<const int*>(nullptr)); + EXPECT_FATAL_FAILURE(ASSERT_EQ(static_cast<const int*>(nullptr), + reinterpret_cast<const int*>(0x1234)), + "0x1234"); +} + +// A class that supports binary comparison operators but not streaming. +class UnprintableChar { + public: + explicit UnprintableChar(char ch) : char_(ch) {} + + bool operator==(const UnprintableChar& rhs) const { + return char_ == rhs.char_; + } + bool operator!=(const UnprintableChar& rhs) const { + return char_ != rhs.char_; + } + bool operator<(const UnprintableChar& rhs) const { + return char_ < rhs.char_; + } + bool operator<=(const UnprintableChar& rhs) const { + return char_ <= rhs.char_; + } + bool operator>(const UnprintableChar& rhs) const { + return char_ > rhs.char_; + } + bool operator>=(const UnprintableChar& rhs) const { + return char_ >= rhs.char_; + } + + private: + char char_; +}; + +// Tests that ASSERT_EQ() and friends don't require the arguments to +// be printable. +TEST(ComparisonAssertionTest, AcceptsUnprintableArgs) { + const UnprintableChar x('x'), y('y'); + ASSERT_EQ(x, x); + EXPECT_NE(x, y); + ASSERT_LT(x, y); + EXPECT_LE(x, y); + ASSERT_GT(y, x); + EXPECT_GE(x, x); + + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y), "1-byte object <78>"); + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(x, y), "1-byte object <79>"); + EXPECT_NONFATAL_FAILURE(EXPECT_LT(y, y), "1-byte object <79>"); + EXPECT_NONFATAL_FAILURE(EXPECT_GT(x, y), "1-byte object <78>"); + EXPECT_NONFATAL_FAILURE(EXPECT_GT(x, y), "1-byte object <79>"); + + // Code tested by EXPECT_FATAL_FAILURE cannot reference local + // variables, so we have to write UnprintableChar('x') instead of x. +#ifndef __BORLANDC__ + // ICE's in C++Builder. + EXPECT_FATAL_FAILURE(ASSERT_NE(UnprintableChar('x'), UnprintableChar('x')), + "1-byte object <78>"); + EXPECT_FATAL_FAILURE(ASSERT_LE(UnprintableChar('y'), UnprintableChar('x')), + "1-byte object <78>"); +#endif + EXPECT_FATAL_FAILURE(ASSERT_LE(UnprintableChar('y'), UnprintableChar('x')), + "1-byte object <79>"); + EXPECT_FATAL_FAILURE(ASSERT_GE(UnprintableChar('x'), UnprintableChar('y')), + "1-byte object <78>"); + EXPECT_FATAL_FAILURE(ASSERT_GE(UnprintableChar('x'), UnprintableChar('y')), + "1-byte object <79>"); +} + +// Tests the FRIEND_TEST macro. + +// This class has a private member we want to test. We will test it +// both in a TEST and in a TEST_F. +class Foo { + public: + Foo() {} + + private: + int Bar() const { return 1; } + + // Declares the friend tests that can access the private member + // Bar(). + FRIEND_TEST(FRIEND_TEST_Test, TEST); + FRIEND_TEST(FRIEND_TEST_Test2, TEST_F); +}; + +// Tests that the FRIEND_TEST declaration allows a TEST to access a +// class's private members. This should compile. +TEST(FRIEND_TEST_Test, TEST) { + ASSERT_EQ(1, Foo().Bar()); +} + +// The fixture needed to test using FRIEND_TEST with TEST_F. +class FRIEND_TEST_Test2 : public Test { + protected: + Foo foo; +}; + +// Tests that the FRIEND_TEST declaration allows a TEST_F to access a +// class's private members. This should compile. +TEST_F(FRIEND_TEST_Test2, TEST_F) { + ASSERT_EQ(1, foo.Bar()); +} + +// Tests the life cycle of Test objects. + +// The test fixture for testing the life cycle of Test objects. +// +// This class counts the number of live test objects that uses this +// fixture. +class TestLifeCycleTest : public Test { + protected: + // Constructor. Increments the number of test objects that uses + // this fixture. 
+ TestLifeCycleTest() { count_++; } + + // Destructor. Decrements the number of test objects that uses this + // fixture. + ~TestLifeCycleTest() override { count_--; } + + // Returns the number of live test objects that uses this fixture. + int count() const { return count_; } + + private: + static int count_; +}; + +int TestLifeCycleTest::count_ = 0; + +// Tests the life cycle of test objects. +TEST_F(TestLifeCycleTest, Test1) { + // There should be only one test object in this test case that's + // currently alive. + ASSERT_EQ(1, count()); +} + +// Tests the life cycle of test objects. +TEST_F(TestLifeCycleTest, Test2) { + // After Test1 is done and Test2 is started, there should still be + // only one live test object, as the object for Test1 should've been + // deleted. + ASSERT_EQ(1, count()); +} + +} // namespace + +// Tests that the copy constructor works when it is NOT optimized away by +// the compiler. +TEST(AssertionResultTest, CopyConstructorWorksWhenNotOptimied) { + // Checks that the copy constructor doesn't try to dereference NULL pointers + // in the source object. + AssertionResult r1 = AssertionSuccess(); + AssertionResult r2 = r1; + // The following line is added to prevent the compiler from optimizing + // away the constructor call. + r1 << "abc"; + + AssertionResult r3 = r1; + EXPECT_EQ(static_cast<bool>(r3), static_cast<bool>(r1)); + EXPECT_STREQ("abc", r1.message()); +} + +// Tests that AssertionSuccess and AssertionFailure construct +// AssertionResult objects as expected. +TEST(AssertionResultTest, ConstructionWorks) { + AssertionResult r1 = AssertionSuccess(); + EXPECT_TRUE(r1); + EXPECT_STREQ("", r1.message()); + + AssertionResult r2 = AssertionSuccess() << "abc"; + EXPECT_TRUE(r2); + EXPECT_STREQ("abc", r2.message()); + + AssertionResult r3 = AssertionFailure(); + EXPECT_FALSE(r3); + EXPECT_STREQ("", r3.message()); + + AssertionResult r4 = AssertionFailure() << "def"; + EXPECT_FALSE(r4); + EXPECT_STREQ("def", r4.message()); + + AssertionResult r5 = AssertionFailure(Message() << "ghi"); + EXPECT_FALSE(r5); + EXPECT_STREQ("ghi", r5.message()); +} + +// Tests that the negation flips the predicate result but keeps the message. 
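The AssertionResult tests above cover construction, streaming, and negation. In user code the same type usually backs a self-describing predicate; a minimal sketch, where IsPositive is a hypothetical helper and not part of this file:

    #include "gtest/gtest.h"

    ::testing::AssertionResult IsPositive(int n) {  // hypothetical helper
      if (n > 0)
        return ::testing::AssertionSuccess() << n << " is positive";
      return ::testing::AssertionFailure() << n << " is not positive";
    }

    TEST(AssertionResultSketch, UsedWithExpectTrue) {
      EXPECT_TRUE(IsPositive(5));    // if this failed, the streamed text would appear
      EXPECT_FALSE(IsPositive(-3));  // a false result works with EXPECT_FALSE as well
    }
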
+TEST(AssertionResultTest, NegationWorks) { + AssertionResult r1 = AssertionSuccess() << "abc"; + EXPECT_FALSE(!r1); + EXPECT_STREQ("abc", (!r1).message()); + + AssertionResult r2 = AssertionFailure() << "def"; + EXPECT_TRUE(!r2); + EXPECT_STREQ("def", (!r2).message()); +} + +TEST(AssertionResultTest, StreamingWorks) { + AssertionResult r = AssertionSuccess(); + r << "abc" << 'd' << 0 << true; + EXPECT_STREQ("abcd0true", r.message()); +} + +TEST(AssertionResultTest, CanStreamOstreamManipulators) { + AssertionResult r = AssertionSuccess(); + r << "Data" << std::endl << std::flush << std::ends << "Will be visible"; + EXPECT_STREQ("Data\n\\0Will be visible", r.message()); +} + +// The next test uses explicit conversion operators + +TEST(AssertionResultTest, ConstructibleFromContextuallyConvertibleToBool) { + struct ExplicitlyConvertibleToBool { + explicit operator bool() const { return value; } + bool value; + }; + ExplicitlyConvertibleToBool v1 = {false}; + ExplicitlyConvertibleToBool v2 = {true}; + EXPECT_FALSE(v1); + EXPECT_TRUE(v2); +} + +struct ConvertibleToAssertionResult { + operator AssertionResult() const { return AssertionResult(true); } +}; + +TEST(AssertionResultTest, ConstructibleFromImplicitlyConvertible) { + ConvertibleToAssertionResult obj; + EXPECT_TRUE(obj); +} + +// Tests streaming a user type whose definition and operator << are +// both in the global namespace. +class Base { + public: + explicit Base(int an_x) : x_(an_x) {} + int x() const { return x_; } + private: + int x_; +}; +std::ostream& operator<<(std::ostream& os, + const Base& val) { + return os << val.x(); +} +std::ostream& operator<<(std::ostream& os, + const Base* pointer) { + return os << "(" << pointer->x() << ")"; +} + +TEST(MessageTest, CanStreamUserTypeInGlobalNameSpace) { + Message msg; + Base a(1); + + msg << a << &a; // Uses ::operator<<. + EXPECT_STREQ("1(1)", msg.GetString().c_str()); +} + +// Tests streaming a user type whose definition and operator<< are +// both in an unnamed namespace. +namespace { +class MyTypeInUnnamedNameSpace : public Base { + public: + explicit MyTypeInUnnamedNameSpace(int an_x): Base(an_x) {} +}; +std::ostream& operator<<(std::ostream& os, + const MyTypeInUnnamedNameSpace& val) { + return os << val.x(); +} +std::ostream& operator<<(std::ostream& os, + const MyTypeInUnnamedNameSpace* pointer) { + return os << "(" << pointer->x() << ")"; +} +} // namespace + +TEST(MessageTest, CanStreamUserTypeInUnnamedNameSpace) { + Message msg; + MyTypeInUnnamedNameSpace a(1); + + msg << a << &a; // Uses <unnamed_namespace>::operator<<. + EXPECT_STREQ("1(1)", msg.GetString().c_str()); +} + +// Tests streaming a user type whose definition and operator<< are +// both in a user namespace. +namespace namespace1 { +class MyTypeInNameSpace1 : public Base { + public: + explicit MyTypeInNameSpace1(int an_x): Base(an_x) {} +}; +std::ostream& operator<<(std::ostream& os, + const MyTypeInNameSpace1& val) { + return os << val.x(); +} +std::ostream& operator<<(std::ostream& os, + const MyTypeInNameSpace1* pointer) { + return os << "(" << pointer->x() << ")"; +} +} // namespace namespace1 + +TEST(MessageTest, CanStreamUserTypeInUserNameSpace) { + Message msg; + namespace1::MyTypeInNameSpace1 a(1); + + msg << a << &a; // Uses namespace1::operator<<. + EXPECT_STREQ("1(1)", msg.GetString().c_str()); +} + +// Tests streaming a user type whose definition is in a user namespace +// but whose operator<< is in the global namespace. 
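The MessageTest cases above stream user types through operator<< overloads found in several namespaces. A compact sketch of the same mechanism in user code (the sketch namespace and Point type are hypothetical):

    #include <ostream>
    #include "gtest/gtest.h"

    namespace sketch {
    struct Point { int x; int y; };
    std::ostream& operator<<(std::ostream& os, const Point& p) {
      return os << "(" << p.x << ", " << p.y << ")";  // found via argument-dependent lookup
    }
    }  // namespace sketch

    TEST(MessageSketch, StreamsUserType) {
      sketch::Point p{1, 2};
      EXPECT_STREQ("(1, 2)", (::testing::Message() << p).GetString().c_str());
    }
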
+namespace namespace2 { +class MyTypeInNameSpace2 : public ::Base { + public: + explicit MyTypeInNameSpace2(int an_x): Base(an_x) {} +}; +} // namespace namespace2 +std::ostream& operator<<(std::ostream& os, + const namespace2::MyTypeInNameSpace2& val) { + return os << val.x(); +} +std::ostream& operator<<(std::ostream& os, + const namespace2::MyTypeInNameSpace2* pointer) { + return os << "(" << pointer->x() << ")"; +} + +TEST(MessageTest, CanStreamUserTypeInUserNameSpaceWithStreamOperatorInGlobal) { + Message msg; + namespace2::MyTypeInNameSpace2 a(1); + + msg << a << &a; // Uses ::operator<<. + EXPECT_STREQ("1(1)", msg.GetString().c_str()); +} + +// Tests streaming NULL pointers to testing::Message. +TEST(MessageTest, NullPointers) { + Message msg; + char* const p1 = nullptr; + unsigned char* const p2 = nullptr; + int* p3 = nullptr; + double* p4 = nullptr; + bool* p5 = nullptr; + Message* p6 = nullptr; + + msg << p1 << p2 << p3 << p4 << p5 << p6; + ASSERT_STREQ("(null)(null)(null)(null)(null)(null)", + msg.GetString().c_str()); +} + +// Tests streaming wide strings to testing::Message. +TEST(MessageTest, WideStrings) { + // Streams a NULL of type const wchar_t*. + const wchar_t* const_wstr = nullptr; + EXPECT_STREQ("(null)", + (Message() << const_wstr).GetString().c_str()); + + // Streams a NULL of type wchar_t*. + wchar_t* wstr = nullptr; + EXPECT_STREQ("(null)", + (Message() << wstr).GetString().c_str()); + + // Streams a non-NULL of type const wchar_t*. + const_wstr = L"abc\x8119"; + EXPECT_STREQ("abc\xe8\x84\x99", + (Message() << const_wstr).GetString().c_str()); + + // Streams a non-NULL of type wchar_t*. + wstr = const_cast<wchar_t*>(const_wstr); + EXPECT_STREQ("abc\xe8\x84\x99", + (Message() << wstr).GetString().c_str()); +} + + +// This line tests that we can define tests in the testing namespace. +namespace testing { + +// Tests the TestInfo class. + +class TestInfoTest : public Test { + protected: + static const TestInfo* GetTestInfo(const char* test_name) { + const TestSuite* const test_suite = + GetUnitTestImpl()->GetTestSuite("TestInfoTest", "", nullptr, nullptr); + + for (int i = 0; i < test_suite->total_test_count(); ++i) { + const TestInfo* const test_info = test_suite->GetTestInfo(i); + if (strcmp(test_name, test_info->name()) == 0) + return test_info; + } + return nullptr; + } + + static const TestResult* GetTestResult( + const TestInfo* test_info) { + return test_info->result(); + } +}; + +// Tests TestInfo::test_case_name() and TestInfo::name(). +TEST_F(TestInfoTest, Names) { + const TestInfo* const test_info = GetTestInfo("Names"); + + ASSERT_STREQ("TestInfoTest", test_info->test_suite_name()); + ASSERT_STREQ("Names", test_info->name()); +} + +// Tests TestInfo::result(). +TEST_F(TestInfoTest, result) { + const TestInfo* const test_info = GetTestInfo("result"); + + // Initially, there is no TestPartResult for this test. + ASSERT_EQ(0, GetTestResult(test_info)->total_part_count()); + + // After the previous assertion, there is still none. 
+ ASSERT_EQ(0, GetTestResult(test_info)->total_part_count()); +} + +#define VERIFY_CODE_LOCATION \ + const int expected_line = __LINE__ - 1; \ + const TestInfo* const test_info = GetUnitTestImpl()->current_test_info(); \ + ASSERT_TRUE(test_info); \ + EXPECT_STREQ(__FILE__, test_info->file()); \ + EXPECT_EQ(expected_line, test_info->line()) + +TEST(CodeLocationForTEST, Verify) { + VERIFY_CODE_LOCATION; +} + +class CodeLocationForTESTF : public Test { +}; + +TEST_F(CodeLocationForTESTF, Verify) { + VERIFY_CODE_LOCATION; +} + +class CodeLocationForTESTP : public TestWithParam<int> { +}; + +TEST_P(CodeLocationForTESTP, Verify) { + VERIFY_CODE_LOCATION; +} + +INSTANTIATE_TEST_SUITE_P(, CodeLocationForTESTP, Values(0)); + +template <typename T> +class CodeLocationForTYPEDTEST : public Test { +}; + +TYPED_TEST_SUITE(CodeLocationForTYPEDTEST, int); + +TYPED_TEST(CodeLocationForTYPEDTEST, Verify) { + VERIFY_CODE_LOCATION; +} + +template <typename T> +class CodeLocationForTYPEDTESTP : public Test { +}; + +TYPED_TEST_SUITE_P(CodeLocationForTYPEDTESTP); + +TYPED_TEST_P(CodeLocationForTYPEDTESTP, Verify) { + VERIFY_CODE_LOCATION; +} + +REGISTER_TYPED_TEST_SUITE_P(CodeLocationForTYPEDTESTP, Verify); + +INSTANTIATE_TYPED_TEST_SUITE_P(My, CodeLocationForTYPEDTESTP, int); + +#undef VERIFY_CODE_LOCATION + +// Tests setting up and tearing down a test case. +// Legacy API is deprecated but still available +#ifndef GTEST_REMOVE_LEGACY_TEST_CASEAPI_ +class SetUpTestCaseTest : public Test { + protected: + // This will be called once before the first test in this test case + // is run. + static void SetUpTestCase() { + printf("Setting up the test case . . .\n"); + + // Initializes some shared resource. In this simple example, we + // just create a C string. More complex stuff can be done if + // desired. + shared_resource_ = "123"; + + // Increments the number of test cases that have been set up. + counter_++; + + // SetUpTestCase() should be called only once. + EXPECT_EQ(1, counter_); + } + + // This will be called once after the last test in this test case is + // run. + static void TearDownTestCase() { + printf("Tearing down the test case . . .\n"); + + // Decrements the number of test cases that have been set up. + counter_--; + + // TearDownTestCase() should be called only once. + EXPECT_EQ(0, counter_); + + // Cleans up the shared resource. + shared_resource_ = nullptr; + } + + // This will be called before each test in this test case. + void SetUp() override { + // SetUpTestCase() should be called only once, so counter_ should + // always be 1. + EXPECT_EQ(1, counter_); + } + + // Number of test cases that have been set up. + static int counter_; + + // Some resource to be shared by all tests in this test case. + static const char* shared_resource_; +}; + +int SetUpTestCaseTest::counter_ = 0; +const char* SetUpTestCaseTest::shared_resource_ = nullptr; + +// A test that uses the shared resource. +TEST_F(SetUpTestCaseTest, Test1) { EXPECT_STRNE(nullptr, shared_resource_); } + +// Another test that uses the shared resource. +TEST_F(SetUpTestCaseTest, Test2) { + EXPECT_STREQ("123", shared_resource_); +} +#endif // GTEST_REMOVE_LEGACY_TEST_CASEAPI_ + +// Tests SetupTestSuite/TearDown TestSuite +class SetUpTestSuiteTest : public Test { + protected: + // This will be called once before the first test in this test case + // is run. + static void SetUpTestSuite() { + printf("Setting up the test suite . . .\n"); + + // Initializes some shared resource. In this simple example, we + // just create a C string. 
More complex stuff can be done if + // desired. + shared_resource_ = "123"; + + // Increments the number of test cases that have been set up. + counter_++; + + // SetUpTestSuite() should be called only once. + EXPECT_EQ(1, counter_); + } + + // This will be called once after the last test in this test case is + // run. + static void TearDownTestSuite() { + printf("Tearing down the test suite . . .\n"); + + // Decrements the number of test suites that have been set up. + counter_--; + + // TearDownTestSuite() should be called only once. + EXPECT_EQ(0, counter_); + + // Cleans up the shared resource. + shared_resource_ = nullptr; + } + + // This will be called before each test in this test case. + void SetUp() override { + // SetUpTestSuite() should be called only once, so counter_ should + // always be 1. + EXPECT_EQ(1, counter_); + } + + // Number of test suites that have been set up. + static int counter_; + + // Some resource to be shared by all tests in this test case. + static const char* shared_resource_; +}; + +int SetUpTestSuiteTest::counter_ = 0; +const char* SetUpTestSuiteTest::shared_resource_ = nullptr; + +// A test that uses the shared resource. +TEST_F(SetUpTestSuiteTest, TestSetupTestSuite1) { + EXPECT_STRNE(nullptr, shared_resource_); +} + +// Another test that uses the shared resource. +TEST_F(SetUpTestSuiteTest, TestSetupTestSuite2) { + EXPECT_STREQ("123", shared_resource_); +} + +// The ParseFlagsTest test case tests ParseGoogleTestFlagsOnly. + +// The Flags struct stores a copy of all Google Test flags. +struct Flags { + // Constructs a Flags struct where each flag has its default value. + Flags() + : also_run_disabled_tests(false), + break_on_failure(false), + catch_exceptions(false), + death_test_use_fork(false), + fail_fast(false), + filter(""), + list_tests(false), + output(""), + brief(false), + print_time(true), + random_seed(0), + repeat(1), + shuffle(false), + stack_trace_depth(kMaxStackTraceDepth), + stream_result_to(""), + throw_on_failure(false) {} + + // Factory methods. + + // Creates a Flags struct where the gtest_also_run_disabled_tests flag has + // the given value. + static Flags AlsoRunDisabledTests(bool also_run_disabled_tests) { + Flags flags; + flags.also_run_disabled_tests = also_run_disabled_tests; + return flags; + } + + // Creates a Flags struct where the gtest_break_on_failure flag has + // the given value. + static Flags BreakOnFailure(bool break_on_failure) { + Flags flags; + flags.break_on_failure = break_on_failure; + return flags; + } + + // Creates a Flags struct where the gtest_catch_exceptions flag has + // the given value. + static Flags CatchExceptions(bool catch_exceptions) { + Flags flags; + flags.catch_exceptions = catch_exceptions; + return flags; + } + + // Creates a Flags struct where the gtest_death_test_use_fork flag has + // the given value. + static Flags DeathTestUseFork(bool death_test_use_fork) { + Flags flags; + flags.death_test_use_fork = death_test_use_fork; + return flags; + } + + // Creates a Flags struct where the gtest_fail_fast flag has + // the given value. + static Flags FailFast(bool fail_fast) { + Flags flags; + flags.fail_fast = fail_fast; + return flags; + } + + // Creates a Flags struct where the gtest_filter flag has the given + // value. + static Flags Filter(const char* filter) { + Flags flags; + flags.filter = filter; + return flags; + } + + // Creates a Flags struct where the gtest_list_tests flag has the + // given value. 
+ static Flags ListTests(bool list_tests) { + Flags flags; + flags.list_tests = list_tests; + return flags; + } + + // Creates a Flags struct where the gtest_output flag has the given + // value. + static Flags Output(const char* output) { + Flags flags; + flags.output = output; + return flags; + } + + // Creates a Flags struct where the gtest_brief flag has the given + // value. + static Flags Brief(bool brief) { + Flags flags; + flags.brief = brief; + return flags; + } + + // Creates a Flags struct where the gtest_print_time flag has the given + // value. + static Flags PrintTime(bool print_time) { + Flags flags; + flags.print_time = print_time; + return flags; + } + + // Creates a Flags struct where the gtest_random_seed flag has the given + // value. + static Flags RandomSeed(int32_t random_seed) { + Flags flags; + flags.random_seed = random_seed; + return flags; + } + + // Creates a Flags struct where the gtest_repeat flag has the given + // value. + static Flags Repeat(int32_t repeat) { + Flags flags; + flags.repeat = repeat; + return flags; + } + + // Creates a Flags struct where the gtest_shuffle flag has the given + // value. + static Flags Shuffle(bool shuffle) { + Flags flags; + flags.shuffle = shuffle; + return flags; + } + + // Creates a Flags struct where the GTEST_FLAG(stack_trace_depth) flag has + // the given value. + static Flags StackTraceDepth(int32_t stack_trace_depth) { + Flags flags; + flags.stack_trace_depth = stack_trace_depth; + return flags; + } + + // Creates a Flags struct where the GTEST_FLAG(stream_result_to) flag has + // the given value. + static Flags StreamResultTo(const char* stream_result_to) { + Flags flags; + flags.stream_result_to = stream_result_to; + return flags; + } + + // Creates a Flags struct where the gtest_throw_on_failure flag has + // the given value. + static Flags ThrowOnFailure(bool throw_on_failure) { + Flags flags; + flags.throw_on_failure = throw_on_failure; + return flags; + } + + // These fields store the flag values. + bool also_run_disabled_tests; + bool break_on_failure; + bool catch_exceptions; + bool death_test_use_fork; + bool fail_fast; + const char* filter; + bool list_tests; + const char* output; + bool brief; + bool print_time; + int32_t random_seed; + int32_t repeat; + bool shuffle; + int32_t stack_trace_depth; + const char* stream_result_to; + bool throw_on_failure; +}; + +// Fixture for testing ParseGoogleTestFlagsOnly(). +class ParseFlagsTest : public Test { + protected: + // Clears the flags before each test. + void SetUp() override { + GTEST_FLAG(also_run_disabled_tests) = false; + GTEST_FLAG(break_on_failure) = false; + GTEST_FLAG(catch_exceptions) = false; + GTEST_FLAG(death_test_use_fork) = false; + GTEST_FLAG(fail_fast) = false; + GTEST_FLAG(filter) = ""; + GTEST_FLAG(list_tests) = false; + GTEST_FLAG(output) = ""; + GTEST_FLAG(brief) = false; + GTEST_FLAG(print_time) = true; + GTEST_FLAG(random_seed) = 0; + GTEST_FLAG(repeat) = 1; + GTEST_FLAG(shuffle) = false; + GTEST_FLAG(stack_trace_depth) = kMaxStackTraceDepth; + GTEST_FLAG(stream_result_to) = ""; + GTEST_FLAG(throw_on_failure) = false; + } + + // Asserts that two narrow or wide string arrays are equal. + template <typename CharType> + static void AssertStringArrayEq(int size1, CharType** array1, int size2, + CharType** array2) { + ASSERT_EQ(size1, size2) << " Array sizes different."; + + for (int i = 0; i != size1; i++) { + ASSERT_STREQ(array1[i], array2[i]) << " where i == " << i; + } + } + + // Verifies that the flag values match the expected values. 
+ static void CheckFlags(const Flags& expected) { + EXPECT_EQ(expected.also_run_disabled_tests, + GTEST_FLAG(also_run_disabled_tests)); + EXPECT_EQ(expected.break_on_failure, GTEST_FLAG(break_on_failure)); + EXPECT_EQ(expected.catch_exceptions, GTEST_FLAG(catch_exceptions)); + EXPECT_EQ(expected.death_test_use_fork, GTEST_FLAG(death_test_use_fork)); + EXPECT_EQ(expected.fail_fast, GTEST_FLAG(fail_fast)); + EXPECT_STREQ(expected.filter, GTEST_FLAG(filter).c_str()); + EXPECT_EQ(expected.list_tests, GTEST_FLAG(list_tests)); + EXPECT_STREQ(expected.output, GTEST_FLAG(output).c_str()); + EXPECT_EQ(expected.brief, GTEST_FLAG(brief)); + EXPECT_EQ(expected.print_time, GTEST_FLAG(print_time)); + EXPECT_EQ(expected.random_seed, GTEST_FLAG(random_seed)); + EXPECT_EQ(expected.repeat, GTEST_FLAG(repeat)); + EXPECT_EQ(expected.shuffle, GTEST_FLAG(shuffle)); + EXPECT_EQ(expected.stack_trace_depth, GTEST_FLAG(stack_trace_depth)); + EXPECT_STREQ(expected.stream_result_to, + GTEST_FLAG(stream_result_to).c_str()); + EXPECT_EQ(expected.throw_on_failure, GTEST_FLAG(throw_on_failure)); + } + + // Parses a command line (specified by argc1 and argv1), then + // verifies that the flag values are expected and that the + // recognized flags are removed from the command line. + template <typename CharType> + static void TestParsingFlags(int argc1, const CharType** argv1, + int argc2, const CharType** argv2, + const Flags& expected, bool should_print_help) { + const bool saved_help_flag = ::testing::internal::g_help_flag; + ::testing::internal::g_help_flag = false; + +# if GTEST_HAS_STREAM_REDIRECTION + CaptureStdout(); +# endif + + // Parses the command line. + internal::ParseGoogleTestFlagsOnly(&argc1, const_cast<CharType**>(argv1)); + +# if GTEST_HAS_STREAM_REDIRECTION + const std::string captured_stdout = GetCapturedStdout(); +# endif + + // Verifies the flag values. + CheckFlags(expected); + + // Verifies that the recognized flags are removed from the command + // line. + AssertStringArrayEq(argc1 + 1, argv1, argc2 + 1, argv2); + + // ParseGoogleTestFlagsOnly should neither set g_help_flag nor print the + // help message for the flags it recognizes. + EXPECT_EQ(should_print_help, ::testing::internal::g_help_flag); + +# if GTEST_HAS_STREAM_REDIRECTION + const char* const expected_help_fragment = + "This program contains tests written using"; + if (should_print_help) { + EXPECT_PRED_FORMAT2(IsSubstring, expected_help_fragment, captured_stdout); + } else { + EXPECT_PRED_FORMAT2(IsNotSubstring, + expected_help_fragment, captured_stdout); + } +# endif // GTEST_HAS_STREAM_REDIRECTION + + ::testing::internal::g_help_flag = saved_help_flag; + } + + // This macro wraps TestParsingFlags s.t. the user doesn't need + // to specify the array sizes. + +# define GTEST_TEST_PARSING_FLAGS_(argv1, argv2, expected, should_print_help) \ + TestParsingFlags(sizeof(argv1)/sizeof(*argv1) - 1, argv1, \ + sizeof(argv2)/sizeof(*argv2) - 1, argv2, \ + expected, should_print_help) +}; + +// Tests parsing an empty command line. +TEST_F(ParseFlagsTest, Empty) { + const char* argv[] = {nullptr}; + + const char* argv2[] = {nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false); +} + +// Tests parsing a command line that has no flag. +TEST_F(ParseFlagsTest, NoFlag) { + const char* argv[] = {"foo.exe", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false); +} + +// Tests parsing --gtest_fail_fast. 
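ParseGoogleTestFlagsOnly() is an internal entry point exercised by these tests; an ordinary test binary reaches the same flag handling through the public API, roughly as in this sketch:

    #include "gtest/gtest.h"

    int main(int argc, char** argv) {
      // Consumes the --gtest_* flags it recognizes, removes them from argv,
      // and leaves everything else (e.g. application flags) untouched.
      ::testing::InitGoogleTest(&argc, argv);
      return RUN_ALL_TESTS();
    }
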
+TEST_F(ParseFlagsTest, FailFast) { + const char* argv[] = {"foo.exe", "--gtest_fail_fast", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::FailFast(true), false); +} + +// Tests parsing a bad --gtest_filter flag. +TEST_F(ParseFlagsTest, FilterBad) { + const char* argv[] = {"foo.exe", "--gtest_filter", nullptr}; + + const char* argv2[] = {"foo.exe", "--gtest_filter", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(""), true); +} + +// Tests parsing an empty --gtest_filter flag. +TEST_F(ParseFlagsTest, FilterEmpty) { + const char* argv[] = {"foo.exe", "--gtest_filter=", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter(""), false); +} + +// Tests parsing a non-empty --gtest_filter flag. +TEST_F(ParseFlagsTest, FilterNonEmpty) { + const char* argv[] = {"foo.exe", "--gtest_filter=abc", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter("abc"), false); +} + +// Tests parsing --gtest_break_on_failure. +TEST_F(ParseFlagsTest, BreakOnFailureWithoutValue) { + const char* argv[] = {"foo.exe", "--gtest_break_on_failure", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(true), false); +} + +// Tests parsing --gtest_break_on_failure=0. +TEST_F(ParseFlagsTest, BreakOnFailureFalse_0) { + const char* argv[] = {"foo.exe", "--gtest_break_on_failure=0", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false); +} + +// Tests parsing --gtest_break_on_failure=f. +TEST_F(ParseFlagsTest, BreakOnFailureFalse_f) { + const char* argv[] = {"foo.exe", "--gtest_break_on_failure=f", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false); +} + +// Tests parsing --gtest_break_on_failure=F. +TEST_F(ParseFlagsTest, BreakOnFailureFalse_F) { + const char* argv[] = {"foo.exe", "--gtest_break_on_failure=F", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(false), false); +} + +// Tests parsing a --gtest_break_on_failure flag that has a "true" +// definition. +TEST_F(ParseFlagsTest, BreakOnFailureTrue) { + const char* argv[] = {"foo.exe", "--gtest_break_on_failure=1", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::BreakOnFailure(true), false); +} + +// Tests parsing --gtest_catch_exceptions. +TEST_F(ParseFlagsTest, CatchExceptions) { + const char* argv[] = {"foo.exe", "--gtest_catch_exceptions", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::CatchExceptions(true), false); +} + +// Tests parsing --gtest_death_test_use_fork. +TEST_F(ParseFlagsTest, DeathTestUseFork) { + const char* argv[] = {"foo.exe", "--gtest_death_test_use_fork", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::DeathTestUseFork(true), false); +} + +// Tests having the same flag twice with different values. The +// expected behavior is that the one coming last takes precedence. 
+TEST_F(ParseFlagsTest, DuplicatedFlags) { + const char* argv[] = {"foo.exe", "--gtest_filter=a", "--gtest_filter=b", + nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter("b"), false); +} + +// Tests having an unrecognized flag on the command line. +TEST_F(ParseFlagsTest, UnrecognizedFlag) { + const char* argv[] = {"foo.exe", "--gtest_break_on_failure", + "bar", // Unrecognized by Google Test. + "--gtest_filter=b", nullptr}; + + const char* argv2[] = {"foo.exe", "bar", nullptr}; + + Flags flags; + flags.break_on_failure = true; + flags.filter = "b"; + GTEST_TEST_PARSING_FLAGS_(argv, argv2, flags, false); +} + +// Tests having a --gtest_list_tests flag +TEST_F(ParseFlagsTest, ListTestsFlag) { + const char* argv[] = {"foo.exe", "--gtest_list_tests", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(true), false); +} + +// Tests having a --gtest_list_tests flag with a "true" value +TEST_F(ParseFlagsTest, ListTestsTrue) { + const char* argv[] = {"foo.exe", "--gtest_list_tests=1", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(true), false); +} + +// Tests having a --gtest_list_tests flag with a "false" value +TEST_F(ParseFlagsTest, ListTestsFalse) { + const char* argv[] = {"foo.exe", "--gtest_list_tests=0", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false); +} + +// Tests parsing --gtest_list_tests=f. +TEST_F(ParseFlagsTest, ListTestsFalse_f) { + const char* argv[] = {"foo.exe", "--gtest_list_tests=f", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false); +} + +// Tests parsing --gtest_list_tests=F. +TEST_F(ParseFlagsTest, ListTestsFalse_F) { + const char* argv[] = {"foo.exe", "--gtest_list_tests=F", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ListTests(false), false); +} + +// Tests parsing --gtest_output (invalid). 
+TEST_F(ParseFlagsTest, OutputEmpty) { + const char* argv[] = {"foo.exe", "--gtest_output", nullptr}; + + const char* argv2[] = {"foo.exe", "--gtest_output", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), true); +} + +// Tests parsing --gtest_output=xml +TEST_F(ParseFlagsTest, OutputXml) { + const char* argv[] = {"foo.exe", "--gtest_output=xml", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Output("xml"), false); +} + +// Tests parsing --gtest_output=xml:file +TEST_F(ParseFlagsTest, OutputXmlFile) { + const char* argv[] = {"foo.exe", "--gtest_output=xml:file", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Output("xml:file"), false); +} + +// Tests parsing --gtest_output=xml:directory/path/ +TEST_F(ParseFlagsTest, OutputXmlDirectory) { + const char* argv[] = {"foo.exe", "--gtest_output=xml:directory/path/", + nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, + Flags::Output("xml:directory/path/"), false); +} + +// Tests having a --gtest_brief flag +TEST_F(ParseFlagsTest, BriefFlag) { + const char* argv[] = {"foo.exe", "--gtest_brief", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Brief(true), false); +} + +// Tests having a --gtest_brief flag with a "true" value +TEST_F(ParseFlagsTest, BriefFlagTrue) { + const char* argv[] = {"foo.exe", "--gtest_brief=1", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Brief(true), false); +} + +// Tests having a --gtest_brief flag with a "false" value +TEST_F(ParseFlagsTest, BriefFlagFalse) { + const char* argv[] = {"foo.exe", "--gtest_brief=0", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Brief(false), false); +} + +// Tests having a --gtest_print_time flag +TEST_F(ParseFlagsTest, PrintTimeFlag) { + const char* argv[] = {"foo.exe", "--gtest_print_time", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(true), false); +} + +// Tests having a --gtest_print_time flag with a "true" value +TEST_F(ParseFlagsTest, PrintTimeTrue) { + const char* argv[] = {"foo.exe", "--gtest_print_time=1", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(true), false); +} + +// Tests having a --gtest_print_time flag with a "false" value +TEST_F(ParseFlagsTest, PrintTimeFalse) { + const char* argv[] = {"foo.exe", "--gtest_print_time=0", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false); +} + +// Tests parsing --gtest_print_time=f. +TEST_F(ParseFlagsTest, PrintTimeFalse_f) { + const char* argv[] = {"foo.exe", "--gtest_print_time=f", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false); +} + +// Tests parsing --gtest_print_time=F. 
+TEST_F(ParseFlagsTest, PrintTimeFalse_F) { + const char* argv[] = {"foo.exe", "--gtest_print_time=F", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::PrintTime(false), false); +} + +// Tests parsing --gtest_random_seed=number +TEST_F(ParseFlagsTest, RandomSeed) { + const char* argv[] = {"foo.exe", "--gtest_random_seed=1000", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::RandomSeed(1000), false); +} + +// Tests parsing --gtest_repeat=number +TEST_F(ParseFlagsTest, Repeat) { + const char* argv[] = {"foo.exe", "--gtest_repeat=1000", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Repeat(1000), false); +} + +// Tests having a --gtest_also_run_disabled_tests flag +TEST_F(ParseFlagsTest, AlsoRunDisabledTestsFlag) { + const char* argv[] = {"foo.exe", "--gtest_also_run_disabled_tests", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::AlsoRunDisabledTests(true), + false); +} + +// Tests having a --gtest_also_run_disabled_tests flag with a "true" value +TEST_F(ParseFlagsTest, AlsoRunDisabledTestsTrue) { + const char* argv[] = {"foo.exe", "--gtest_also_run_disabled_tests=1", + nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::AlsoRunDisabledTests(true), + false); +} + +// Tests having a --gtest_also_run_disabled_tests flag with a "false" value +TEST_F(ParseFlagsTest, AlsoRunDisabledTestsFalse) { + const char* argv[] = {"foo.exe", "--gtest_also_run_disabled_tests=0", + nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::AlsoRunDisabledTests(false), + false); +} + +// Tests parsing --gtest_shuffle. +TEST_F(ParseFlagsTest, ShuffleWithoutValue) { + const char* argv[] = {"foo.exe", "--gtest_shuffle", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(true), false); +} + +// Tests parsing --gtest_shuffle=0. +TEST_F(ParseFlagsTest, ShuffleFalse_0) { + const char* argv[] = {"foo.exe", "--gtest_shuffle=0", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(false), false); +} + +// Tests parsing a --gtest_shuffle flag that has a "true" definition. +TEST_F(ParseFlagsTest, ShuffleTrue) { + const char* argv[] = {"foo.exe", "--gtest_shuffle=1", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Shuffle(true), false); +} + +// Tests parsing --gtest_stack_trace_depth=number. +TEST_F(ParseFlagsTest, StackTraceDepth) { + const char* argv[] = {"foo.exe", "--gtest_stack_trace_depth=5", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::StackTraceDepth(5), false); +} + +TEST_F(ParseFlagsTest, StreamResultTo) { + const char* argv[] = {"foo.exe", "--gtest_stream_result_to=localhost:1234", + nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_( + argv, argv2, Flags::StreamResultTo("localhost:1234"), false); +} + +// Tests parsing --gtest_throw_on_failure. 
+TEST_F(ParseFlagsTest, ThrowOnFailureWithoutValue) { + const char* argv[] = {"foo.exe", "--gtest_throw_on_failure", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false); +} + +// Tests parsing --gtest_throw_on_failure=0. +TEST_F(ParseFlagsTest, ThrowOnFailureFalse_0) { + const char* argv[] = {"foo.exe", "--gtest_throw_on_failure=0", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(false), false); +} + +// Tests parsing a --gtest_throw_on_failure flag that has a "true" +// definition. +TEST_F(ParseFlagsTest, ThrowOnFailureTrue) { + const char* argv[] = {"foo.exe", "--gtest_throw_on_failure=1", nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::ThrowOnFailure(true), false); +} + +# if GTEST_OS_WINDOWS +// Tests parsing wide strings. +TEST_F(ParseFlagsTest, WideStrings) { + const wchar_t* argv[] = { + L"foo.exe", + L"--gtest_filter=Foo*", + L"--gtest_list_tests=1", + L"--gtest_break_on_failure", + L"--non_gtest_flag", + NULL + }; + + const wchar_t* argv2[] = { + L"foo.exe", + L"--non_gtest_flag", + NULL + }; + + Flags expected_flags; + expected_flags.break_on_failure = true; + expected_flags.filter = "Foo*"; + expected_flags.list_tests = true; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, expected_flags, false); +} +# endif // GTEST_OS_WINDOWS + +#if GTEST_USE_OWN_FLAGFILE_FLAG_ +class FlagfileTest : public ParseFlagsTest { + public: + void SetUp() override { + ParseFlagsTest::SetUp(); + + testdata_path_.Set(internal::FilePath( + testing::TempDir() + internal::GetCurrentExecutableName().string() + + "_flagfile_test")); + testing::internal::posix::RmDir(testdata_path_.c_str()); + EXPECT_TRUE(testdata_path_.CreateFolder()); + } + + void TearDown() override { + testing::internal::posix::RmDir(testdata_path_.c_str()); + ParseFlagsTest::TearDown(); + } + + internal::FilePath CreateFlagfile(const char* contents) { + internal::FilePath file_path(internal::FilePath::GenerateUniqueFileName( + testdata_path_, internal::FilePath("unique"), "txt")); + FILE* f = testing::internal::posix::FOpen(file_path.c_str(), "w"); + fprintf(f, "%s", contents); + fclose(f); + return file_path; + } + + private: + internal::FilePath testdata_path_; +}; + +// Tests an empty flagfile. +TEST_F(FlagfileTest, Empty) { + internal::FilePath flagfile_path(CreateFlagfile("")); + std::string flagfile_flag = + std::string("--" GTEST_FLAG_PREFIX_ "flagfile=") + flagfile_path.c_str(); + + const char* argv[] = {"foo.exe", flagfile_flag.c_str(), nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags(), false); +} + +// Tests passing a non-empty --gtest_filter flag via --gtest_flagfile. +TEST_F(FlagfileTest, FilterNonEmpty) { + internal::FilePath flagfile_path(CreateFlagfile( + "--" GTEST_FLAG_PREFIX_ "filter=abc")); + std::string flagfile_flag = + std::string("--" GTEST_FLAG_PREFIX_ "flagfile=") + flagfile_path.c_str(); + + const char* argv[] = {"foo.exe", flagfile_flag.c_str(), nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, Flags::Filter("abc"), false); +} + +// Tests passing several flags via --gtest_flagfile. 
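+// The flagfile is plain text with one flag per line, e.g.:
+//
+//   --gtest_filter=abc
+//   --gtest_break_on_failure
+//   --gtest_list_tests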
+TEST_F(FlagfileTest, SeveralFlags) { + internal::FilePath flagfile_path(CreateFlagfile( + "--" GTEST_FLAG_PREFIX_ "filter=abc\n" + "--" GTEST_FLAG_PREFIX_ "break_on_failure\n" + "--" GTEST_FLAG_PREFIX_ "list_tests")); + std::string flagfile_flag = + std::string("--" GTEST_FLAG_PREFIX_ "flagfile=") + flagfile_path.c_str(); + + const char* argv[] = {"foo.exe", flagfile_flag.c_str(), nullptr}; + + const char* argv2[] = {"foo.exe", nullptr}; + + Flags expected_flags; + expected_flags.break_on_failure = true; + expected_flags.filter = "abc"; + expected_flags.list_tests = true; + + GTEST_TEST_PARSING_FLAGS_(argv, argv2, expected_flags, false); +} +#endif // GTEST_USE_OWN_FLAGFILE_FLAG_ + +// Tests current_test_info() in UnitTest. +class CurrentTestInfoTest : public Test { + protected: + // Tests that current_test_info() returns NULL before the first test in + // the test case is run. + static void SetUpTestSuite() { + // There should be no tests running at this point. + const TestInfo* test_info = + UnitTest::GetInstance()->current_test_info(); + EXPECT_TRUE(test_info == nullptr) + << "There should be no tests running at this point."; + } + + // Tests that current_test_info() returns NULL after the last test in + // the test case has run. + static void TearDownTestSuite() { + const TestInfo* test_info = + UnitTest::GetInstance()->current_test_info(); + EXPECT_TRUE(test_info == nullptr) + << "There should be no tests running at this point."; + } +}; + +// Tests that current_test_info() returns TestInfo for currently running +// test by checking the expected test name against the actual one. +TEST_F(CurrentTestInfoTest, WorksForFirstTestInATestSuite) { + const TestInfo* test_info = + UnitTest::GetInstance()->current_test_info(); + ASSERT_TRUE(nullptr != test_info) + << "There is a test running so we should have a valid TestInfo."; + EXPECT_STREQ("CurrentTestInfoTest", test_info->test_suite_name()) + << "Expected the name of the currently running test suite."; + EXPECT_STREQ("WorksForFirstTestInATestSuite", test_info->name()) + << "Expected the name of the currently running test."; +} + +// Tests that current_test_info() returns TestInfo for currently running +// test by checking the expected test name against the actual one. We +// use this test to see that the TestInfo object actually changed from +// the previous invocation. +TEST_F(CurrentTestInfoTest, WorksForSecondTestInATestSuite) { + const TestInfo* test_info = + UnitTest::GetInstance()->current_test_info(); + ASSERT_TRUE(nullptr != test_info) + << "There is a test running so we should have a valid TestInfo."; + EXPECT_STREQ("CurrentTestInfoTest", test_info->test_suite_name()) + << "Expected the name of the currently running test suite."; + EXPECT_STREQ("WorksForSecondTestInATestSuite", test_info->name()) + << "Expected the name of the currently running test."; +} + +} // namespace testing + + +// These two lines test that we can define tests in a namespace that +// has the name "testing" and is nested in another namespace. +namespace my_namespace { +namespace testing { + +// Makes sure that TEST knows to use ::testing::Test instead of +// ::my_namespace::testing::Test. +class Test {}; + +// Makes sure that an assertion knows to use ::testing::Message instead of +// ::my_namespace::testing::Message. +class Message {}; + +// Makes sure that an assertion knows to use +// ::testing::AssertionResult instead of +// ::my_namespace::testing::AssertionResult. 
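+// (Each of these classes deliberately shadows a Google Test name; if the
+// macros did not spell the types as ::testing::..., the tests below would
+// fail to compile.)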
+class AssertionResult {}; + +// Tests that an assertion that should succeed works as expected. +TEST(NestedTestingNamespaceTest, Success) { + EXPECT_EQ(1, 1) << "This shouldn't fail."; +} + +// Tests that an assertion that should fail works as expected. +TEST(NestedTestingNamespaceTest, Failure) { + EXPECT_FATAL_FAILURE(FAIL() << "This failure is expected.", + "This failure is expected."); +} + +} // namespace testing +} // namespace my_namespace + +// Tests that one can call superclass SetUp and TearDown methods-- +// that is, that they are not private. +// No tests are based on this fixture; the test "passes" if it compiles +// successfully. +class ProtectedFixtureMethodsTest : public Test { + protected: + void SetUp() override { Test::SetUp(); } + void TearDown() override { Test::TearDown(); } +}; + +// StreamingAssertionsTest tests the streaming versions of a representative +// sample of assertions. +TEST(StreamingAssertionsTest, Unconditional) { + SUCCEED() << "expected success"; + EXPECT_NONFATAL_FAILURE(ADD_FAILURE() << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(FAIL() << "expected failure", + "expected failure"); +} + +#ifdef __BORLANDC__ +// Silences warnings: "Condition is always true", "Unreachable code" +# pragma option push -w-ccc -w-rch +#endif + +TEST(StreamingAssertionsTest, Truth) { + EXPECT_TRUE(true) << "unexpected failure"; + ASSERT_TRUE(true) << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_TRUE(false) << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_TRUE(false) << "expected failure", + "expected failure"); +} + +TEST(StreamingAssertionsTest, Truth2) { + EXPECT_FALSE(false) << "unexpected failure"; + ASSERT_FALSE(false) << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_FALSE(true) << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_FALSE(true) << "expected failure", + "expected failure"); +} + +#ifdef __BORLANDC__ +// Restores warnings after previous "#pragma option push" suppressed them +# pragma option pop +#endif + +TEST(StreamingAssertionsTest, IntegerEquals) { + EXPECT_EQ(1, 1) << "unexpected failure"; + ASSERT_EQ(1, 1) << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_EQ(1, 2) << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_EQ(1, 2) << "expected failure", + "expected failure"); +} + +TEST(StreamingAssertionsTest, IntegerLessThan) { + EXPECT_LT(1, 2) << "unexpected failure"; + ASSERT_LT(1, 2) << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_LT(2, 1) << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_LT(2, 1) << "expected failure", + "expected failure"); +} + +TEST(StreamingAssertionsTest, StringsEqual) { + EXPECT_STREQ("foo", "foo") << "unexpected failure"; + ASSERT_STREQ("foo", "foo") << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_STREQ("foo", "bar") << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_STREQ("foo", "bar") << "expected failure", + "expected failure"); +} + +TEST(StreamingAssertionsTest, StringsNotEqual) { + EXPECT_STRNE("foo", "bar") << "unexpected failure"; + ASSERT_STRNE("foo", "bar") << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_STRNE("foo", "foo") << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_STRNE("foo", "foo") << "expected failure", + "expected failure"); +} + +TEST(StreamingAssertionsTest, StringsEqualIgnoringCase) { + EXPECT_STRCASEEQ("foo", "FOO") << "unexpected failure"; + ASSERT_STRCASEEQ("foo", "FOO") 
<< "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_STRCASEEQ("foo", "bar") << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_STRCASEEQ("foo", "bar") << "expected failure", + "expected failure"); +} + +TEST(StreamingAssertionsTest, StringNotEqualIgnoringCase) { + EXPECT_STRCASENE("foo", "bar") << "unexpected failure"; + ASSERT_STRCASENE("foo", "bar") << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_STRCASENE("foo", "FOO") << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_STRCASENE("bar", "BAR") << "expected failure", + "expected failure"); +} + +TEST(StreamingAssertionsTest, FloatingPointEquals) { + EXPECT_FLOAT_EQ(1.0, 1.0) << "unexpected failure"; + ASSERT_FLOAT_EQ(1.0, 1.0) << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_FLOAT_EQ(0.0, 1.0) << "expected failure", + "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_FLOAT_EQ(0.0, 1.0) << "expected failure", + "expected failure"); +} + +#if GTEST_HAS_EXCEPTIONS + +TEST(StreamingAssertionsTest, Throw) { + EXPECT_THROW(ThrowAnInteger(), int) << "unexpected failure"; + ASSERT_THROW(ThrowAnInteger(), int) << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_THROW(ThrowAnInteger(), bool) << + "expected failure", "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_THROW(ThrowAnInteger(), bool) << + "expected failure", "expected failure"); +} + +TEST(StreamingAssertionsTest, NoThrow) { + EXPECT_NO_THROW(ThrowNothing()) << "unexpected failure"; + ASSERT_NO_THROW(ThrowNothing()) << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_NO_THROW(ThrowAnInteger()) << + "expected failure", "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_NO_THROW(ThrowAnInteger()) << + "expected failure", "expected failure"); +} + +TEST(StreamingAssertionsTest, AnyThrow) { + EXPECT_ANY_THROW(ThrowAnInteger()) << "unexpected failure"; + ASSERT_ANY_THROW(ThrowAnInteger()) << "unexpected failure"; + EXPECT_NONFATAL_FAILURE(EXPECT_ANY_THROW(ThrowNothing()) << + "expected failure", "expected failure"); + EXPECT_FATAL_FAILURE(ASSERT_ANY_THROW(ThrowNothing()) << + "expected failure", "expected failure"); +} + +#endif // GTEST_HAS_EXCEPTIONS + +// Tests that Google Test correctly decides whether to use colors in the output. + +TEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsYes) { + GTEST_FLAG(color) = "yes"; + + SetEnv("TERM", "xterm"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY. + + SetEnv("TERM", "dumb"); // TERM doesn't support colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY. +} + +TEST(ColoredOutputTest, UsesColorsWhenGTestColorFlagIsAliasOfYes) { + SetEnv("TERM", "dumb"); // TERM doesn't support colors. + + GTEST_FLAG(color) = "True"; + EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY. + + GTEST_FLAG(color) = "t"; + EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY. + + GTEST_FLAG(color) = "1"; + EXPECT_TRUE(ShouldUseColor(false)); // Stdout is not a TTY. +} + +TEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsNo) { + GTEST_FLAG(color) = "no"; + + SetEnv("TERM", "xterm"); // TERM supports colors. + EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY. + EXPECT_FALSE(ShouldUseColor(false)); // Stdout is not a TTY. + + SetEnv("TERM", "dumb"); // TERM doesn't support colors. + EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY. 
+ EXPECT_FALSE(ShouldUseColor(false)); // Stdout is not a TTY. +} + +TEST(ColoredOutputTest, UsesNoColorWhenGTestColorFlagIsInvalid) { + SetEnv("TERM", "xterm"); // TERM supports colors. + + GTEST_FLAG(color) = "F"; + EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY. + + GTEST_FLAG(color) = "0"; + EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY. + + GTEST_FLAG(color) = "unknown"; + EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY. +} + +TEST(ColoredOutputTest, UsesColorsWhenStdoutIsTty) { + GTEST_FLAG(color) = "auto"; + + SetEnv("TERM", "xterm"); // TERM supports colors. + EXPECT_FALSE(ShouldUseColor(false)); // Stdout is not a TTY. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. +} + +TEST(ColoredOutputTest, UsesColorsWhenTermSupportsColors) { + GTEST_FLAG(color) = "auto"; + +#if GTEST_OS_WINDOWS && !GTEST_OS_WINDOWS_MINGW + // On Windows, we ignore the TERM variable as it's usually not set. + + SetEnv("TERM", "dumb"); + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", ""); + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "xterm"); + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. +#else + // On non-Windows platforms, we rely on TERM to determine if the + // terminal supports colors. + + SetEnv("TERM", "dumb"); // TERM doesn't support colors. + EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "emacs"); // TERM doesn't support colors. + EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "vt100"); // TERM doesn't support colors. + EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "xterm-mono"); // TERM doesn't support colors. + EXPECT_FALSE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "xterm"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "xterm-color"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "xterm-256color"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "screen"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "screen-256color"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "tmux"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "tmux-256color"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "rxvt-unicode"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "rxvt-unicode-256color"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "linux"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. + + SetEnv("TERM", "cygwin"); // TERM supports colors. + EXPECT_TRUE(ShouldUseColor(true)); // Stdout is a TTY. +#endif // GTEST_OS_WINDOWS +} + +// Verifies that StaticAssertTypeEq works in a namespace scope. + +static bool dummy1 GTEST_ATTRIBUTE_UNUSED_ = StaticAssertTypeEq<bool, bool>(); +static bool dummy2 GTEST_ATTRIBUTE_UNUSED_ = + StaticAssertTypeEq<const int, const int>(); + +// Verifies that StaticAssertTypeEq works in a class. 
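+// (StaticAssertTypeEq<T1, T2>() refuses to compile when T1 and T2 differ, so
+// these checks pass simply by compiling.)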
+ +template <typename T> +class StaticAssertTypeEqTestHelper { + public: + StaticAssertTypeEqTestHelper() { StaticAssertTypeEq<bool, T>(); } +}; + +TEST(StaticAssertTypeEqTest, WorksInClass) { + StaticAssertTypeEqTestHelper<bool>(); +} + +// Verifies that StaticAssertTypeEq works inside a function. + +typedef int IntAlias; + +TEST(StaticAssertTypeEqTest, CompilesForEqualTypes) { + StaticAssertTypeEq<int, IntAlias>(); + StaticAssertTypeEq<int*, IntAlias*>(); +} + +TEST(HasNonfatalFailureTest, ReturnsFalseWhenThereIsNoFailure) { + EXPECT_FALSE(HasNonfatalFailure()); +} + +static void FailFatally() { FAIL(); } + +TEST(HasNonfatalFailureTest, ReturnsFalseWhenThereIsOnlyFatalFailure) { + FailFatally(); + const bool has_nonfatal_failure = HasNonfatalFailure(); + ClearCurrentTestPartResults(); + EXPECT_FALSE(has_nonfatal_failure); +} + +TEST(HasNonfatalFailureTest, ReturnsTrueWhenThereIsNonfatalFailure) { + ADD_FAILURE(); + const bool has_nonfatal_failure = HasNonfatalFailure(); + ClearCurrentTestPartResults(); + EXPECT_TRUE(has_nonfatal_failure); +} + +TEST(HasNonfatalFailureTest, ReturnsTrueWhenThereAreFatalAndNonfatalFailures) { + FailFatally(); + ADD_FAILURE(); + const bool has_nonfatal_failure = HasNonfatalFailure(); + ClearCurrentTestPartResults(); + EXPECT_TRUE(has_nonfatal_failure); +} + +// A wrapper for calling HasNonfatalFailure outside of a test body. +static bool HasNonfatalFailureHelper() { + return testing::Test::HasNonfatalFailure(); +} + +TEST(HasNonfatalFailureTest, WorksOutsideOfTestBody) { + EXPECT_FALSE(HasNonfatalFailureHelper()); +} + +TEST(HasNonfatalFailureTest, WorksOutsideOfTestBody2) { + ADD_FAILURE(); + const bool has_nonfatal_failure = HasNonfatalFailureHelper(); + ClearCurrentTestPartResults(); + EXPECT_TRUE(has_nonfatal_failure); +} + +TEST(HasFailureTest, ReturnsFalseWhenThereIsNoFailure) { + EXPECT_FALSE(HasFailure()); +} + +TEST(HasFailureTest, ReturnsTrueWhenThereIsFatalFailure) { + FailFatally(); + const bool has_failure = HasFailure(); + ClearCurrentTestPartResults(); + EXPECT_TRUE(has_failure); +} + +TEST(HasFailureTest, ReturnsTrueWhenThereIsNonfatalFailure) { + ADD_FAILURE(); + const bool has_failure = HasFailure(); + ClearCurrentTestPartResults(); + EXPECT_TRUE(has_failure); +} + +TEST(HasFailureTest, ReturnsTrueWhenThereAreFatalAndNonfatalFailures) { + FailFatally(); + ADD_FAILURE(); + const bool has_failure = HasFailure(); + ClearCurrentTestPartResults(); + EXPECT_TRUE(has_failure); +} + +// A wrapper for calling HasFailure outside of a test body. +static bool HasFailureHelper() { return testing::Test::HasFailure(); } + +TEST(HasFailureTest, WorksOutsideOfTestBody) { + EXPECT_FALSE(HasFailureHelper()); +} + +TEST(HasFailureTest, WorksOutsideOfTestBody2) { + ADD_FAILURE(); + const bool has_failure = HasFailureHelper(); + ClearCurrentTestPartResults(); + EXPECT_TRUE(has_failure); +} + +class TestListener : public EmptyTestEventListener { + public: + TestListener() : on_start_counter_(nullptr), is_destroyed_(nullptr) {} + TestListener(int* on_start_counter, bool* is_destroyed) + : on_start_counter_(on_start_counter), + is_destroyed_(is_destroyed) {} + + ~TestListener() override { + if (is_destroyed_) + *is_destroyed_ = true; + } + + protected: + void OnTestProgramStart(const UnitTest& /*unit_test*/) override { + if (on_start_counter_ != nullptr) (*on_start_counter_)++; + } + + private: + int* on_start_counter_; + bool* is_destroyed_; +}; + +// Tests the constructor. 
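+// A freshly constructed TestEventListeners already has a repeater installed
+// but no default result printer or XML generator.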
+TEST(TestEventListenersTest, ConstructionWorks) { + TestEventListeners listeners; + + EXPECT_TRUE(TestEventListenersAccessor::GetRepeater(&listeners) != nullptr); + EXPECT_TRUE(listeners.default_result_printer() == nullptr); + EXPECT_TRUE(listeners.default_xml_generator() == nullptr); +} + +// Tests that the TestEventListeners destructor deletes all the listeners it +// owns. +TEST(TestEventListenersTest, DestructionWorks) { + bool default_result_printer_is_destroyed = false; + bool default_xml_printer_is_destroyed = false; + bool extra_listener_is_destroyed = false; + TestListener* default_result_printer = + new TestListener(nullptr, &default_result_printer_is_destroyed); + TestListener* default_xml_printer = + new TestListener(nullptr, &default_xml_printer_is_destroyed); + TestListener* extra_listener = + new TestListener(nullptr, &extra_listener_is_destroyed); + + { + TestEventListeners listeners; + TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, + default_result_printer); + TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, + default_xml_printer); + listeners.Append(extra_listener); + } + EXPECT_TRUE(default_result_printer_is_destroyed); + EXPECT_TRUE(default_xml_printer_is_destroyed); + EXPECT_TRUE(extra_listener_is_destroyed); +} + +// Tests that a listener Append'ed to a TestEventListeners list starts +// receiving events. +TEST(TestEventListenersTest, Append) { + int on_start_counter = 0; + bool is_destroyed = false; + TestListener* listener = new TestListener(&on_start_counter, &is_destroyed); + { + TestEventListeners listeners; + listeners.Append(listener); + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + EXPECT_EQ(1, on_start_counter); + } + EXPECT_TRUE(is_destroyed); +} + +// Tests that listeners receive events in the order they were appended to +// the list, except for *End requests, which must be received in the reverse +// order. +class SequenceTestingListener : public EmptyTestEventListener { + public: + SequenceTestingListener(std::vector<std::string>* vector, const char* id) + : vector_(vector), id_(id) {} + + protected: + void OnTestProgramStart(const UnitTest& /*unit_test*/) override { + vector_->push_back(GetEventDescription("OnTestProgramStart")); + } + + void OnTestProgramEnd(const UnitTest& /*unit_test*/) override { + vector_->push_back(GetEventDescription("OnTestProgramEnd")); + } + + void OnTestIterationStart(const UnitTest& /*unit_test*/, + int /*iteration*/) override { + vector_->push_back(GetEventDescription("OnTestIterationStart")); + } + + void OnTestIterationEnd(const UnitTest& /*unit_test*/, + int /*iteration*/) override { + vector_->push_back(GetEventDescription("OnTestIterationEnd")); + } + + private: + std::string GetEventDescription(const char* method) { + Message message; + message << id_ << "." 
<< method; + return message.GetString(); + } + + std::vector<std::string>* vector_; + const char* const id_; + + GTEST_DISALLOW_COPY_AND_ASSIGN_(SequenceTestingListener); +}; + +TEST(EventListenerTest, AppendKeepsOrder) { + std::vector<std::string> vec; + TestEventListeners listeners; + listeners.Append(new SequenceTestingListener(&vec, "1st")); + listeners.Append(new SequenceTestingListener(&vec, "2nd")); + listeners.Append(new SequenceTestingListener(&vec, "3rd")); + + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + ASSERT_EQ(3U, vec.size()); + EXPECT_STREQ("1st.OnTestProgramStart", vec[0].c_str()); + EXPECT_STREQ("2nd.OnTestProgramStart", vec[1].c_str()); + EXPECT_STREQ("3rd.OnTestProgramStart", vec[2].c_str()); + + vec.clear(); + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramEnd( + *UnitTest::GetInstance()); + ASSERT_EQ(3U, vec.size()); + EXPECT_STREQ("3rd.OnTestProgramEnd", vec[0].c_str()); + EXPECT_STREQ("2nd.OnTestProgramEnd", vec[1].c_str()); + EXPECT_STREQ("1st.OnTestProgramEnd", vec[2].c_str()); + + vec.clear(); + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationStart( + *UnitTest::GetInstance(), 0); + ASSERT_EQ(3U, vec.size()); + EXPECT_STREQ("1st.OnTestIterationStart", vec[0].c_str()); + EXPECT_STREQ("2nd.OnTestIterationStart", vec[1].c_str()); + EXPECT_STREQ("3rd.OnTestIterationStart", vec[2].c_str()); + + vec.clear(); + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestIterationEnd( + *UnitTest::GetInstance(), 0); + ASSERT_EQ(3U, vec.size()); + EXPECT_STREQ("3rd.OnTestIterationEnd", vec[0].c_str()); + EXPECT_STREQ("2nd.OnTestIterationEnd", vec[1].c_str()); + EXPECT_STREQ("1st.OnTestIterationEnd", vec[2].c_str()); +} + +// Tests that a listener removed from a TestEventListeners list stops receiving +// events and is not deleted when the list is destroyed. +TEST(TestEventListenersTest, Release) { + int on_start_counter = 0; + bool is_destroyed = false; + // Although Append passes the ownership of this object to the list, + // the following calls release it, and we need to delete it before the + // test ends. + TestListener* listener = new TestListener(&on_start_counter, &is_destroyed); + { + TestEventListeners listeners; + listeners.Append(listener); + EXPECT_EQ(listener, listeners.Release(listener)); + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + EXPECT_TRUE(listeners.Release(listener) == nullptr); + } + EXPECT_EQ(0, on_start_counter); + EXPECT_FALSE(is_destroyed); + delete listener; +} + +// Tests that no events are forwarded when event forwarding is disabled. +TEST(EventListenerTest, SuppressEventForwarding) { + int on_start_counter = 0; + TestListener* listener = new TestListener(&on_start_counter, nullptr); + + TestEventListeners listeners; + listeners.Append(listener); + ASSERT_TRUE(TestEventListenersAccessor::EventForwardingEnabled(listeners)); + TestEventListenersAccessor::SuppressEventForwarding(&listeners); + ASSERT_FALSE(TestEventListenersAccessor::EventForwardingEnabled(listeners)); + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + EXPECT_EQ(0, on_start_counter); +} + +// Tests that events generated by Google Test are not forwarded in +// death test subprocesses. 
+TEST(EventListenerDeathTest, EventsNotForwardedInDeathTestSubprecesses) { + EXPECT_DEATH_IF_SUPPORTED({ + GTEST_CHECK_(TestEventListenersAccessor::EventForwardingEnabled( + *GetUnitTestImpl()->listeners())) << "expected failure";}, + "expected failure"); +} + +// Tests that a listener installed via SetDefaultResultPrinter() starts +// receiving events and is returned via default_result_printer() and that +// the previous default_result_printer is removed from the list and deleted. +TEST(EventListenerTest, default_result_printer) { + int on_start_counter = 0; + bool is_destroyed = false; + TestListener* listener = new TestListener(&on_start_counter, &is_destroyed); + + TestEventListeners listeners; + TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, listener); + + EXPECT_EQ(listener, listeners.default_result_printer()); + + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + + EXPECT_EQ(1, on_start_counter); + + // Replacing default_result_printer with something else should remove it + // from the list and destroy it. + TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, nullptr); + + EXPECT_TRUE(listeners.default_result_printer() == nullptr); + EXPECT_TRUE(is_destroyed); + + // After broadcasting an event the counter is still the same, indicating + // the listener is not in the list anymore. + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + EXPECT_EQ(1, on_start_counter); +} + +// Tests that the default_result_printer listener stops receiving events +// when removed via Release and that is not owned by the list anymore. +TEST(EventListenerTest, RemovingDefaultResultPrinterWorks) { + int on_start_counter = 0; + bool is_destroyed = false; + // Although Append passes the ownership of this object to the list, + // the following calls release it, and we need to delete it before the + // test ends. + TestListener* listener = new TestListener(&on_start_counter, &is_destroyed); + { + TestEventListeners listeners; + TestEventListenersAccessor::SetDefaultResultPrinter(&listeners, listener); + + EXPECT_EQ(listener, listeners.Release(listener)); + EXPECT_TRUE(listeners.default_result_printer() == nullptr); + EXPECT_FALSE(is_destroyed); + + // Broadcasting events now should not affect default_result_printer. + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + EXPECT_EQ(0, on_start_counter); + } + // Destroying the list should not affect the listener now, too. + EXPECT_FALSE(is_destroyed); + delete listener; +} + +// Tests that a listener installed via SetDefaultXmlGenerator() starts +// receiving events and is returned via default_xml_generator() and that +// the previous default_xml_generator is removed from the list and deleted. +TEST(EventListenerTest, default_xml_generator) { + int on_start_counter = 0; + bool is_destroyed = false; + TestListener* listener = new TestListener(&on_start_counter, &is_destroyed); + + TestEventListeners listeners; + TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, listener); + + EXPECT_EQ(listener, listeners.default_xml_generator()); + + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + + EXPECT_EQ(1, on_start_counter); + + // Replacing default_xml_generator with something else should remove it + // from the list and destroy it. 
+ TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, nullptr); + + EXPECT_TRUE(listeners.default_xml_generator() == nullptr); + EXPECT_TRUE(is_destroyed); + + // After broadcasting an event the counter is still the same, indicating + // the listener is not in the list anymore. + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + EXPECT_EQ(1, on_start_counter); +} + +// Tests that the default_xml_generator listener stops receiving events +// when removed via Release and that is not owned by the list anymore. +TEST(EventListenerTest, RemovingDefaultXmlGeneratorWorks) { + int on_start_counter = 0; + bool is_destroyed = false; + // Although Append passes the ownership of this object to the list, + // the following calls release it, and we need to delete it before the + // test ends. + TestListener* listener = new TestListener(&on_start_counter, &is_destroyed); + { + TestEventListeners listeners; + TestEventListenersAccessor::SetDefaultXmlGenerator(&listeners, listener); + + EXPECT_EQ(listener, listeners.Release(listener)); + EXPECT_TRUE(listeners.default_xml_generator() == nullptr); + EXPECT_FALSE(is_destroyed); + + // Broadcasting events now should not affect default_xml_generator. + TestEventListenersAccessor::GetRepeater(&listeners)->OnTestProgramStart( + *UnitTest::GetInstance()); + EXPECT_EQ(0, on_start_counter); + } + // Destroying the list should not affect the listener now, too. + EXPECT_FALSE(is_destroyed); + delete listener; +} + +// Sanity tests to ensure that the alternative, verbose spellings of +// some of the macros work. We don't test them thoroughly as that +// would be quite involved. Since their implementations are +// straightforward, and they are rarely used, we'll just rely on the +// users to tell us when they are broken. +GTEST_TEST(AlternativeNameTest, Works) { // GTEST_TEST is the same as TEST. + GTEST_SUCCEED() << "OK"; // GTEST_SUCCEED is the same as SUCCEED. + + // GTEST_FAIL is the same as FAIL. + EXPECT_FATAL_FAILURE(GTEST_FAIL() << "An expected failure", + "An expected failure"); + + // GTEST_ASSERT_XY is the same as ASSERT_XY. + + GTEST_ASSERT_EQ(0, 0); + EXPECT_FATAL_FAILURE(GTEST_ASSERT_EQ(0, 1) << "An expected failure", + "An expected failure"); + EXPECT_FATAL_FAILURE(GTEST_ASSERT_EQ(1, 0) << "An expected failure", + "An expected failure"); + + GTEST_ASSERT_NE(0, 1); + GTEST_ASSERT_NE(1, 0); + EXPECT_FATAL_FAILURE(GTEST_ASSERT_NE(0, 0) << "An expected failure", + "An expected failure"); + + GTEST_ASSERT_LE(0, 0); + GTEST_ASSERT_LE(0, 1); + EXPECT_FATAL_FAILURE(GTEST_ASSERT_LE(1, 0) << "An expected failure", + "An expected failure"); + + GTEST_ASSERT_LT(0, 1); + EXPECT_FATAL_FAILURE(GTEST_ASSERT_LT(0, 0) << "An expected failure", + "An expected failure"); + EXPECT_FATAL_FAILURE(GTEST_ASSERT_LT(1, 0) << "An expected failure", + "An expected failure"); + + GTEST_ASSERT_GE(0, 0); + GTEST_ASSERT_GE(1, 0); + EXPECT_FATAL_FAILURE(GTEST_ASSERT_GE(0, 1) << "An expected failure", + "An expected failure"); + + GTEST_ASSERT_GT(1, 0); + EXPECT_FATAL_FAILURE(GTEST_ASSERT_GT(0, 1) << "An expected failure", + "An expected failure"); + EXPECT_FATAL_FAILURE(GTEST_ASSERT_GT(1, 1) << "An expected failure", + "An expected failure"); +} + +// Tests for internal utilities necessary for implementation of the universal +// printing. 
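+// (These are the type traits the universal printer consults when deciding how
+// to format a value, e.g. whether it looks like a protocol buffer message.)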
+ +class ConversionHelperBase {}; +class ConversionHelperDerived : public ConversionHelperBase {}; + +struct HasDebugStringMethods { + std::string DebugString() const { return ""; } + std::string ShortDebugString() const { return ""; } +}; + +struct InheritsDebugStringMethods : public HasDebugStringMethods {}; + +struct WrongTypeDebugStringMethod { + std::string DebugString() const { return ""; } + int ShortDebugString() const { return 1; } +}; + +struct NotConstDebugStringMethod { + std::string DebugString() { return ""; } + std::string ShortDebugString() const { return ""; } +}; + +struct MissingDebugStringMethod { + std::string DebugString() { return ""; } +}; + +struct IncompleteType; + +// Tests that HasDebugStringAndShortDebugString<T>::value is a compile-time +// constant. +TEST(HasDebugStringAndShortDebugStringTest, ValueIsCompileTimeConstant) { + GTEST_COMPILE_ASSERT_( + HasDebugStringAndShortDebugString<HasDebugStringMethods>::value, + const_true); + GTEST_COMPILE_ASSERT_( + HasDebugStringAndShortDebugString<InheritsDebugStringMethods>::value, + const_true); + GTEST_COMPILE_ASSERT_(HasDebugStringAndShortDebugString< + const InheritsDebugStringMethods>::value, + const_true); + GTEST_COMPILE_ASSERT_( + !HasDebugStringAndShortDebugString<WrongTypeDebugStringMethod>::value, + const_false); + GTEST_COMPILE_ASSERT_( + !HasDebugStringAndShortDebugString<NotConstDebugStringMethod>::value, + const_false); + GTEST_COMPILE_ASSERT_( + !HasDebugStringAndShortDebugString<MissingDebugStringMethod>::value, + const_false); + GTEST_COMPILE_ASSERT_( + !HasDebugStringAndShortDebugString<IncompleteType>::value, const_false); + GTEST_COMPILE_ASSERT_(!HasDebugStringAndShortDebugString<int>::value, + const_false); +} + +// Tests that HasDebugStringAndShortDebugString<T>::value is true when T has +// needed methods. +TEST(HasDebugStringAndShortDebugStringTest, + ValueIsTrueWhenTypeHasDebugStringAndShortDebugString) { + EXPECT_TRUE( + HasDebugStringAndShortDebugString<InheritsDebugStringMethods>::value); +} + +// Tests that HasDebugStringAndShortDebugString<T>::value is false when T +// doesn't have needed methods. +TEST(HasDebugStringAndShortDebugStringTest, + ValueIsFalseWhenTypeIsNotAProtocolMessage) { + EXPECT_FALSE(HasDebugStringAndShortDebugString<int>::value); + EXPECT_FALSE( + HasDebugStringAndShortDebugString<const ConversionHelperBase>::value); +} + +// Tests GTEST_REMOVE_REFERENCE_AND_CONST_. + +template <typename T1, typename T2> +void TestGTestRemoveReferenceAndConst() { + static_assert(std::is_same<T1, GTEST_REMOVE_REFERENCE_AND_CONST_(T2)>::value, + "GTEST_REMOVE_REFERENCE_AND_CONST_ failed."); +} + +TEST(RemoveReferenceToConstTest, Works) { + TestGTestRemoveReferenceAndConst<int, int>(); + TestGTestRemoveReferenceAndConst<double, double&>(); + TestGTestRemoveReferenceAndConst<char, const char>(); + TestGTestRemoveReferenceAndConst<char, const char&>(); + TestGTestRemoveReferenceAndConst<const char*, const char*>(); +} + +// Tests GTEST_REFERENCE_TO_CONST_. + +template <typename T1, typename T2> +void TestGTestReferenceToConst() { + static_assert(std::is_same<T1, GTEST_REFERENCE_TO_CONST_(T2)>::value, + "GTEST_REFERENCE_TO_CONST_ failed."); +} + +TEST(GTestReferenceToConstTest, Works) { + TestGTestReferenceToConst<const char&, char>(); + TestGTestReferenceToConst<const int&, const int>(); + TestGTestReferenceToConst<const double&, double>(); + TestGTestReferenceToConst<const std::string&, const std::string&>(); +} + + +// Tests IsContainerTest. 
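+// IsContainerTest<T>(0) is an overload-resolution probe: the selected
+// overload's return type has sizeof(IsContainer) for container-like types and
+// sizeof(IsNotContainer) otherwise, which is what the comparisons below check.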
+ +class NonContainer {}; + +TEST(IsContainerTestTest, WorksForNonContainer) { + EXPECT_EQ(sizeof(IsNotContainer), sizeof(IsContainerTest<int>(0))); + EXPECT_EQ(sizeof(IsNotContainer), sizeof(IsContainerTest<char[5]>(0))); + EXPECT_EQ(sizeof(IsNotContainer), sizeof(IsContainerTest<NonContainer>(0))); +} + +TEST(IsContainerTestTest, WorksForContainer) { + EXPECT_EQ(sizeof(IsContainer), + sizeof(IsContainerTest<std::vector<bool> >(0))); + EXPECT_EQ(sizeof(IsContainer), + sizeof(IsContainerTest<std::map<int, double> >(0))); +} + +struct ConstOnlyContainerWithPointerIterator { + using const_iterator = int*; + const_iterator begin() const; + const_iterator end() const; +}; + +struct ConstOnlyContainerWithClassIterator { + struct const_iterator { + const int& operator*() const; + const_iterator& operator++(/* pre-increment */); + }; + const_iterator begin() const; + const_iterator end() const; +}; + +TEST(IsContainerTestTest, ConstOnlyContainer) { + EXPECT_EQ(sizeof(IsContainer), + sizeof(IsContainerTest<ConstOnlyContainerWithPointerIterator>(0))); + EXPECT_EQ(sizeof(IsContainer), + sizeof(IsContainerTest<ConstOnlyContainerWithClassIterator>(0))); +} + +// Tests IsHashTable. +struct AHashTable { + typedef void hasher; +}; +struct NotReallyAHashTable { + typedef void hasher; + typedef void reverse_iterator; +}; +TEST(IsHashTable, Basic) { + EXPECT_TRUE(testing::internal::IsHashTable<AHashTable>::value); + EXPECT_FALSE(testing::internal::IsHashTable<NotReallyAHashTable>::value); + EXPECT_FALSE(testing::internal::IsHashTable<std::vector<int>>::value); + EXPECT_TRUE(testing::internal::IsHashTable<std::unordered_set<int>>::value); +} + +// Tests ArrayEq(). + +TEST(ArrayEqTest, WorksForDegeneratedArrays) { + EXPECT_TRUE(ArrayEq(5, 5L)); + EXPECT_FALSE(ArrayEq('a', 0)); +} + +TEST(ArrayEqTest, WorksForOneDimensionalArrays) { + // Note that a and b are distinct but compatible types. + const int a[] = { 0, 1 }; + long b[] = { 0, 1 }; + EXPECT_TRUE(ArrayEq(a, b)); + EXPECT_TRUE(ArrayEq(a, 2, b)); + + b[0] = 2; + EXPECT_FALSE(ArrayEq(a, b)); + EXPECT_FALSE(ArrayEq(a, 1, b)); +} + +TEST(ArrayEqTest, WorksForTwoDimensionalArrays) { + const char a[][3] = { "hi", "lo" }; + const char b[][3] = { "hi", "lo" }; + const char c[][3] = { "hi", "li" }; + + EXPECT_TRUE(ArrayEq(a, b)); + EXPECT_TRUE(ArrayEq(a, 2, b)); + + EXPECT_FALSE(ArrayEq(a, c)); + EXPECT_FALSE(ArrayEq(a, 2, c)); +} + +// Tests ArrayAwareFind(). + +TEST(ArrayAwareFindTest, WorksForOneDimensionalArray) { + const char a[] = "hello"; + EXPECT_EQ(a + 4, ArrayAwareFind(a, a + 5, 'o')); + EXPECT_EQ(a + 5, ArrayAwareFind(a, a + 5, 'x')); +} + +TEST(ArrayAwareFindTest, WorksForTwoDimensionalArray) { + int a[][2] = { { 0, 1 }, { 2, 3 }, { 4, 5 } }; + const int b[2] = { 2, 3 }; + EXPECT_EQ(a + 1, ArrayAwareFind(a, a + 3, b)); + + const int c[2] = { 6, 7 }; + EXPECT_EQ(a + 3, ArrayAwareFind(a, a + 3, c)); +} + +// Tests CopyArray(). + +TEST(CopyArrayTest, WorksForDegeneratedArrays) { + int n = 0; + CopyArray('a', &n); + EXPECT_EQ('a', n); +} + +TEST(CopyArrayTest, WorksForOneDimensionalArrays) { + const char a[3] = "hi"; + int b[3]; +#ifndef __BORLANDC__ // C++Builder cannot compile some array size deductions. + CopyArray(a, &b); + EXPECT_TRUE(ArrayEq(a, b)); +#endif + + int c[3]; + CopyArray(a, 3, c); + EXPECT_TRUE(ArrayEq(a, c)); +} + +TEST(CopyArrayTest, WorksForTwoDimensionalArrays) { + const int a[2][3] = { { 0, 1, 2 }, { 3, 4, 5 } }; + int b[2][3]; +#ifndef __BORLANDC__ // C++Builder cannot compile some array size deductions. 
+ CopyArray(a, &b); + EXPECT_TRUE(ArrayEq(a, b)); +#endif + + int c[2][3]; + CopyArray(a, 2, c); + EXPECT_TRUE(ArrayEq(a, c)); +} + +// Tests NativeArray. + +TEST(NativeArrayTest, ConstructorFromArrayWorks) { + const int a[3] = { 0, 1, 2 }; + NativeArray<int> na(a, 3, RelationToSourceReference()); + EXPECT_EQ(3U, na.size()); + EXPECT_EQ(a, na.begin()); +} + +TEST(NativeArrayTest, CreatesAndDeletesCopyOfArrayWhenAskedTo) { + typedef int Array[2]; + Array* a = new Array[1]; + (*a)[0] = 0; + (*a)[1] = 1; + NativeArray<int> na(*a, 2, RelationToSourceCopy()); + EXPECT_NE(*a, na.begin()); + delete[] a; + EXPECT_EQ(0, na.begin()[0]); + EXPECT_EQ(1, na.begin()[1]); + + // We rely on the heap checker to verify that na deletes the copy of + // array. +} + +TEST(NativeArrayTest, TypeMembersAreCorrect) { + StaticAssertTypeEq<char, NativeArray<char>::value_type>(); + StaticAssertTypeEq<int[2], NativeArray<int[2]>::value_type>(); + + StaticAssertTypeEq<const char*, NativeArray<char>::const_iterator>(); + StaticAssertTypeEq<const bool(*)[2], NativeArray<bool[2]>::const_iterator>(); +} + +TEST(NativeArrayTest, MethodsWork) { + const int a[3] = { 0, 1, 2 }; + NativeArray<int> na(a, 3, RelationToSourceCopy()); + ASSERT_EQ(3U, na.size()); + EXPECT_EQ(3, na.end() - na.begin()); + + NativeArray<int>::const_iterator it = na.begin(); + EXPECT_EQ(0, *it); + ++it; + EXPECT_EQ(1, *it); + it++; + EXPECT_EQ(2, *it); + ++it; + EXPECT_EQ(na.end(), it); + + EXPECT_TRUE(na == na); + + NativeArray<int> na2(a, 3, RelationToSourceReference()); + EXPECT_TRUE(na == na2); + + const int b1[3] = { 0, 1, 1 }; + const int b2[4] = { 0, 1, 2, 3 }; + EXPECT_FALSE(na == NativeArray<int>(b1, 3, RelationToSourceReference())); + EXPECT_FALSE(na == NativeArray<int>(b2, 4, RelationToSourceCopy())); +} + +TEST(NativeArrayTest, WorksForTwoDimensionalArray) { + const char a[2][3] = { "hi", "lo" }; + NativeArray<char[3]> na(a, 2, RelationToSourceReference()); + ASSERT_EQ(2U, na.size()); + EXPECT_EQ(a, na.begin()); +} + +// IndexSequence +TEST(IndexSequence, MakeIndexSequence) { + using testing::internal::IndexSequence; + using testing::internal::MakeIndexSequence; + EXPECT_TRUE( + (std::is_same<IndexSequence<>, MakeIndexSequence<0>::type>::value)); + EXPECT_TRUE( + (std::is_same<IndexSequence<0>, MakeIndexSequence<1>::type>::value)); + EXPECT_TRUE( + (std::is_same<IndexSequence<0, 1>, MakeIndexSequence<2>::type>::value)); + EXPECT_TRUE(( + std::is_same<IndexSequence<0, 1, 2>, MakeIndexSequence<3>::type>::value)); + EXPECT_TRUE( + (std::is_base_of<IndexSequence<0, 1, 2>, MakeIndexSequence<3>>::value)); +} + +// ElemFromList +TEST(ElemFromList, Basic) { + using testing::internal::ElemFromList; + EXPECT_TRUE( + (std::is_same<int, ElemFromList<0, int, double, char>::type>::value)); + EXPECT_TRUE( + (std::is_same<double, ElemFromList<1, int, double, char>::type>::value)); + EXPECT_TRUE( + (std::is_same<char, ElemFromList<2, int, double, char>::type>::value)); + EXPECT_TRUE(( + std::is_same<char, ElemFromList<7, int, int, int, int, int, int, int, + char, int, int, int, int>::type>::value)); +} + +// FlatTuple +TEST(FlatTuple, Basic) { + using testing::internal::FlatTuple; + + FlatTuple<int, double, const char*> tuple = {}; + EXPECT_EQ(0, tuple.Get<0>()); + EXPECT_EQ(0.0, tuple.Get<1>()); + EXPECT_EQ(nullptr, tuple.Get<2>()); + + tuple = FlatTuple<int, double, const char*>( + testing::internal::FlatTupleConstructTag{}, 7, 3.2, "Foo"); + EXPECT_EQ(7, tuple.Get<0>()); + EXPECT_EQ(3.2, tuple.Get<1>()); + EXPECT_EQ(std::string("Foo"), tuple.Get<2>()); + 
+ tuple.Get<1>() = 5.1; + EXPECT_EQ(5.1, tuple.Get<1>()); +} + +namespace { +std::string AddIntToString(int i, const std::string& s) { + return s + std::to_string(i); +} +} // namespace + +TEST(FlatTuple, Apply) { + using testing::internal::FlatTuple; + + FlatTuple<int, std::string> tuple{testing::internal::FlatTupleConstructTag{}, + 5, "Hello"}; + + // Lambda. + EXPECT_TRUE(tuple.Apply([](int i, const std::string& s) -> bool { + return i == static_cast<int>(s.size()); + })); + + // Function. + EXPECT_EQ(tuple.Apply(AddIntToString), "Hello5"); + + // Mutating operations. + tuple.Apply([](int& i, std::string& s) { + ++i; + s += s; + }); + EXPECT_EQ(tuple.Get<0>(), 6); + EXPECT_EQ(tuple.Get<1>(), "HelloHello"); +} + +struct ConstructionCounting { + ConstructionCounting() { ++default_ctor_calls; } + ~ConstructionCounting() { ++dtor_calls; } + ConstructionCounting(const ConstructionCounting&) { ++copy_ctor_calls; } + ConstructionCounting(ConstructionCounting&&) noexcept { ++move_ctor_calls; } + ConstructionCounting& operator=(const ConstructionCounting&) { + ++copy_assignment_calls; + return *this; + } + ConstructionCounting& operator=(ConstructionCounting&&) noexcept { + ++move_assignment_calls; + return *this; + } + + static void Reset() { + default_ctor_calls = 0; + dtor_calls = 0; + copy_ctor_calls = 0; + move_ctor_calls = 0; + copy_assignment_calls = 0; + move_assignment_calls = 0; + } + + static int default_ctor_calls; + static int dtor_calls; + static int copy_ctor_calls; + static int move_ctor_calls; + static int copy_assignment_calls; + static int move_assignment_calls; +}; + +int ConstructionCounting::default_ctor_calls = 0; +int ConstructionCounting::dtor_calls = 0; +int ConstructionCounting::copy_ctor_calls = 0; +int ConstructionCounting::move_ctor_calls = 0; +int ConstructionCounting::copy_assignment_calls = 0; +int ConstructionCounting::move_assignment_calls = 0; + +TEST(FlatTuple, ConstructorCalls) { + using testing::internal::FlatTuple; + + // Default construction. + ConstructionCounting::Reset(); + { FlatTuple<ConstructionCounting> tuple; } + EXPECT_EQ(ConstructionCounting::default_ctor_calls, 1); + EXPECT_EQ(ConstructionCounting::dtor_calls, 1); + EXPECT_EQ(ConstructionCounting::copy_ctor_calls, 0); + EXPECT_EQ(ConstructionCounting::move_ctor_calls, 0); + EXPECT_EQ(ConstructionCounting::copy_assignment_calls, 0); + EXPECT_EQ(ConstructionCounting::move_assignment_calls, 0); + + // Copy construction. + ConstructionCounting::Reset(); + { + ConstructionCounting elem; + FlatTuple<ConstructionCounting> tuple{ + testing::internal::FlatTupleConstructTag{}, elem}; + } + EXPECT_EQ(ConstructionCounting::default_ctor_calls, 1); + EXPECT_EQ(ConstructionCounting::dtor_calls, 2); + EXPECT_EQ(ConstructionCounting::copy_ctor_calls, 1); + EXPECT_EQ(ConstructionCounting::move_ctor_calls, 0); + EXPECT_EQ(ConstructionCounting::copy_assignment_calls, 0); + EXPECT_EQ(ConstructionCounting::move_assignment_calls, 0); + + // Move construction. + ConstructionCounting::Reset(); + { + FlatTuple<ConstructionCounting> tuple{ + testing::internal::FlatTupleConstructTag{}, ConstructionCounting{}}; + } + EXPECT_EQ(ConstructionCounting::default_ctor_calls, 1); + EXPECT_EQ(ConstructionCounting::dtor_calls, 2); + EXPECT_EQ(ConstructionCounting::copy_ctor_calls, 0); + EXPECT_EQ(ConstructionCounting::move_ctor_calls, 1); + EXPECT_EQ(ConstructionCounting::copy_assignment_calls, 0); + EXPECT_EQ(ConstructionCounting::move_assignment_calls, 0); + + // Copy assignment. 
+ // TODO(ofats): it should be testing assignment operator of FlatTuple, not its + // elements + ConstructionCounting::Reset(); + { + FlatTuple<ConstructionCounting> tuple; + ConstructionCounting elem; + tuple.Get<0>() = elem; + } + EXPECT_EQ(ConstructionCounting::default_ctor_calls, 2); + EXPECT_EQ(ConstructionCounting::dtor_calls, 2); + EXPECT_EQ(ConstructionCounting::copy_ctor_calls, 0); + EXPECT_EQ(ConstructionCounting::move_ctor_calls, 0); + EXPECT_EQ(ConstructionCounting::copy_assignment_calls, 1); + EXPECT_EQ(ConstructionCounting::move_assignment_calls, 0); + + // Move assignment. + // TODO(ofats): it should be testing assignment operator of FlatTuple, not its + // elements + ConstructionCounting::Reset(); + { + FlatTuple<ConstructionCounting> tuple; + tuple.Get<0>() = ConstructionCounting{}; + } + EXPECT_EQ(ConstructionCounting::default_ctor_calls, 2); + EXPECT_EQ(ConstructionCounting::dtor_calls, 2); + EXPECT_EQ(ConstructionCounting::copy_ctor_calls, 0); + EXPECT_EQ(ConstructionCounting::move_ctor_calls, 0); + EXPECT_EQ(ConstructionCounting::copy_assignment_calls, 0); + EXPECT_EQ(ConstructionCounting::move_assignment_calls, 1); + + ConstructionCounting::Reset(); +} + +TEST(FlatTuple, ManyTypes) { + using testing::internal::FlatTuple; + + // Instantiate FlatTuple with 257 ints. + // Tests show that we can do it with thousands of elements, but very long + // compile times makes it unusuitable for this test. +#define GTEST_FLAT_TUPLE_INT8 int, int, int, int, int, int, int, int, +#define GTEST_FLAT_TUPLE_INT16 GTEST_FLAT_TUPLE_INT8 GTEST_FLAT_TUPLE_INT8 +#define GTEST_FLAT_TUPLE_INT32 GTEST_FLAT_TUPLE_INT16 GTEST_FLAT_TUPLE_INT16 +#define GTEST_FLAT_TUPLE_INT64 GTEST_FLAT_TUPLE_INT32 GTEST_FLAT_TUPLE_INT32 +#define GTEST_FLAT_TUPLE_INT128 GTEST_FLAT_TUPLE_INT64 GTEST_FLAT_TUPLE_INT64 +#define GTEST_FLAT_TUPLE_INT256 GTEST_FLAT_TUPLE_INT128 GTEST_FLAT_TUPLE_INT128 + + // Let's make sure that we can have a very long list of types without blowing + // up the template instantiation depth. + FlatTuple<GTEST_FLAT_TUPLE_INT256 int> tuple; + + tuple.Get<0>() = 7; + tuple.Get<99>() = 17; + tuple.Get<256>() = 1000; + EXPECT_EQ(7, tuple.Get<0>()); + EXPECT_EQ(17, tuple.Get<99>()); + EXPECT_EQ(1000, tuple.Get<256>()); +} + +// Tests SkipPrefix(). + +TEST(SkipPrefixTest, SkipsWhenPrefixMatches) { + const char* const str = "hello"; + + const char* p = str; + EXPECT_TRUE(SkipPrefix("", &p)); + EXPECT_EQ(str, p); + + p = str; + EXPECT_TRUE(SkipPrefix("hell", &p)); + EXPECT_EQ(str + 4, p); +} + +TEST(SkipPrefixTest, DoesNotSkipWhenPrefixDoesNotMatch) { + const char* const str = "world"; + + const char* p = str; + EXPECT_FALSE(SkipPrefix("W", &p)); + EXPECT_EQ(str, p); + + p = str; + EXPECT_FALSE(SkipPrefix("world!", &p)); + EXPECT_EQ(str, p); +} + +// Tests ad_hoc_test_result(). 
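+// ad_hoc_test_result() holds failures and properties recorded outside of any
+// individual test (e.g. in SetUpTestSuite); none should be present here.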
+TEST(AdHocTestResultTest, AdHocTestResultForUnitTestDoesNotShowFailure) { + const testing::TestResult& test_result = + testing::UnitTest::GetInstance()->ad_hoc_test_result(); + EXPECT_FALSE(test_result.Failed()); +} + +class DynamicUnitTestFixture : public testing::Test {}; + +class DynamicTest : public DynamicUnitTestFixture { + void TestBody() override { EXPECT_TRUE(true); } +}; + +auto* dynamic_test = testing::RegisterTest( + "DynamicUnitTestFixture", "DynamicTest", "TYPE", "VALUE", __FILE__, + __LINE__, []() -> DynamicUnitTestFixture* { return new DynamicTest; }); + +TEST(RegisterTest, WasRegistered) { + auto* unittest = testing::UnitTest::GetInstance(); + for (int i = 0; i < unittest->total_test_suite_count(); ++i) { + auto* tests = unittest->GetTestSuite(i); + if (tests->name() != std::string("DynamicUnitTestFixture")) continue; + for (int j = 0; j < tests->total_test_count(); ++j) { + if (tests->GetTestInfo(j)->name() != std::string("DynamicTest")) continue; + // Found it. + EXPECT_STREQ(tests->GetTestInfo(j)->value_param(), "VALUE"); + EXPECT_STREQ(tests->GetTestInfo(j)->type_param(), "TYPE"); + return; + } + } + + FAIL() << "Didn't find the test!"; +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_xml_outfile1_test_.cc b/security/nss/gtests/google_test/gtest/test/gtest_xml_outfile1_test_.cc new file mode 100644 index 0000000000..19aa252a30 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_xml_outfile1_test_.cc @@ -0,0 +1,43 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// +// gtest_xml_outfile1_test_ writes some xml via TestProperty used by +// gtest_xml_outfiles_test.py + +#include "gtest/gtest.h" + +class PropertyOne : public testing::Test { + protected: + void SetUp() override { RecordProperty("SetUpProp", 1); } + void TearDown() override { RecordProperty("TearDownProp", 1); } +}; + +TEST_F(PropertyOne, TestSomeProperties) { + RecordProperty("TestSomeProperty", 1); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_xml_outfile2_test_.cc b/security/nss/gtests/google_test/gtest/test/gtest_xml_outfile2_test_.cc new file mode 100644 index 0000000000..f9a2a6e984 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_xml_outfile2_test_.cc @@ -0,0 +1,43 @@ +// Copyright 2008, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// +// gtest_xml_outfile2_test_ writes some xml via TestProperty used by +// gtest_xml_outfiles_test.py + +#include "gtest/gtest.h" + +class PropertyTwo : public testing::Test { + protected: + void SetUp() override { RecordProperty("SetUpProp", 2); } + void TearDown() override { RecordProperty("TearDownProp", 2); } +}; + +TEST_F(PropertyTwo, TestSomeProperties) { + RecordProperty("TestSomeProperty", 2); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_xml_outfiles_test.py b/security/nss/gtests/google_test/gtest/test/gtest_xml_outfiles_test.py new file mode 100755 index 0000000000..ac66feb667 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_xml_outfiles_test.py @@ -0,0 +1,135 @@ +#!/usr/bin/env python +# +# Copyright 2008, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unit test for the gtest_xml_output module.""" + +import os +from xml.dom import minidom, Node +import gtest_test_utils +import gtest_xml_test_utils + +GTEST_OUTPUT_SUBDIR = "xml_outfiles" +GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_" +GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_" + +EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?> +<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests"> + <testsuite name="PropertyOne" tests="1" failures="0" skipped="0" disabled="0" errors="0" time="*" timestamp="*"> + <testcase name="TestSomeProperties" status="run" result="completed" time="*" timestamp="*" classname="PropertyOne"> + <properties> + <property name="SetUpProp" value="1"/> + <property name="TestSomeProperty" value="1"/> + <property name="TearDownProp" value="1"/> + </properties> + </testcase> + </testsuite> +</testsuites> +""" + +EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?> +<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests"> + <testsuite name="PropertyTwo" tests="1" failures="0" skipped="0" disabled="0" errors="0" time="*" timestamp="*"> + <testcase name="TestSomeProperties" status="run" result="completed" time="*" timestamp="*" classname="PropertyTwo"> + <properties> + <property name="SetUpProp" value="2"/> + <property name="TestSomeProperty" value="2"/> + <property name="TearDownProp" value="2"/> + </properties> + </testcase> + </testsuite> +</testsuites> +""" + + +class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase): + """Unit test for Google Test's XML output functionality.""" + + def setUp(self): + # We want the trailing '/' that the last "" provides in os.path.join, for + # telling Google Test to create an output directory instead of a single file + # for xml output. 
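+    # Illustration with hypothetical paths:
+    #   os.path.join("/tmp/gtest", "xml_outfiles", "") == "/tmp/gtest/xml_outfiles/"
+    # Because the value handed to --gtest_output=xml: ends in a path separator,
+    # Google Test treats it as a directory and writes one "<test binary>.xml"
+    # file into it, which is what _TestOutFile below relies on.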
+ self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(), + GTEST_OUTPUT_SUBDIR, "") + self.DeleteFilesAndDir() + + def tearDown(self): + self.DeleteFilesAndDir() + + def DeleteFilesAndDir(self): + try: + os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml")) + except os.error: + pass + try: + os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml")) + except os.error: + pass + try: + os.rmdir(self.output_dir_) + except os.error: + pass + + def testOutfile1(self): + self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1) + + def testOutfile2(self): + self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2) + + def _TestOutFile(self, test_name, expected_xml): + gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name) + command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_] + p = gtest_test_utils.Subprocess(command, + working_dir=gtest_test_utils.GetTempDir()) + self.assert_(p.exited) + self.assertEquals(0, p.exit_code) + + output_file_name1 = test_name + ".xml" + output_file1 = os.path.join(self.output_dir_, output_file_name1) + output_file_name2 = 'lt-' + output_file_name1 + output_file2 = os.path.join(self.output_dir_, output_file_name2) + self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2), + output_file1) + + expected = minidom.parseString(expected_xml) + if os.path.isfile(output_file1): + actual = minidom.parse(output_file1) + else: + actual = minidom.parse(output_file2) + self.NormalizeXml(actual.documentElement) + self.AssertEquivalentNodes(expected.documentElement, + actual.documentElement) + expected.unlink() + actual.unlink() + + +if __name__ == "__main__": + os.environ["GTEST_STACK_TRACE_DEPTH"] = "0" + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/gtest_xml_output_unittest.py b/security/nss/gtests/google_test/gtest/test/gtest_xml_output_unittest.py new file mode 100755 index 0000000000..eade7aac88 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_xml_output_unittest.py @@ -0,0 +1,415 @@ +#!/usr/bin/env python +# +# Copyright 2006, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unit test for the gtest_xml_output module""" + +import datetime +import errno +import os +import re +import sys +from xml.dom import minidom, Node + +import gtest_test_utils +import gtest_xml_test_utils + +GTEST_FILTER_FLAG = '--gtest_filter' +GTEST_LIST_TESTS_FLAG = '--gtest_list_tests' +GTEST_OUTPUT_FLAG = '--gtest_output' +GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.xml' +GTEST_PROGRAM_NAME = 'gtest_xml_output_unittest_' + +# The flag indicating stacktraces are not supported +NO_STACKTRACE_SUPPORT_FLAG = '--no_stacktrace_support' + +# The environment variables for test sharding. +TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS' +SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX' +SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE' + +SUPPORTS_STACK_TRACES = NO_STACKTRACE_SUPPORT_FLAG not in sys.argv + +if SUPPORTS_STACK_TRACES: + STACK_TRACE_TEMPLATE = '\nStack trace:\n*' +else: + STACK_TRACE_TEMPLATE = '' + # unittest.main() can't handle unknown flags + sys.argv.remove(NO_STACKTRACE_SUPPORT_FLAG) + +EXPECTED_NON_EMPTY_XML = """<?xml version="1.0" encoding="UTF-8"?> +<testsuites tests="26" failures="5" disabled="2" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42"> + <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/> + </testsuite> + <testsuite name="FailedTest" tests="1" failures="1" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="Fails" status="run" result="completed" time="*" timestamp="*" classname="FailedTest"> + <failure message="gtest_xml_output_unittest_.cc:*
Expected equality of these values:
 1
 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:* +Expected equality of these values: + 1 + 2%(stack)s]]></failure> + </testcase> + </testsuite> + <testsuite name="MixedResultTest" tests="3" failures="1" disabled="1" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="MixedResultTest"/> + <testcase name="Fails" status="run" result="completed" time="*" timestamp="*" classname="MixedResultTest"> + <failure message="gtest_xml_output_unittest_.cc:*
Expected equality of these values:
 1
 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:* +Expected equality of these values: + 1 + 2%(stack)s]]></failure> + <failure message="gtest_xml_output_unittest_.cc:*
Expected equality of these values:
 2
 3" type=""><![CDATA[gtest_xml_output_unittest_.cc:* +Expected equality of these values: + 2 + 3%(stack)s]]></failure> + </testcase> + <testcase name="DISABLED_test" status="notrun" result="suppressed" time="*" timestamp="*" classname="MixedResultTest"/> + </testsuite> + <testsuite name="XmlQuotingTest" tests="1" failures="1" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="OutputsCData" status="run" result="completed" time="*" timestamp="*" classname="XmlQuotingTest"> + <failure message="gtest_xml_output_unittest_.cc:*
Failed
XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]></top>" type=""><![CDATA[gtest_xml_output_unittest_.cc:* +Failed +XML output: <?xml encoding="utf-8"><top><![CDATA[cdata text]]>]]><![CDATA[</top>%(stack)s]]></failure> + </testcase> + </testsuite> + <testsuite name="InvalidCharactersTest" tests="1" failures="1" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="InvalidCharactersInMessage" status="run" result="completed" time="*" timestamp="*" classname="InvalidCharactersTest"> + <failure message="gtest_xml_output_unittest_.cc:*
Failed
Invalid characters in brackets []" type=""><![CDATA[gtest_xml_output_unittest_.cc:* +Failed +Invalid characters in brackets []%(stack)s]]></failure> + </testcase> + </testsuite> + <testsuite name="DisabledTest" tests="1" failures="0" disabled="1" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="DISABLED_test_not_run" status="notrun" result="suppressed" time="*" timestamp="*" classname="DisabledTest"/> + </testsuite> + <testsuite name="SkippedTest" tests="3" failures="1" disabled="0" skipped="2" errors="0" time="*" timestamp="*"> + <testcase name="Skipped" status="run" result="skipped" time="*" timestamp="*" classname="SkippedTest"> + <skipped message="gtest_xml_output_unittest_.cc:*
"><![CDATA[gtest_xml_output_unittest_.cc:* +%(stack)s]]></skipped> + </testcase> + <testcase name="SkippedWithMessage" status="run" result="skipped" time="*" timestamp="*" classname="SkippedTest"> + <skipped message="gtest_xml_output_unittest_.cc:*
It is good practice to tell why you skip a test."><![CDATA[gtest_xml_output_unittest_.cc:* +It is good practice to tell why you skip a test.%(stack)s]]></skipped> + </testcase> + <testcase name="SkippedAfterFailure" status="run" result="completed" time="*" timestamp="*" classname="SkippedTest"> + <failure message="gtest_xml_output_unittest_.cc:*
Expected equality of these values:
 1
 2" type=""><![CDATA[gtest_xml_output_unittest_.cc:* +Expected equality of these values: + 1 + 2%(stack)s]]></failure> + <skipped message="gtest_xml_output_unittest_.cc:*
It is good practice to tell why you skip a test."><![CDATA[gtest_xml_output_unittest_.cc:* +It is good practice to tell why you skip a test.%(stack)s]]></skipped> + </testcase> + + </testsuite> + <testsuite name="PropertyRecordingTest" tests="4" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*" SetUpTestSuite="yes" TearDownTestSuite="aye"> + <testcase name="OneProperty" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest"> + <properties> + <property name="key_1" value="1"/> + </properties> + </testcase> + <testcase name="IntValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest"> + <properties> + <property name="key_int" value="1"/> + </properties> + </testcase> + <testcase name="ThreeProperties" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest"> + <properties> + <property name="key_1" value="1"/> + <property name="key_2" value="2"/> + <property name="key_3" value="3"/> + </properties> + </testcase> + <testcase name="TwoValuesForOneKeyUsesLastValue" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest"> + <properties> + <property name="key_1" value="2"/> + </properties> + </testcase> + </testsuite> + <testsuite name="NoFixtureTest" tests="3" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="RecordProperty" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest"> + <properties> + <property name="key" value="1"/> + </properties> + </testcase> + <testcase name="ExternalUtilityThatCallsRecordIntValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest"> + <properties> + <property name="key_for_utility_int" value="1"/> + </properties> + </testcase> + <testcase name="ExternalUtilityThatCallsRecordStringValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="NoFixtureTest"> + <properties> + <property name="key_for_utility_string" value="1"/> + </properties> + </testcase> + </testsuite> + <testsuite name="Single/ValueParamTest" tests="4" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="HasValueParamAttribute/0" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" /> + <testcase name="HasValueParamAttribute/1" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" /> + <testcase name="AnotherTestThatHasValueParamAttribute/0" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" /> + <testcase name="AnotherTestThatHasValueParamAttribute/1" value_param="42" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" /> + </testsuite> + <testsuite name="TypedTest/0" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/0" /> + </testsuite> + <testsuite name="TypedTest/1" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="TypedTest/1" /> + </testsuite> + <testsuite name="Single/TypeParameterizedTestSuite/0" tests="1" failures="0" disabled="0" skipped="0" errors="0" 
time="*" timestamp="*"> + <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/0" /> + </testsuite> + <testsuite name="Single/TypeParameterizedTestSuite/1" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="HasTypeParamAttribute" type_param="*" status="run" result="completed" time="*" timestamp="*" classname="Single/TypeParameterizedTestSuite/1" /> + </testsuite> +</testsuites>""" % { + 'stack': STACK_TRACE_TEMPLATE +} + +EXPECTED_FILTERED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?> +<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" + timestamp="*" name="AllTests" ad_hoc_property="42"> + <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" skipped="0" + errors="0" time="*" timestamp="*"> + <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/> + </testsuite> +</testsuites>""" + +EXPECTED_SHARDED_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?> +<testsuites tests="3" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests" ad_hoc_property="42"> + <testsuite name="SuccessfulTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="Succeeds" status="run" result="completed" time="*" timestamp="*" classname="SuccessfulTest"/> + </testsuite> + <testsuite name="PropertyRecordingTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*" SetUpTestSuite="yes" TearDownTestSuite="aye"> + <testcase name="IntValuedProperty" status="run" result="completed" time="*" timestamp="*" classname="PropertyRecordingTest"> + <properties> + <property name="key_int" value="1"/> + </properties> + </testcase> + </testsuite> + <testsuite name="Single/ValueParamTest" tests="1" failures="0" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="HasValueParamAttribute/0" value_param="33" status="run" result="completed" time="*" timestamp="*" classname="Single/ValueParamTest" /> + </testsuite> +</testsuites>""" + +EXPECTED_NO_TEST_XML = """<?xml version="1.0" encoding="UTF-8"?> +<testsuites tests="0" failures="0" disabled="0" errors="0" time="*" + timestamp="*" name="AllTests"> + <testsuite name="NonTestSuiteFailure" tests="1" failures="1" disabled="0" skipped="0" errors="0" time="*" timestamp="*"> + <testcase name="" status="run" result="completed" time="*" timestamp="*" classname=""> + <failure message="gtest_no_test_unittest.cc:*
Expected equality of these values:
 1
 2" type=""><![CDATA[gtest_no_test_unittest.cc:* +Expected equality of these values: + 1 + 2%(stack)s]]></failure> + </testcase> + </testsuite> +</testsuites>""" % { + 'stack': STACK_TRACE_TEMPLATE +} + +GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME) + +SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess( + [GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output + + +class GTestXMLOutputUnitTest(gtest_xml_test_utils.GTestXMLTestCase): + """ + Unit test for Google Test's XML output functionality. + """ + + # This test currently breaks on platforms that do not support typed and + # type-parameterized tests, so we don't run it under them. + if SUPPORTS_TYPED_TESTS: + def testNonEmptyXmlOutput(self): + """ + Runs a test program that generates a non-empty XML output, and + tests that the XML output is expected. + """ + self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY_XML, 1) + + def testNoTestXmlOutput(self): + """Verifies XML output for a Google Test binary without actual tests. + + Runs a test program that generates an XML output for a binary without tests, + and tests that the XML output is expected. + """ + + self._TestXmlOutput('gtest_no_test_unittest', EXPECTED_NO_TEST_XML, 0) + + def testTimestampValue(self): + """Checks whether the timestamp attribute in the XML output is valid. + + Runs a test program that generates an empty XML output, and checks if + the timestamp attribute in the testsuites tag is valid. + """ + actual = self._GetXmlOutput('gtest_no_test_unittest', [], {}, 0) + date_time_str = actual.documentElement.getAttributeNode('timestamp').value + # datetime.strptime() is only available in Python 2.5+ so we have to + # parse the expected datetime manually. + match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str) + self.assertTrue( + re.match, + 'XML datettime string %s has incorrect format' % date_time_str) + date_time_from_xml = datetime.datetime( + year=int(match.group(1)), month=int(match.group(2)), + day=int(match.group(3)), hour=int(match.group(4)), + minute=int(match.group(5)), second=int(match.group(6))) + + time_delta = abs(datetime.datetime.now() - date_time_from_xml) + # timestamp value should be near the current local time + self.assertTrue(time_delta < datetime.timedelta(seconds=600), + 'time_delta is %s' % time_delta) + actual.unlink() + + def testDefaultOutputFile(self): + """ + Confirms that Google Test produces an XML output file with the expected + default name if no name is explicitly specified. + """ + output_file = os.path.join(gtest_test_utils.GetTempDir(), + GTEST_DEFAULT_OUTPUT_FILE) + gtest_prog_path = gtest_test_utils.GetTestExecutablePath( + 'gtest_no_test_unittest') + try: + os.remove(output_file) + except OSError: + e = sys.exc_info()[1] + if e.errno != errno.ENOENT: + raise + + p = gtest_test_utils.Subprocess( + [gtest_prog_path, '%s=xml' % GTEST_OUTPUT_FLAG], + working_dir=gtest_test_utils.GetTempDir()) + self.assert_(p.exited) + self.assertEquals(0, p.exit_code) + self.assert_(os.path.isfile(output_file)) + + def testSuppressedXmlOutput(self): + """ + Tests that no XML file is generated if the default XML listener is + shut down before RUN_ALL_TESTS is invoked. 
+ """ + + xml_path = os.path.join(gtest_test_utils.GetTempDir(), + GTEST_PROGRAM_NAME + 'out.xml') + if os.path.isfile(xml_path): + os.remove(xml_path) + + command = [GTEST_PROGRAM_PATH, + '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path), + '--shut_down_xml'] + p = gtest_test_utils.Subprocess(command) + if p.terminated_by_signal: + # p.signal is available only if p.terminated_by_signal is True. + self.assertFalse( + p.terminated_by_signal, + '%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal)) + else: + self.assert_(p.exited) + self.assertEquals(1, p.exit_code, + "'%s' exited with code %s, which doesn't match " + 'the expected exit code %s.' + % (command, p.exit_code, 1)) + + self.assert_(not os.path.isfile(xml_path)) + + def testFilteredTestXmlOutput(self): + """Verifies XML output when a filter is applied. + + Runs a test program that executes only some tests and verifies that + non-selected tests do not show up in the XML output. + """ + + self._TestXmlOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED_TEST_XML, 0, + extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG]) + + def testShardedTestXmlOutput(self): + """Verifies XML output when run using multiple shards. + + Runs a test program that executes only one shard and verifies that tests + from other shards do not show up in the XML output. + """ + + self._TestXmlOutput( + GTEST_PROGRAM_NAME, + EXPECTED_SHARDED_TEST_XML, + 0, + extra_env={SHARD_INDEX_ENV_VAR: '0', + TOTAL_SHARDS_ENV_VAR: '10'}) + + def _GetXmlOutput(self, gtest_prog_name, extra_args, extra_env, + expected_exit_code): + """ + Returns the xml output generated by running the program gtest_prog_name. + Furthermore, the program's exit code must be expected_exit_code. + """ + xml_path = os.path.join(gtest_test_utils.GetTempDir(), + gtest_prog_name + 'out.xml') + gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name) + + command = ([gtest_prog_path, '%s=xml:%s' % (GTEST_OUTPUT_FLAG, xml_path)] + + extra_args) + environ_copy = os.environ.copy() + if extra_env: + environ_copy.update(extra_env) + p = gtest_test_utils.Subprocess(command, env=environ_copy) + + if p.terminated_by_signal: + self.assert_(False, + '%s was killed by signal %d' % (gtest_prog_name, p.signal)) + else: + self.assert_(p.exited) + self.assertEquals(expected_exit_code, p.exit_code, + "'%s' exited with code %s, which doesn't match " + 'the expected exit code %s.' + % (command, p.exit_code, expected_exit_code)) + actual = minidom.parse(xml_path) + return actual + + def _TestXmlOutput(self, gtest_prog_name, expected_xml, + expected_exit_code, extra_args=None, extra_env=None): + """ + Asserts that the XML document generated by running the program + gtest_prog_name matches expected_xml, a string containing another + XML document. Furthermore, the program's exit code must be + expected_exit_code. 
+ """ + + actual = self._GetXmlOutput(gtest_prog_name, extra_args or [], + extra_env or {}, expected_exit_code) + expected = minidom.parseString(expected_xml) + self.NormalizeXml(actual.documentElement) + self.AssertEquivalentNodes(expected.documentElement, + actual.documentElement) + expected.unlink() + actual.unlink() + + +if __name__ == '__main__': + os.environ['GTEST_STACK_TRACE_DEPTH'] = '1' + gtest_test_utils.Main() diff --git a/security/nss/gtests/google_test/gtest/test/gtest_xml_output_unittest_.cc b/security/nss/gtests/google_test/gtest/test/gtest_xml_output_unittest_.cc new file mode 100644 index 0000000000..c0036aaef9 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_xml_output_unittest_.cc @@ -0,0 +1,193 @@ +// Copyright 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Unit test for Google Test XML output. +// +// A user can specify XML output in a Google Test program to run via +// either the GTEST_OUTPUT environment variable or the --gtest_output +// flag. This is used for testing such functionality. +// +// This program will be invoked from a Python unit test. Don't run it +// directly. 
+ +#include "gtest/gtest.h" + +using ::testing::InitGoogleTest; +using ::testing::TestEventListeners; +using ::testing::TestWithParam; +using ::testing::UnitTest; +using ::testing::Test; +using ::testing::Values; + +class SuccessfulTest : public Test { +}; + +TEST_F(SuccessfulTest, Succeeds) { + SUCCEED() << "This is a success."; + ASSERT_EQ(1, 1); +} + +class FailedTest : public Test { +}; + +TEST_F(FailedTest, Fails) { + ASSERT_EQ(1, 2); +} + +class DisabledTest : public Test { +}; + +TEST_F(DisabledTest, DISABLED_test_not_run) { + FAIL() << "Unexpected failure: Disabled test should not be run"; +} + +class SkippedTest : public Test { +}; + +TEST_F(SkippedTest, Skipped) { + GTEST_SKIP(); +} + +TEST_F(SkippedTest, SkippedWithMessage) { + GTEST_SKIP() << "It is good practice to tell why you skip a test."; +} + +TEST_F(SkippedTest, SkippedAfterFailure) { + EXPECT_EQ(1, 2); + GTEST_SKIP() << "It is good practice to tell why you skip a test."; +} + +TEST(MixedResultTest, Succeeds) { + EXPECT_EQ(1, 1); + ASSERT_EQ(1, 1); +} + +TEST(MixedResultTest, Fails) { + EXPECT_EQ(1, 2); + ASSERT_EQ(2, 3); +} + +TEST(MixedResultTest, DISABLED_test) { + FAIL() << "Unexpected failure: Disabled test should not be run"; +} + +TEST(XmlQuotingTest, OutputsCData) { + FAIL() << "XML output: " + "<?xml encoding=\"utf-8\"><top><![CDATA[cdata text]]></top>"; +} + +// Helps to test that invalid characters produced by test code do not make +// it into the XML file. +TEST(InvalidCharactersTest, InvalidCharactersInMessage) { + FAIL() << "Invalid characters in brackets [\x1\x2]"; +} + +class PropertyRecordingTest : public Test { + public: + static void SetUpTestSuite() { RecordProperty("SetUpTestSuite", "yes"); } + static void TearDownTestSuite() { + RecordProperty("TearDownTestSuite", "aye"); + } +}; + +TEST_F(PropertyRecordingTest, OneProperty) { + RecordProperty("key_1", "1"); +} + +TEST_F(PropertyRecordingTest, IntValuedProperty) { + RecordProperty("key_int", 1); +} + +TEST_F(PropertyRecordingTest, ThreeProperties) { + RecordProperty("key_1", "1"); + RecordProperty("key_2", "2"); + RecordProperty("key_3", "3"); +} + +TEST_F(PropertyRecordingTest, TwoValuesForOneKeyUsesLastValue) { + RecordProperty("key_1", "1"); + RecordProperty("key_1", "2"); +} + +TEST(NoFixtureTest, RecordProperty) { + RecordProperty("key", "1"); +} + +void ExternalUtilityThatCallsRecordProperty(const std::string& key, int value) { + testing::Test::RecordProperty(key, value); +} + +void ExternalUtilityThatCallsRecordProperty(const std::string& key, + const std::string& value) { + testing::Test::RecordProperty(key, value); +} + +TEST(NoFixtureTest, ExternalUtilityThatCallsRecordIntValuedProperty) { + ExternalUtilityThatCallsRecordProperty("key_for_utility_int", 1); +} + +TEST(NoFixtureTest, ExternalUtilityThatCallsRecordStringValuedProperty) { + ExternalUtilityThatCallsRecordProperty("key_for_utility_string", "1"); +} + +// Verifies that the test parameter value is output in the 'value_param' +// XML attribute for value-parameterized tests. +class ValueParamTest : public TestWithParam<int> {}; +TEST_P(ValueParamTest, HasValueParamAttribute) {} +TEST_P(ValueParamTest, AnotherTestThatHasValueParamAttribute) {} +INSTANTIATE_TEST_SUITE_P(Single, ValueParamTest, Values(33, 42)); + +// Verifies that the type parameter name is output in the 'type_param' +// XML attribute for typed tests. 
+template <typename T> class TypedTest : public Test {}; +typedef testing::Types<int, long> TypedTestTypes; +TYPED_TEST_SUITE(TypedTest, TypedTestTypes); +TYPED_TEST(TypedTest, HasTypeParamAttribute) {} + +// Verifies that the type parameter name is output in the 'type_param' +// XML attribute for type-parameterized tests. +template <typename T> +class TypeParameterizedTestSuite : public Test {}; +TYPED_TEST_SUITE_P(TypeParameterizedTestSuite); +TYPED_TEST_P(TypeParameterizedTestSuite, HasTypeParamAttribute) {} +REGISTER_TYPED_TEST_SUITE_P(TypeParameterizedTestSuite, HasTypeParamAttribute); +typedef testing::Types<int, long> TypeParameterizedTestSuiteTypes; // NOLINT +INSTANTIATE_TYPED_TEST_SUITE_P(Single, TypeParameterizedTestSuite, + TypeParameterizedTestSuiteTypes); + +int main(int argc, char** argv) { + InitGoogleTest(&argc, argv); + + if (argc > 1 && strcmp(argv[1], "--shut_down_xml") == 0) { + TestEventListeners& listeners = UnitTest::GetInstance()->listeners(); + delete listeners.Release(listeners.default_xml_generator()); + } + testing::Test::RecordProperty("ad_hoc_property", "42"); + return RUN_ALL_TESTS(); +} diff --git a/security/nss/gtests/google_test/gtest/test/gtest_xml_test_utils.py b/security/nss/gtests/google_test/gtest/test/gtest_xml_test_utils.py new file mode 100755 index 0000000000..ec42c62c3b --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/gtest_xml_test_utils.py @@ -0,0 +1,197 @@ +# Copyright 2006, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""Unit test utilities for gtest_xml_output""" + +import re +from xml.dom import minidom, Node +import gtest_test_utils + +GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.xml' + +class GTestXMLTestCase(gtest_test_utils.TestCase): + """ + Base class for tests of Google Test's XML output functionality. 
+ """ + + + def AssertEquivalentNodes(self, expected_node, actual_node): + """ + Asserts that actual_node (a DOM node object) is equivalent to + expected_node (another DOM node object), in that either both of + them are CDATA nodes and have the same value, or both are DOM + elements and actual_node meets all of the following conditions: + + * It has the same tag name as expected_node. + * It has the same set of attributes as expected_node, each with + the same value as the corresponding attribute of expected_node. + Exceptions are any attribute named "time", which needs only be + convertible to a floating-point number and any attribute named + "type_param" which only has to be non-empty. + * It has an equivalent set of child nodes (including elements and + CDATA sections) as expected_node. Note that we ignore the + order of the children as they are not guaranteed to be in any + particular order. + """ + + if expected_node.nodeType == Node.CDATA_SECTION_NODE: + self.assertEquals(Node.CDATA_SECTION_NODE, actual_node.nodeType) + self.assertEquals(expected_node.nodeValue, actual_node.nodeValue) + return + + self.assertEquals(Node.ELEMENT_NODE, actual_node.nodeType) + self.assertEquals(Node.ELEMENT_NODE, expected_node.nodeType) + self.assertEquals(expected_node.tagName, actual_node.tagName) + + expected_attributes = expected_node.attributes + actual_attributes = actual_node.attributes + self.assertEquals( + expected_attributes.length, actual_attributes.length, + 'attribute numbers differ in element %s:\nExpected: %r\nActual: %r' % ( + actual_node.tagName, expected_attributes.keys(), + actual_attributes.keys())) + for i in range(expected_attributes.length): + expected_attr = expected_attributes.item(i) + actual_attr = actual_attributes.get(expected_attr.name) + self.assert_( + actual_attr is not None, + 'expected attribute %s not found in element %s' % + (expected_attr.name, actual_node.tagName)) + self.assertEquals( + expected_attr.value, actual_attr.value, + ' values of attribute %s in element %s differ: %s vs %s' % + (expected_attr.name, actual_node.tagName, + expected_attr.value, actual_attr.value)) + + expected_children = self._GetChildren(expected_node) + actual_children = self._GetChildren(actual_node) + self.assertEquals( + len(expected_children), len(actual_children), + 'number of child elements differ in element ' + actual_node.tagName) + for child_id, child in expected_children.items(): + self.assert_(child_id in actual_children, + '<%s> is not in <%s> (in element %s)' % + (child_id, actual_children, actual_node.tagName)) + self.AssertEquivalentNodes(child, actual_children[child_id]) + + identifying_attribute = { + 'testsuites': 'name', + 'testsuite': 'name', + 'testcase': 'name', + 'failure': 'message', + 'skipped': 'message', + 'property': 'name', + } + + def _GetChildren(self, element): + """ + Fetches all of the child nodes of element, a DOM Element object. + Returns them as the values of a dictionary keyed by the IDs of the + children. For <testsuites>, <testsuite>, <testcase>, and <property> + elements, the ID is the value of their "name" attribute; for <failure> + elements, it is the value of the "message" attribute; for <properties> + elements, it is the value of their parent's "name" attribute plus the + literal string "properties"; CDATA sections and non-whitespace + text nodes are concatenated into a single CDATA section with ID + "detail". 
An exception is raised if any element other than the above is
+    encountered, if two child elements with the same identifying
+    attributes are encountered, or if any other type of node is encountered.
+    """
+
+    children = {}
+    for child in element.childNodes:
+      if child.nodeType == Node.ELEMENT_NODE:
+        if child.tagName == 'properties':
+          self.assert_(child.parentNode is not None,
+                       'Encountered <properties> element without a parent')
+          child_id = child.parentNode.getAttribute('name') + '-properties'
+        else:
+          self.assert_(child.tagName in self.identifying_attribute,
+                       'Encountered unknown element <%s>' % child.tagName)
+          child_id = child.getAttribute(
+              self.identifying_attribute[child.tagName])
+        self.assert_(child_id not in children)
+        children[child_id] = child
+      elif child.nodeType in [Node.TEXT_NODE, Node.CDATA_SECTION_NODE]:
+        if 'detail' not in children:
+          if (child.nodeType == Node.CDATA_SECTION_NODE or
+              not child.nodeValue.isspace()):
+            children['detail'] = child.ownerDocument.createCDATASection(
+                child.nodeValue)
+        else:
+          children['detail'].nodeValue += child.nodeValue
+      else:
+        self.fail('Encountered unexpected node type %d' % child.nodeType)
+    return children
+
+  def NormalizeXml(self, element):
+    """
+    Normalizes Google Test's XML output to eliminate references to transient
+    information that may change from run to run.
+
+    * The "time" attribute of <testsuites>, <testsuite> and <testcase>
+      elements is replaced with a single asterisk, if it contains
+      only digit characters.
+    * The "timestamp" attribute of <testsuites> elements is replaced with a
+      single asterisk, if it contains a valid ISO8601 datetime value.
+    * The "type_param" attribute of <testcase> elements is replaced with a
+      single asterisk (if it is non-empty) as it is the type name returned
+      by the compiler and is platform dependent.
+    * The line info reported in the first line of the "message"
+      attribute and CDATA section of <failure> elements is replaced with the
+      file's basename and a single asterisk for the line number.
+    * The directory names in file paths are removed.
+    * The stack traces are removed.
+    """
+
+    if element.tagName in ('testsuites', 'testsuite', 'testcase'):
+      timestamp = element.getAttributeNode('timestamp')
+      timestamp.value = re.sub(r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\d$',
+                               '*', timestamp.value)
+    if element.tagName in ('testsuites', 'testsuite', 'testcase'):
+      time = element.getAttributeNode('time')
+      time.value = re.sub(r'^\d+(\.\d+)?$', '*', time.value)
+      type_param = element.getAttributeNode('type_param')
+      if type_param and type_param.value:
+        type_param.value = '*'
+    elif element.tagName == 'failure' or element.tagName == 'skipped':
+      source_line_pat = r'^.*[/\\](.*:)\d+\n'
+      # Replaces the source line information with a normalized form.
+      message = element.getAttributeNode('message')
+      message.value = re.sub(source_line_pat, '\\1*\n', message.value)
+      for child in element.childNodes:
+        if child.nodeType == Node.CDATA_SECTION_NODE:
+          # Replaces the source line information with a normalized form.
+          cdata = re.sub(source_line_pat, '\\1*\n', child.nodeValue)
+          # Removes the actual stack trace.
+ child.nodeValue = re.sub(r'Stack trace:\n(.|\n)*', + 'Stack trace:\n*', cdata) + for child in element.childNodes: + if child.nodeType == Node.ELEMENT_NODE: + self.NormalizeXml(child) diff --git a/security/nss/gtests/google_test/gtest/test/production.cc b/security/nss/gtests/google_test/gtest/test/production.cc new file mode 100644 index 0000000000..0f69f6dbd2 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/production.cc @@ -0,0 +1,35 @@ +// Copyright 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +// This is part of the unit test for gtest_prod.h. + +#include "production.h" + +PrivateCode::PrivateCode() : x_(0) {} diff --git a/security/nss/gtests/google_test/gtest/test/production.h b/security/nss/gtests/google_test/gtest/test/production.h new file mode 100644 index 0000000000..41a5472254 --- /dev/null +++ b/security/nss/gtests/google_test/gtest/test/production.h @@ -0,0 +1,54 @@ +// Copyright 2006, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+//
+// This is part of the unit test for gtest_prod.h.
+
+#ifndef GOOGLETEST_TEST_PRODUCTION_H_
+#define GOOGLETEST_TEST_PRODUCTION_H_
+
+#include "gtest/gtest_prod.h"
+
+class PrivateCode {
+ public:
+  // Declares a friend test that does not use a fixture.
+  FRIEND_TEST(PrivateCodeTest, CanAccessPrivateMembers);
+
+  // Declares a friend test that uses a fixture.
+  FRIEND_TEST(PrivateCodeFixtureTest, CanAccessPrivateMembers);
+
+  PrivateCode();
+
+  int x() const { return x_; }
+ private:
+  void set_x(int an_x) { x_ = an_x; }
+  int x_;
+};
+
+#endif  // GOOGLETEST_TEST_PRODUCTION_H_
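For context, a minimal sketch of how a test might consume the FRIEND_TEST declarations above (illustrative only; the file name production_test.cc and the test body are assumptions, not part of this commit). The test name is dictated by the declaration: FRIEND_TEST(PrivateCodeTest, CanAccessPrivateMembers) befriends exactly the class that TEST(PrivateCodeTest, CanAccessPrivateMembers) generates, so that test body may call the private setter.

// production_test.cc (hypothetical companion file)
#include "gtest/gtest.h"
#include "production.h"

// The FRIEND_TEST declaration in production.h makes this generated test class
// a friend of PrivateCode, so the private set_x() is reachable here.
TEST(PrivateCodeTest, CanAccessPrivateMembers) {
  PrivateCode a;
  EXPECT_EQ(0, a.x());  // the constructor initializes x_ to 0
  a.set_x(1);           // private, accessible only because of FRIEND_TEST
  EXPECT_EQ(1, a.x());
}

The fixture flavor works the same way: a TEST_F whose fixture is named PrivateCodeFixtureTest and whose test is named CanAccessPrivateMembers would match the second FRIEND_TEST declaration.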