Diffstat (limited to 'googletest/samples/sample9_unittest.cc')
-rw-r--r-- | googletest/samples/sample9_unittest.cc | 18
1 file changed, 9 insertions, 9 deletions
diff --git a/googletest/samples/sample9_unittest.cc b/googletest/samples/sample9_unittest.cc
index 53f9af5ba1df..e502d08d73e7 100644
--- a/googletest/samples/sample9_unittest.cc
+++ b/googletest/samples/sample9_unittest.cc
@@ -49,16 +49,16 @@ namespace {
 class TersePrinter : public EmptyTestEventListener {
  private:
   // Called before any test activity starts.
-  virtual void OnTestProgramStart(const UnitTest& /* unit_test */) {}
+  void OnTestProgramStart(const UnitTest& /* unit_test */) override {}
 
   // Called after all test activities have ended.
-  virtual void OnTestProgramEnd(const UnitTest& unit_test) {
+  void OnTestProgramEnd(const UnitTest& unit_test) override {
     fprintf(stdout, "TEST %s\n", unit_test.Passed() ? "PASSED" : "FAILED");
     fflush(stdout);
   }
 
   // Called before a test starts.
-  virtual void OnTestStart(const TestInfo& test_info) {
+  void OnTestStart(const TestInfo& test_info) override {
     fprintf(stdout,
             "*** Test %s.%s starting.\n",
             test_info.test_case_name(),
@@ -67,7 +67,7 @@ class TersePrinter : public EmptyTestEventListener {
   }
 
   // Called after a failed assertion or a SUCCEED() invocation.
-  virtual void OnTestPartResult(const TestPartResult& test_part_result) {
+  void OnTestPartResult(const TestPartResult& test_part_result) override {
     fprintf(stdout,
             "%s in %s:%d\n%s\n",
             test_part_result.failed() ? "*** Failure" : "Success",
@@ -78,7 +78,7 @@ class TersePrinter : public EmptyTestEventListener {
   }
 
   // Called after a test ends.
-  virtual void OnTestEnd(const TestInfo& test_info) {
+  void OnTestEnd(const TestInfo& test_info) override {
     fprintf(stdout,
             "*** Test %s.%s ending.\n",
             test_info.test_case_name(),
@@ -135,10 +135,10 @@ int main(int argc, char **argv) {
   // This is an example of using the UnitTest reflection API to inspect test
   // results. Here we discount failures from the tests we expected to fail.
   int unexpectedly_failed_tests = 0;
-  for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
-    const TestCase& test_case = *unit_test.GetTestCase(i);
-    for (int j = 0; j < test_case.total_test_count(); ++j) {
-      const TestInfo& test_info = *test_case.GetTestInfo(j);
+  for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
+    const testing::TestSuite& test_suite = *unit_test.GetTestSuite(i);
+    for (int j = 0; j < test_suite.total_test_count(); ++j) {
+      const TestInfo& test_info = *test_suite.GetTestInfo(j);
       // Counts failed tests that were not meant to fail (those without
       // 'Fails' in the name).
       if (test_info.result()->Failed() &&
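
For context, the patch does two things: the event-listener overrides switch from `virtual` to `override`, and the hunk at line 135 moves from the older `TestCase`/`GetTestCase` reflection API to the `TestSuite`/`GetTestSuite` API. Below is a minimal standalone sketch (not part of the patch) showing the same pattern end to end: a terse listener in the style of sample9 plus the TestSuite reflection loop. It assumes GoogleTest 1.10 or newer; the test and class names (`DemoSuite`, `MiniPrinter`) are illustrative only.

// Minimal sketch, assuming GoogleTest >= 1.10 (TestSuite reflection API).
#include <cstdio>
#include "gtest/gtest.h"

namespace {

// Illustrative test; the name is hypothetical.
TEST(DemoSuite, AlwaysPasses) { EXPECT_EQ(2 + 2, 4); }

// A stripped-down listener in the style of sample9's TersePrinter.
class MiniPrinter : public testing::EmptyTestEventListener {
 private:
  // Called after a failed assertion or a SUCCEED() invocation.
  void OnTestPartResult(const testing::TestPartResult& result) override {
    fprintf(stdout, "%s in %s:%d\n",
            result.failed() ? "*** Failure" : "Success",
            result.file_name(), result.line_number());
  }
};

}  // namespace

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);

  testing::UnitTest& unit_test = *testing::UnitTest::GetInstance();
  testing::TestEventListeners& listeners = unit_test.listeners();
  // Swap the default printer for the terse one. Release() hands ownership
  // back to us, so the returned pointer must be deleted.
  delete listeners.Release(listeners.default_result_printer());
  listeners.Append(new MiniPrinter);  // Append() takes ownership.

  int ret = RUN_ALL_TESTS();

  // Same reflection loop as the patched sample9: iterate suites, then tests.
  for (int i = 0; i < unit_test.total_test_suite_count(); ++i) {
    const testing::TestSuite& test_suite = *unit_test.GetTestSuite(i);
    for (int j = 0; j < test_suite.total_test_count(); ++j) {
      const testing::TestInfo& test_info = *test_suite.GetTestInfo(j);
      fprintf(stdout, "%s.%s: %s\n", test_info.test_suite_name(),
              test_info.name(),
              test_info.result()->Failed() ? "FAILED" : "passed");
    }
  }
  return ret;
}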