Mirror of https://github.com/google/googletest.git
Adds --gtest_print_time for printing the elapsed time of tests.
commit 15cbe5f70a
parent dbc56bf84b
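In practice the new flag annotates the pretty printer's output with elapsed milliseconds. Below is a minimal, hypothetical example (the file name, suite name, and timing values are made up for illustration; the output shape follows the golden files in this commit):

  // sample_test.cc: illustrative only. Link against gtest_main, or supply
  // a main() that calls testing::InitGoogleTest(&argc, argv) and then
  // RUN_ALL_TESTS().
  #include <gtest/gtest.h>

  TEST(SampleSuite, Adds) {
    EXPECT_EQ(4, 2 + 2);
  }

  // $ ./sample_test --gtest_print_time
  // [ RUN      ] SampleSuite.Adds
  // [       OK ] SampleSuite.Adds (0 ms)
  // [----------] 1 test from SampleSuite (0 ms total)
  // [==========] 1 test from 1 test case ran. (0 ms total)
  // [  PASSED  ] 1 test.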
@@ -70,6 +70,7 @@ GTEST_DECLARE_string(color);
 GTEST_DECLARE_string(filter);
 GTEST_DECLARE_bool(list_tests);
 GTEST_DECLARE_string(output);
+GTEST_DECLARE_bool(print_time);
 GTEST_DECLARE_int32(repeat);
 GTEST_DECLARE_int32(stack_trace_depth);
 GTEST_DECLARE_bool(show_internal_stack_frames);
@@ -79,10 +80,11 @@ namespace internal {
 // Names of the flags (needed for parsing Google Test flags).
 const char kBreakOnFailureFlag[] = "break_on_failure";
 const char kCatchExceptionsFlag[] = "catch_exceptions";
 const char kColorFlag[] = "color";
 const char kFilterFlag[] = "filter";
 const char kListTestsFlag[] = "list_tests";
 const char kOutputFlag[] = "output";
 const char kColorFlag[] = "color";
+const char kPrintTimeFlag[] = "print_time";
 const char kRepeatFlag[] = "repeat";
 
 // This class saves the values of all Google Test flags in its c'tor, and
@@ -99,6 +101,7 @@ class GTestFlagSaver {
     internal_run_death_test_ = GTEST_FLAG(internal_run_death_test);
     list_tests_ = GTEST_FLAG(list_tests);
     output_ = GTEST_FLAG(output);
+    print_time_ = GTEST_FLAG(print_time);
     repeat_ = GTEST_FLAG(repeat);
   }
@@ -112,6 +115,7 @@ class GTestFlagSaver {
     GTEST_FLAG(internal_run_death_test) = internal_run_death_test_;
     GTEST_FLAG(list_tests) = list_tests_;
     GTEST_FLAG(output) = output_;
+    GTEST_FLAG(print_time) = print_time_;
     GTEST_FLAG(repeat) = repeat_;
   }
  private:
@@ -124,6 +128,7 @@ class GTestFlagSaver {
   String internal_run_death_test_;
   bool list_tests_;
   String output_;
+  bool print_time_;
   bool pretty_;
   internal::Int32 repeat_;
 } GTEST_ATTRIBUTE_UNUSED;
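The three GTestFlagSaver hunks above follow one pattern: snapshot each flag in the constructor, restore it in the destructor, and keep a mirror member for the saved value. A stripped-down sketch of that RAII idea, with hypothetical names (this is not gtest code, just the shape of it):

  // Saves a value at construction and restores it at destruction, so a
  // test can modify the watched value freely inside a scope.
  template <typename T>
  class ScopedValueSaver {
   public:
    explicit ScopedValueSaver(T* target) : target_(target), saved_(*target) {}
    ~ScopedValueSaver() { *target_ = saved_; }
   private:
    T* const target_;
    const T saved_;
  };

Supporting a new flag therefore means touching exactly three places (snapshot, restore, member), which is what the print_time hunks do.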
src/gtest.cc (36 changed lines)
@@ -169,6 +169,12 @@ GTEST_DEFINE_string(
     "executable's name and, if necessary, made unique by adding "
     "digits.");
 
+GTEST_DEFINE_bool(
+    print_time,
+    internal::BoolFromGTestEnv("print_time", false),
+    "True iff " GTEST_NAME
+    " should display elapsed time in text output.");
+
 GTEST_DEFINE_int32(
     repeat,
     internal::Int32FromGTestEnv("repeat", 1),
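internal::BoolFromGTestEnv("print_time", false) makes the environment the source of the default, so the behavior can also be switched on with GTEST_PRINT_TIME=1 before any flag parsing happens. A rough stand-in for that helper, assuming (as gtest does) that the variable name is GTEST_ plus the upper-cased flag name and that only the value "0" reads as false:

  #include <cctype>
  #include <cstdlib>
  #include <string>

  // Illustrative sketch, not gtest's actual implementation.
  bool BoolFromEnvSketch(const char* flag, bool default_value) {
    std::string name = "GTEST_";
    for (const char* p = flag; *p != '\0'; ++p)
      name += static_cast<char>(std::toupper(static_cast<unsigned char>(*p)));
    const char* value = std::getenv(name.c_str());  // e.g. GTEST_PRINT_TIME
    return value == NULL ? default_value : std::string(value) != "0";
  }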
@@ -2303,6 +2309,7 @@ class PrettyUnitTestResultPrinter : public UnitTestEventListenerInterface {
   virtual void OnUnitTestStart(const UnitTest * unit_test);
   virtual void OnGlobalSetUpStart(const UnitTest*);
   virtual void OnTestCaseStart(const TestCase * test_case);
+  virtual void OnTestCaseEnd(const TestCase * test_case);
   virtual void OnTestStart(const TestInfo * test_info);
   virtual void OnNewTestPartResult(const TestPartResult * result);
   virtual void OnTestEnd(const TestInfo * test_info);
@@ -2349,6 +2356,20 @@ void PrettyUnitTestResultPrinter::OnTestCaseStart(
   fflush(stdout);
 }
 
+void PrettyUnitTestResultPrinter::OnTestCaseEnd(
+    const TestCase * test_case) {
+  if (!GTEST_FLAG(print_time)) return;
+
+  test_case_name_ = test_case->name();
+  const internal::String counts =
+      FormatCountableNoun(test_case->test_to_run_count(), "test", "tests");
+  ColoredPrintf(COLOR_GREEN, "[----------] ");
+  printf("%s from %s (%s ms total)\n\n",
+         counts.c_str(), test_case_name_.c_str(),
+         internal::StreamableToString(test_case->elapsed_time()).c_str());
+  fflush(stdout);
+}
+
 void PrettyUnitTestResultPrinter::OnTestStart(const TestInfo * test_info) {
   ColoredPrintf(COLOR_GREEN, "[ RUN      ] ");
   PrintTestName(test_case_name_.c_str(), test_info->name());
@@ -2363,7 +2384,12 @@ void PrettyUnitTestResultPrinter::OnTestEnd(const TestInfo * test_info) {
     ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
   }
   PrintTestName(test_case_name_.c_str(), test_info->name());
-  printf("\n");
+  if (GTEST_FLAG(print_time)) {
+    printf(" (%s ms)\n", internal::StreamableToString(
+        test_info->result()->elapsed_time()).c_str());
+  } else {
+    printf("\n");
+  }
   fflush(stdout);
 }
@@ -2420,9 +2446,14 @@ void PrettyUnitTestResultPrinter::OnUnitTestEnd(
   const internal::UnitTestImpl* const impl = unit_test->impl();
 
   ColoredPrintf(COLOR_GREEN, "[==========] ");
-  printf("%s from %s ran.\n",
+  printf("%s from %s ran.",
          FormatTestCount(impl->test_to_run_count()).c_str(),
          FormatTestCaseCount(impl->test_case_to_run_count()).c_str());
+  if (GTEST_FLAG(print_time)) {
+    printf(" (%s ms total)",
+           internal::StreamableToString(impl->elapsed_time()).c_str());
+  }
+  printf("\n");
   ColoredPrintf(COLOR_GREEN, "[  PASSED  ] ");
   printf("%s.\n", FormatTestCount(impl->successful_test_count()).c_str());
@@ -3505,6 +3536,7 @@ void InitGoogleTestImpl(int* argc, CharType** argv) {
                         &GTEST_FLAG(internal_run_death_test)) ||
         ParseBoolFlag(arg, kListTestsFlag, &GTEST_FLAG(list_tests)) ||
         ParseStringFlag(arg, kOutputFlag, &GTEST_FLAG(output)) ||
+        ParseBoolFlag(arg, kPrintTimeFlag, &GTEST_FLAG(print_time)) ||
         ParseInt32Flag(arg, kRepeatFlag, &GTEST_FLAG(repeat))
         ) {
       // Yes. Shift the remainder of the argv list left by one. Note
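The InitGoogleTestTest cases added further down pin the accepted spellings: a bare --gtest_print_time parses as true, =1 is true, and =0, =f, and =F are false. A hypothetical parser with those semantics (ParseBoolFlag's real definition lives elsewhere in this file and is not part of the diff):

  #include <cstring>
  #include <string>

  // Illustrative sketch: matches "--gtest_<flag>" or "--gtest_<flag>=<value>"
  // and reports the boolean through *out; returns false when arg is some
  // other flag entirely.
  bool ParseBoolFlagSketch(const char* arg, const char* flag, bool* out) {
    const std::string prefix = std::string("--gtest_") + flag;
    if (std::strncmp(arg, prefix.c_str(), prefix.size()) != 0) return false;
    const char* rest = arg + prefix.size();
    if (*rest == '\0') {  // bare "--gtest_print_time" means true
      *out = true;
      return true;
    }
    if (*rest != '=') return false;  // e.g. a longer flag with this prefix
    const char c = rest[1];
    *out = !(c == '0' || c == 'f' || c == 'F');  // "=0", "=f", "=F" are false
    return true;
  }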
@@ -58,8 +58,10 @@ else:
 PROGRAM = 'gtest_output_test_'
 GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
 
-COMMAND = os.path.join(gtest_test_utils.GetBuildDir(),
-                       PROGRAM) + ' --gtest_color=yes'
+PROGRAM_PATH = os.path.join(gtest_test_utils.GetBuildDir(), PROGRAM)
+COMMAND_WITH_COLOR = PROGRAM_PATH + ' --gtest_color=yes'
+COMMAND_WITH_TIME = (PROGRAM_PATH + ' --gtest_print_time '
+                     + '--gtest_filter="FatalFailureTest.*:LoggingTest.*"')
 
 GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(),
                            GOLDEN_NAME)
@@ -94,12 +96,19 @@ def RemoveStackTraces(output):
                 'Stack trace: (omitted)\n\n', output)
 
 
+def RemoveTime(output):
+  """Removes all time information from a Google Test program's output."""
+
+  return re.sub(r'\(\d+ ms', '(? ms', output)
+
+
 def NormalizeOutput(output):
   """Normalizes output (the output of gtest_output_test_.exe)."""
 
   output = ToUnixLineEnding(output)
   output = RemoveLocations(output)
   output = RemoveStackTraces(output)
+  output = RemoveTime(output)
   return output
@@ -165,8 +174,8 @@ def GetCommandOutput(cmd):
 
 class GTestOutputTest(unittest.TestCase):
   def testOutput(self):
-    output = GetCommandOutput(COMMAND)
+    output = (GetCommandOutput(COMMAND_WITH_COLOR) +
+              GetCommandOutput(COMMAND_WITH_TIME))
     golden_file = open(GOLDEN_PATH, 'rb')
     golden = golden_file.read()
     golden_file.close()
@@ -176,7 +185,8 @@ class GTestOutputTest(unittest.TestCase):
 
 if __name__ == '__main__':
   if sys.argv[1:] == [GENGOLDEN_FLAG]:
-    output = GetCommandOutput(COMMAND)
+    output = (GetCommandOutput(COMMAND_WITH_COLOR) +
+              GetCommandOutput(COMMAND_WITH_TIME))
     golden_file = open(GOLDEN_PATH, 'wb')
     golden_file.write(output)
     golden_file.close()
@@ -381,3 +381,73 @@ Expected fatal failure.
 [0;31m[  FAILED  ] [mExpectFatalFailureTest.FailsWhenStatementReturns
 
 26 FAILED TESTS
+The non-test part of the code is expected to have 2 failures.
+
+gtest_output_test_.cc:#: Failure
+Value of: false
+  Actual: false
+Expected: true
+gtest_output_test_.cc:#: Failure
+Value of: 3
+Expected: 2
+Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
+[==========] Running 4 tests from 2 test cases.
+[----------] Global test environment set-up.
+FooEnvironment::SetUp() called.
+BarEnvironment::SetUp() called.
+[----------] 3 tests from FatalFailureTest
+[ RUN      ] FatalFailureTest.FatalFailureInSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: Failure
+Value of: x
+  Actual: 2
+Expected: 1
+[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine (? ms)
+[ RUN      ] FatalFailureTest.FatalFailureInNestedSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: Failure
+Value of: x
+  Actual: 2
+Expected: 1
+[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)
+[ RUN      ] FatalFailureTest.NonfatalFailureInSubroutine
+(expecting a failure on false)
+gtest_output_test_.cc:#: Failure
+Value of: false
+  Actual: false
+Expected: true
+[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)
+[----------] 3 tests from FatalFailureTest (? ms total)
+
+[----------] 1 test from LoggingTest
+[ RUN      ] LoggingTest.InterleavingLoggingAndAssertions
+(expecting 2 failures on (3) >= (a[i]))
+i == 0
+i == 1
+gtest_output_test_.cc:#: Failure
+Expected: (3) >= (a[i]), actual: 3 vs 9
+i == 2
+i == 3
+gtest_output_test_.cc:#: Failure
+Expected: (3) >= (a[i]), actual: 3 vs 6
+[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions (? ms)
+[----------] 1 test from LoggingTest (? ms total)
+
+[----------] Global test environment tear-down
+BarEnvironment::TearDown() called.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected non-fatal failure.
+FooEnvironment::TearDown() called.
+gtest_output_test_.cc:#: Failure
+Failed
+Expected fatal failure.
+[==========] 4 tests from 2 test cases ran. (? ms total)
+[  PASSED  ] 0 tests.
+[  FAILED  ] 4 tests, listed below:
+[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine
+[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine
+[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine
+[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions
+
+ 4 FAILED TESTS
@@ -358,3 +358,64 @@ Expected fatal failure.
 [  FAILED  ] ExpectFatalFailureTest.FailsWhenStatementReturns
 
 29 FAILED TESTS
+The non-test part of the code is expected to have 2 failures.
+
+gtest_output_test_.cc:#: error: Value of: false
+  Actual: false
+Expected: true
+gtest_output_test_.cc:#: error: Value of: 3
+Expected: 2
+Note: Google Test filter = FatalFailureTest.*:LoggingTest.*
+[==========] Running 4 tests from 2 test cases.
+[----------] Global test environment set-up.
+FooEnvironment::SetUp() called.
+BarEnvironment::SetUp() called.
+[----------] 3 tests from FatalFailureTest
+[ RUN      ] FatalFailureTest.FatalFailureInSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: error: Value of: x
+  Actual: 2
+Expected: 1
+[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine (? ms)
+[ RUN      ] FatalFailureTest.FatalFailureInNestedSubroutine
+(expecting a failure that x should be 1)
+gtest_output_test_.cc:#: error: Value of: x
+  Actual: 2
+Expected: 1
+[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine (? ms)
+[ RUN      ] FatalFailureTest.NonfatalFailureInSubroutine
+(expecting a failure on false)
+gtest_output_test_.cc:#: error: Value of: false
+  Actual: false
+Expected: true
+[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine (? ms)
+[----------] 3 tests from FatalFailureTest (? ms total)
+
+[----------] 1 test from LoggingTest
+[ RUN      ] LoggingTest.InterleavingLoggingAndAssertions
+(expecting 2 failures on (3) >= (a[i]))
+i == 0
+i == 1
+gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 9
+i == 2
+i == 3
+gtest_output_test_.cc:#: error: Expected: (3) >= (a[i]), actual: 3 vs 6
+[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions (? ms)
+[----------] 1 test from LoggingTest (? ms total)
+
+[----------] Global test environment tear-down
+BarEnvironment::TearDown() called.
+gtest_output_test_.cc:#: error: Failed
+Expected non-fatal failure.
+FooEnvironment::TearDown() called.
+gtest_output_test_.cc:#: error: Failed
+Expected fatal failure.
+[==========] 4 tests from 2 test cases ran. (? ms total)
+[  PASSED  ] 0 tests.
+[  FAILED  ] 4 tests, listed below:
+[  FAILED  ] FatalFailureTest.FatalFailureInSubroutine
+[  FAILED  ] FatalFailureTest.FatalFailureInNestedSubroutine
+[  FAILED  ] FatalFailureTest.NonfatalFailureInSubroutine
+[  FAILED  ] LoggingTest.InterleavingLoggingAndAssertions
+
+ 4 FAILED TESTS
@@ -808,6 +808,7 @@ class GTestFlagSaverTest : public testing::Test {
     testing::GTEST_FLAG(filter) = "";
     testing::GTEST_FLAG(list_tests) = false;
     testing::GTEST_FLAG(output) = "";
+    testing::GTEST_FLAG(print_time) = false;
     testing::GTEST_FLAG(repeat) = 1;
   }
@@ -827,6 +828,7 @@ class GTestFlagSaverTest : public testing::Test {
     EXPECT_STREQ("", testing::GTEST_FLAG(filter).c_str());
     EXPECT_FALSE(testing::GTEST_FLAG(list_tests));
     EXPECT_STREQ("", testing::GTEST_FLAG(output).c_str());
+    EXPECT_FALSE(testing::GTEST_FLAG(print_time));
     EXPECT_EQ(1, testing::GTEST_FLAG(repeat));
 
     testing::GTEST_FLAG(break_on_failure) = true;
@@ -835,6 +837,7 @@ class GTestFlagSaverTest : public testing::Test {
     testing::GTEST_FLAG(filter) = "abc";
     testing::GTEST_FLAG(list_tests) = true;
     testing::GTEST_FLAG(output) = "xml:foo.xml";
+    testing::GTEST_FLAG(print_time) = true;
     testing::GTEST_FLAG(repeat) = 100;
   }
  private:
@@ -3471,6 +3474,7 @@ struct Flags {
     filter(""),
     list_tests(false),
     output(""),
+    print_time(false),
     repeat(1) {}
 
   // Factory methods.
@@ -3515,6 +3519,14 @@ struct Flags {
     return flags;
   }
 
+  // Creates a Flags struct where the gtest_print_time flag has the given
+  // value.
+  static Flags PrintTime(bool print_time) {
+    Flags flags;
+    flags.print_time = print_time;
+    return flags;
+  }
+
   // Creates a Flags struct where the gtest_repeat flag has the given
   // value.
   static Flags Repeat(Int32 repeat) {
@@ -3529,6 +3541,7 @@ struct Flags {
   const char* filter;
   bool list_tests;
   const char* output;
+  bool print_time;
   Int32 repeat;
 };
@@ -3542,6 +3555,7 @@ class InitGoogleTestTest : public testing::Test {
     GTEST_FLAG(filter) = "";
     GTEST_FLAG(list_tests) = false;
     GTEST_FLAG(output) = "";
+    GTEST_FLAG(print_time) = false;
     GTEST_FLAG(repeat) = 1;
   }
@@ -3563,6 +3577,7 @@ class InitGoogleTestTest : public testing::Test {
     EXPECT_STREQ(expected.filter, GTEST_FLAG(filter).c_str());
     EXPECT_EQ(expected.list_tests, GTEST_FLAG(list_tests));
     EXPECT_STREQ(expected.output, GTEST_FLAG(output).c_str());
+    EXPECT_EQ(expected.print_time, GTEST_FLAG(print_time));
     EXPECT_EQ(expected.repeat, GTEST_FLAG(repeat));
   }
@@ -3950,6 +3965,86 @@ TEST_F(InitGoogleTestTest, OutputXmlDirectory) {
   TEST_PARSING_FLAGS(argv, argv2, Flags::Output("xml:directory/path/"));
 }
 
+// Tests having a --gtest_print_time flag
+TEST_F(InitGoogleTestTest, PrintTimeFlag) {
+  const char* argv[] = {
+    "foo.exe",
+    "--gtest_print_time",
+    NULL
+  };
+
+  const char* argv2[] = {
+    "foo.exe",
+    NULL
+  };
+
+  TEST_PARSING_FLAGS(argv, argv2, Flags::PrintTime(true));
+}
+
+// Tests having a --gtest_print_time flag with a "true" value
+TEST_F(InitGoogleTestTest, PrintTimeTrue) {
+  const char* argv[] = {
+    "foo.exe",
+    "--gtest_print_time=1",
+    NULL
+  };
+
+  const char* argv2[] = {
+    "foo.exe",
+    NULL
+  };
+
+  TEST_PARSING_FLAGS(argv, argv2, Flags::PrintTime(true));
+}
+
+// Tests having a --gtest_print_time flag with a "false" value
+TEST_F(InitGoogleTestTest, PrintTimeFalse) {
+  const char* argv[] = {
+    "foo.exe",
+    "--gtest_print_time=0",
+    NULL
+  };
+
+  const char* argv2[] = {
+    "foo.exe",
+    NULL
+  };
+
+  TEST_PARSING_FLAGS(argv, argv2, Flags::PrintTime(false));
+}
+
+// Tests parsing --gtest_print_time=f.
+TEST_F(InitGoogleTestTest, PrintTimeFalse_f) {
+  const char* argv[] = {
+    "foo.exe",
+    "--gtest_print_time=f",
+    NULL
+  };
+
+  const char* argv2[] = {
+    "foo.exe",
+    NULL
+  };
+
+  TEST_PARSING_FLAGS(argv, argv2, Flags::PrintTime(false));
+}
+
+// Tests parsing --gtest_print_time=F.
+TEST_F(InitGoogleTestTest, PrintTimeFalse_F) {
+  const char* argv[] = {
+    "foo.exe",
+    "--gtest_print_time=F",
+    NULL
+  };
+
+  const char* argv2[] = {
+    "foo.exe",
+    NULL
+  };
+
+  TEST_PARSING_FLAGS(argv, argv2, Flags::PrintTime(false));
+}
+
 // Tests parsing --gtest_repeat=number
 TEST_F(InitGoogleTestTest, Repeat) {
   const char* argv[] = {