Mirror of https://github.com/google/googletest.git (synced 2025-03-10 09:16:48 +00:00)

Merge pull request #1479 from petrhosek/json
Support JSON output format in addition to XML

Commit 18d270e335
@@ -304,7 +304,9 @@ if (gtest_build_tests)
   cxx_executable(gtest_xml_outfile1_test_ test gtest_main)
   cxx_executable(gtest_xml_outfile2_test_ test gtest_main)
   py_test(gtest_xml_outfiles_test)
+  py_test(gtest_json_outfiles_test)

   cxx_executable(gtest_xml_output_unittest_ test gtest)
   py_test(gtest_xml_output_unittest)
+  py_test(gtest_json_output_unittest)
 endif()
@@ -2060,6 +2060,207 @@ Things to note:

_Availability:_ Linux, Windows, Mac.

#### Generating a JSON Report {#JsonReport}

Google Test can also emit a JSON report as an alternative format to XML. To
generate the JSON report, set the `GTEST_OUTPUT` environment variable or the
`--gtest_output` flag to the string `"json:path_to_output_file"`, which will
create the file at the given location. You can also just use the string
`"json"`, in which case the output can be found in the `test_detail.json` file
in the current directory.
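
As a minimal sketch, here is one way to drive a test binary with JSON output from
Python and read the summary back; the binary name `./my_test` and the `reports/`
path are placeholders, not anything googletest defines.

```python
# Minimal sketch: run a (placeholder) Google Test binary "./my_test" with JSON
# output requested through the environment, then load the generated report.
import json
import os
import subprocess

env = dict(os.environ, GTEST_OUTPUT='json:reports/my_test.json')
# The same effect can be had with: ./my_test --gtest_output=json:reports/my_test.json
subprocess.call(['./my_test'], env=env)  # a non-zero exit just means some tests failed

with open('reports/my_test.json') as f:
    report = json.load(f)
print('%d tests, %d failures' % (report['tests'], report['failures']))
```
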
The report format conforms to the following JSON Schema:

```json
{
  "$schema": "http://json-schema.org/schema#",
  "type": "object",
  "definitions": {
    "TestCase": {
      "type": "object",
      "properties": {
        "name": { "type": "string" },
        "tests": { "type": "integer" },
        "failures": { "type": "integer" },
        "disabled": { "type": "integer" },
        "time": { "type": "string" },
        "testsuite": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/TestInfo"
          }
        }
      }
    },
    "TestInfo": {
      "type": "object",
      "properties": {
        "name": { "type": "string" },
        "status": {
          "type": "string",
          "enum": ["RUN", "NOTRUN"]
        },
        "time": { "type": "string" },
        "classname": { "type": "string" },
        "failures": {
          "type": "array",
          "items": {
            "$ref": "#/definitions/Failure"
          }
        }
      }
    },
    "Failure": {
      "type": "object",
      "properties": {
        "failures": { "type": "string" },
        "type": { "type": "string" }
      }
    }
  },
  "properties": {
    "tests": { "type": "integer" },
    "failures": { "type": "integer" },
    "disabled": { "type": "integer" },
    "errors": { "type": "integer" },
    "timestamp": {
      "type": "string",
      "format": "date-time"
    },
    "time": { "type": "string" },
    "name": { "type": "string" },
    "testsuites": {
      "type": "array",
      "items": {
        "$ref": "#/definitions/TestCase"
      }
    }
  }
}
```
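
If you want to check a generated report against this schema, a short sketch using
the third-party `jsonschema` package (an assumption here; googletest does not ship
it) could look like this:

```python
# Sketch: validate a JSON report against the schema quoted above. Assumes the
# schema has been saved to "gtest_report.schema.json" and that the third-party
# "jsonschema" package is installed (pip install jsonschema).
import json
import jsonschema

with open('gtest_report.schema.json') as f:
    schema = json.load(f)
with open('test_detail.json') as f:  # a report produced with --gtest_output=json
    report = json.load(f)

jsonschema.validate(instance=report, schema=schema)  # raises ValidationError on mismatch
print('report matches the schema')
```
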
The report uses a format that conforms to the following Proto3 definition, using the
[JSON encoding](https://developers.google.com/protocol-buffers/docs/proto3#json):

```proto
syntax = "proto3";

package googletest;

import "google/protobuf/timestamp.proto";
import "google/protobuf/duration.proto";

message UnitTest {
  int32 tests = 1;
  int32 failures = 2;
  int32 disabled = 3;
  int32 errors = 4;
  google.protobuf.Timestamp timestamp = 5;
  google.protobuf.Duration time = 6;
  string name = 7;
  repeated TestCase testsuites = 8;
}

message TestCase {
  string name = 1;
  int32 tests = 2;
  int32 failures = 3;
  int32 disabled = 4;
  int32 errors = 5;
  google.protobuf.Duration time = 6;
  repeated TestInfo testsuite = 7;
}

message TestInfo {
  string name = 1;
  enum Status {
    RUN = 0;
    NOTRUN = 1;
  }
  Status status = 2;
  google.protobuf.Duration time = 3;
  string classname = 4;
  message Failure {
    string failures = 1;
    string type = 2;
  }
  repeated Failure failures = 5;
}
```
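
In the JSON encoding, the `Duration` and `Timestamp` fields above appear as strings
(for example `"0.035s"` and `"2011-10-31T18:52:42Z"`). Below is a small sketch of
helpers for turning them back into numeric values; these helpers are illustrative
and not part of googletest.

```python
# Sketch: convert the string-encoded Duration and Timestamp fields of the
# JSON report back into Python values.
import datetime

def parse_duration(value):
    """Converts a duration string such as '0.035s' to seconds as a float."""
    return float(value.rstrip('s'))

def parse_timestamp(value):
    """Converts an RFC 3339 timestamp such as '2011-10-31T18:52:42Z' to a datetime."""
    return datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%SZ')

print(parse_duration('0.035s'))                 # 0.035
print(parse_timestamp('2011-10-31T18:52:42Z'))  # 2011-10-31 18:52:42
```
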
For instance, the following program

```c++
TEST(MathTest, Addition) { ... }
TEST(MathTest, Subtraction) { ... }
TEST(LogicTest, NonContradiction) { ... }
```

could generate this report:

```json
{
  "tests": 3,
  "failures": 1,
  "errors": 0,
  "time": "0.035s",
  "timestamp": "2011-10-31T18:52:42Z",
  "name": "AllTests",
  "testsuites": [
    {
      "name": "MathTest",
      "tests": 2,
      "failures": 1,
      "errors": 0,
      "time": "0.015s",
      "testsuite": [
        {
          "name": "Addition",
          "status": "RUN",
          "time": "0.007s",
          "classname": "",
          "failures": [
            {
              "failure": "Value of: add(1, 1)\n  Actual: 3\nExpected: 2",
              "type": ""
            },
            {
              "failure": "Value of: add(1, -1)\n  Actual: 1\nExpected: 0",
              "type": ""
            }
          ]
        },
        {
          "name": "Subtraction",
          "status": "RUN",
          "time": "0.005s",
          "classname": ""
        }
      ]
    },
    {
      "name": "LogicTest",
      "tests": 1,
      "failures": 0,
      "errors": 0,
      "time": "0.005s",
      "testsuite": [
        {
          "name": "NonContradiction",
          "status": "RUN",
          "time": "0.005s",
          "classname": ""
        }
      ]
    }
  ]
}
```

IMPORTANT: The exact format of the JSON document is subject to change.
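
Since the exact format may change, a consumer should read the report defensively.
Here is a sketch that walks a report with the structure shown above and prints each
recorded failure:

```python
# Sketch: list every failure in a JSON report with the structure shown above.
# Unknown or missing keys are skipped rather than assumed to exist.
import json

with open('test_detail.json') as f:
    report = json.load(f)

for suite in report.get('testsuites', []):
    for test in suite.get('testsuite', []):
        for failure in test.get('failures', []):
            print('%s.%s: %s' % (suite.get('name', '?'),
                                 test.get('name', '?'),
                                 failure.get('failure', '')))
```
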

**Availability**: Linux, Windows, Mac.

## Controlling How Failures Are Reported ##

### Turning Assertion Failures into Break-Points ###
@@ -160,8 +160,10 @@ static const char kDeathTestCaseFilter[] = "*DeathTest:*DeathTest/*";
 // A test filter that matches everything.
 static const char kUniversalFilter[] = "*";

-// The default output file for XML output.
-static const char kDefaultOutputFile[] = "test_detail.xml";
+// The default output format.
+static const char kDefaultOutputFormat[] = "xml";
+// The default output file.
+static const char kDefaultOutputFile[] = "test_detail";

 // The environment variable name for the test shard index.
 static const char kTestShardIndex[] = "GTEST_SHARD_INDEX";
@@ -231,9 +233,9 @@ GTEST_DEFINE_bool_(list_tests, false,
 GTEST_DEFINE_string_(
     output,
     internal::StringFromGTestEnv("output", ""),
-    "A format (currently must be \"xml\"), optionally followed "
-    "by a colon and an output file name or directory. A directory "
-    "is indicated by a trailing pathname separator. "
+    "A format (defaults to \"xml\" but can be specified to be \"json\"), "
+    "optionally followed by a colon and an output file name or directory. "
+    "A directory is indicated by a trailing pathname separator. "
     "Examples: \"xml:filename.xml\", \"xml::directoryname/\". "
     "If a directory is specified, output files will be created "
     "within that directory, with file-names based on the test "
@@ -428,12 +430,17 @@ std::string UnitTestOptions::GetAbsolutePathToOutputFile() {
   if (gtest_output_flag == NULL)
     return "";

+  std::string format = GetOutputFormat();
+  if (format.empty())
+    format = std::string(kDefaultOutputFormat);
+
   const char* const colon = strchr(gtest_output_flag, ':');
   if (colon == NULL)
-    return internal::FilePath::ConcatPaths(
+    return internal::FilePath::MakeFileName(
         internal::FilePath(
             UnitTest::GetInstance()->original_working_dir()),
-        internal::FilePath(kDefaultOutputFile)).string();
+        internal::FilePath(kDefaultOutputFile), 0,
+        format.c_str()).string();

   internal::FilePath output_name(colon + 1);
   if (!output_name.IsAbsolutePath())
@ -3771,6 +3778,351 @@ std::string XmlUnitTestResultPrinter::TestPropertiesAsXmlAttributes(
|
|||||||
// End XmlUnitTestResultPrinter
|
// End XmlUnitTestResultPrinter
|
||||||
|
|
||||||
|
|
||||||
|
// This class generates a JSON output file.
|
||||||
|
class JsonUnitTestResultPrinter : public EmptyTestEventListener {
|
||||||
|
public:
|
||||||
|
explicit JsonUnitTestResultPrinter(const char* output_file);
|
||||||
|
|
||||||
|
virtual void OnTestIterationEnd(const UnitTest& unit_test, int iteration);
|
||||||
|
|
||||||
|
private:
|
||||||
|
// Returns a JSON-escaped copy of the input string str.
|
||||||
|
static std::string EscapeJson(const std::string& str);
|
||||||
|
|
||||||
|
// Verifies that the given attribute belongs to the given element and
|
||||||
|
// streams the attribute as JSON.
|
||||||
|
static void OutputJsonKey(std::ostream* stream,
|
||||||
|
const std::string& element_name,
|
||||||
|
const std::string& name,
|
||||||
|
const std::string& value,
|
||||||
|
const std::string& indent,
|
||||||
|
bool comma = true);
|
||||||
|
static void OutputJsonKey(std::ostream* stream,
|
||||||
|
const std::string& element_name,
|
||||||
|
const std::string& name,
|
||||||
|
int value,
|
||||||
|
const std::string& indent,
|
||||||
|
bool comma = true);
|
||||||
|
|
||||||
|
// Streams a JSON representation of a TestInfo object.
|
||||||
|
static void OutputJsonTestInfo(::std::ostream* stream,
|
||||||
|
const char* test_case_name,
|
||||||
|
const TestInfo& test_info);
|
||||||
|
|
||||||
|
// Prints a JSON representation of a TestCase object
|
||||||
|
static void PrintJsonTestCase(::std::ostream* stream,
|
||||||
|
const TestCase& test_case);
|
||||||
|
|
||||||
|
// Prints a JSON summary of unit_test to output stream out.
|
||||||
|
static void PrintJsonUnitTest(::std::ostream* stream,
|
||||||
|
const UnitTest& unit_test);
|
||||||
|
|
||||||
|
// Produces a string representing the test properties in a result as
|
||||||
|
// a JSON dictionary.
|
||||||
|
static std::string TestPropertiesAsJson(const TestResult& result,
|
||||||
|
const std::string& indent);
|
||||||
|
|
||||||
|
// The output file.
|
||||||
|
const std::string output_file_;
|
||||||
|
|
||||||
|
GTEST_DISALLOW_COPY_AND_ASSIGN_(JsonUnitTestResultPrinter);
|
||||||
|
};
|
||||||
|
|
||||||
|
// Creates a new JsonUnitTestResultPrinter.
|
||||||
|
JsonUnitTestResultPrinter::JsonUnitTestResultPrinter(const char* output_file)
|
||||||
|
: output_file_(output_file) {
|
||||||
|
if (output_file_.empty()) {
|
||||||
|
GTEST_LOG_(FATAL) << "JSON output file may not be null";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
void JsonUnitTestResultPrinter::OnTestIterationEnd(const UnitTest& unit_test,
|
||||||
|
int /*iteration*/) {
|
||||||
|
FILE* jsonout = NULL;
|
||||||
|
FilePath output_file(output_file_);
|
||||||
|
FilePath output_dir(output_file.RemoveFileName());
|
||||||
|
|
||||||
|
if (output_dir.CreateDirectoriesRecursively()) {
|
||||||
|
jsonout = posix::FOpen(output_file_.c_str(), "w");
|
||||||
|
}
|
||||||
|
if (jsonout == NULL) {
|
||||||
|
// TODO(phosek): report the reason of the failure.
|
||||||
|
//
|
||||||
|
// We don't do it for now as:
|
||||||
|
//
|
||||||
|
// 1. There is no urgent need for it.
|
||||||
|
// 2. It's a bit involved to make the errno variable thread-safe on
|
||||||
|
// all three operating systems (Linux, Windows, and Mac OS).
|
||||||
|
// 3. To interpret the meaning of errno in a thread-safe way,
|
||||||
|
// we need the strerror_r() function, which is not available on
|
||||||
|
// Windows.
|
||||||
|
GTEST_LOG_(FATAL) << "Unable to open file \""
|
||||||
|
<< output_file_ << "\"";
|
||||||
|
}
|
||||||
|
std::stringstream stream;
|
||||||
|
PrintJsonUnitTest(&stream, unit_test);
|
||||||
|
fprintf(jsonout, "%s", StringStreamToString(&stream).c_str());
|
||||||
|
fclose(jsonout);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns a JSON-escaped copy of the input string str.
|
||||||
|
std::string JsonUnitTestResultPrinter::EscapeJson(const std::string& str) {
|
||||||
|
Message m;
|
||||||
|
|
||||||
|
for (size_t i = 0; i < str.size(); ++i) {
|
||||||
|
const char ch = str[i];
|
||||||
|
switch (ch) {
|
||||||
|
case '\\':
|
||||||
|
case '"':
|
||||||
|
case '/':
|
||||||
|
m << '\\' << ch;
|
||||||
|
break;
|
||||||
|
case '\b':
|
||||||
|
m << "\\b";
|
||||||
|
break;
|
||||||
|
case '\t':
|
||||||
|
m << "\\t";
|
||||||
|
break;
|
||||||
|
case '\n':
|
||||||
|
m << "\\n";
|
||||||
|
break;
|
||||||
|
case '\f':
|
||||||
|
m << "\\f";
|
||||||
|
break;
|
||||||
|
case '\r':
|
||||||
|
m << "\\r";
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
if (ch < ' ') {
|
||||||
|
m << "\\u00" << String::FormatByte(static_cast<unsigned char>(ch));
|
||||||
|
} else {
|
||||||
|
m << ch;
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return m.GetString();
|
||||||
|
}
|
||||||
|
|
||||||
|
// The following routines generate a JSON representation of a UnitTest
|
||||||
|
// object.
|
||||||
|
|
||||||
|
// Formats the given time in milliseconds as seconds.
|
||||||
|
static std::string FormatTimeInMillisAsDuration(TimeInMillis ms) {
|
||||||
|
::std::stringstream ss;
|
||||||
|
ss << (static_cast<double>(ms) * 1e-3) << "s";
|
||||||
|
return ss.str();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Converts the given epoch time in milliseconds to a date string in the
|
||||||
|
// RFC3339 format, without the timezone information.
|
||||||
|
static std::string FormatEpochTimeInMillisAsRFC3339(TimeInMillis ms) {
|
||||||
|
struct tm time_struct;
|
||||||
|
if (!PortableLocaltime(static_cast<time_t>(ms / 1000), &time_struct))
|
||||||
|
return "";
|
||||||
|
// YYYY-MM-DDThh:mm:ss
|
||||||
|
return StreamableToString(time_struct.tm_year + 1900) + "-" +
|
||||||
|
String::FormatIntWidth2(time_struct.tm_mon + 1) + "-" +
|
||||||
|
String::FormatIntWidth2(time_struct.tm_mday) + "T" +
|
||||||
|
String::FormatIntWidth2(time_struct.tm_hour) + ":" +
|
||||||
|
String::FormatIntWidth2(time_struct.tm_min) + ":" +
|
||||||
|
String::FormatIntWidth2(time_struct.tm_sec) + "Z";
|
||||||
|
}
|
||||||
|
|
||||||
|
static inline std::string Indent(int width) {
|
||||||
|
return std::string(width, ' ');
|
||||||
|
}
|
||||||
|
|
||||||
|
void JsonUnitTestResultPrinter::OutputJsonKey(
|
||||||
|
std::ostream* stream,
|
||||||
|
const std::string& element_name,
|
||||||
|
const std::string& name,
|
||||||
|
const std::string& value,
|
||||||
|
const std::string& indent,
|
||||||
|
bool comma) {
|
||||||
|
const std::vector<std::string>& allowed_names =
|
||||||
|
GetReservedAttributesForElement(element_name);
|
||||||
|
|
||||||
|
GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
|
||||||
|
allowed_names.end())
|
||||||
|
<< "Key \"" << name << "\" is not allowed for value \"" << element_name
|
||||||
|
<< "\".";
|
||||||
|
|
||||||
|
*stream << indent << "\"" << name << "\": \"" << EscapeJson(value) << "\"";
|
||||||
|
if (comma)
|
||||||
|
*stream << ",\n";
|
||||||
|
}
|
||||||
|
|
||||||
|
void JsonUnitTestResultPrinter::OutputJsonKey(
|
||||||
|
std::ostream* stream,
|
||||||
|
const std::string& element_name,
|
||||||
|
const std::string& name,
|
||||||
|
int value,
|
||||||
|
const std::string& indent,
|
||||||
|
bool comma) {
|
||||||
|
const std::vector<std::string>& allowed_names =
|
||||||
|
GetReservedAttributesForElement(element_name);
|
||||||
|
|
||||||
|
GTEST_CHECK_(std::find(allowed_names.begin(), allowed_names.end(), name) !=
|
||||||
|
allowed_names.end())
|
||||||
|
<< "Key \"" << name << "\" is not allowed for value \"" << element_name
|
||||||
|
<< "\".";
|
||||||
|
|
||||||
|
*stream << indent << "\"" << name << "\": " << StreamableToString(value);
|
||||||
|
if (comma)
|
||||||
|
*stream << ",\n";
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prints a JSON representation of a TestInfo object.
|
||||||
|
void JsonUnitTestResultPrinter::OutputJsonTestInfo(::std::ostream* stream,
|
||||||
|
const char* test_case_name,
|
||||||
|
const TestInfo& test_info) {
|
||||||
|
const TestResult& result = *test_info.result();
|
||||||
|
const std::string kTestcase = "testcase";
|
||||||
|
const std::string kIndent = Indent(10);
|
||||||
|
|
||||||
|
*stream << Indent(8) << "{\n";
|
||||||
|
OutputJsonKey(stream, kTestcase, "name", test_info.name(), kIndent);
|
||||||
|
|
||||||
|
if (test_info.value_param() != NULL) {
|
||||||
|
OutputJsonKey(stream, kTestcase, "value_param",
|
||||||
|
test_info.value_param(), kIndent);
|
||||||
|
}
|
||||||
|
if (test_info.type_param() != NULL) {
|
||||||
|
OutputJsonKey(stream, kTestcase, "type_param", test_info.type_param(),
|
||||||
|
kIndent);
|
||||||
|
}
|
||||||
|
|
||||||
|
OutputJsonKey(stream, kTestcase, "status",
|
||||||
|
test_info.should_run() ? "RUN" : "NOTRUN", kIndent);
|
||||||
|
OutputJsonKey(stream, kTestcase, "time",
|
||||||
|
FormatTimeInMillisAsDuration(result.elapsed_time()), kIndent);
|
||||||
|
OutputJsonKey(stream, kTestcase, "classname", test_case_name, kIndent, false);
|
||||||
|
*stream << TestPropertiesAsJson(result, kIndent);
|
||||||
|
|
||||||
|
int failures = 0;
|
||||||
|
for (int i = 0; i < result.total_part_count(); ++i) {
|
||||||
|
const TestPartResult& part = result.GetTestPartResult(i);
|
||||||
|
if (part.failed()) {
|
||||||
|
*stream << ",\n";
|
||||||
|
if (++failures == 1) {
|
||||||
|
*stream << kIndent << "\"" << "failures" << "\": [\n";
|
||||||
|
}
|
||||||
|
const std::string location =
|
||||||
|
internal::FormatCompilerIndependentFileLocation(part.file_name(),
|
||||||
|
part.line_number());
|
||||||
|
const std::string summary = EscapeJson(location + "\n" + part.summary());
|
||||||
|
*stream << kIndent << " {\n"
|
||||||
|
<< kIndent << " \"failure\": \"" << summary << "\",\n"
|
||||||
|
<< kIndent << " \"type\": \"\"\n"
|
||||||
|
<< kIndent << " }";
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (failures > 0)
|
||||||
|
*stream << "\n" << kIndent << "]";
|
||||||
|
*stream << "\n" << Indent(8) << "}";
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prints a JSON representation of a TestCase object.
|
||||||
|
void JsonUnitTestResultPrinter::PrintJsonTestCase(std::ostream* stream,
|
||||||
|
const TestCase& test_case) {
|
||||||
|
const std::string kTestsuite = "testsuite";
|
||||||
|
const std::string kIndent = Indent(6);
|
||||||
|
|
||||||
|
*stream << Indent(4) << "{\n";
|
||||||
|
OutputJsonKey(stream, kTestsuite, "name", test_case.name(), kIndent);
|
||||||
|
OutputJsonKey(stream, kTestsuite, "tests", test_case.reportable_test_count(),
|
||||||
|
kIndent);
|
||||||
|
OutputJsonKey(stream, kTestsuite, "failures", test_case.failed_test_count(),
|
||||||
|
kIndent);
|
||||||
|
OutputJsonKey(stream, kTestsuite, "disabled",
|
||||||
|
test_case.reportable_disabled_test_count(), kIndent);
|
||||||
|
OutputJsonKey(stream, kTestsuite, "errors", 0, kIndent);
|
||||||
|
OutputJsonKey(stream, kTestsuite, "time",
|
||||||
|
FormatTimeInMillisAsDuration(test_case.elapsed_time()), kIndent,
|
||||||
|
false);
|
||||||
|
*stream << TestPropertiesAsJson(test_case.ad_hoc_test_result(), kIndent)
|
||||||
|
<< ",\n";
|
||||||
|
|
||||||
|
*stream << kIndent << "\"" << kTestsuite << "\": [\n";
|
||||||
|
|
||||||
|
bool comma = false;
|
||||||
|
for (int i = 0; i < test_case.total_test_count(); ++i) {
|
||||||
|
if (test_case.GetTestInfo(i)->is_reportable()) {
|
||||||
|
if (comma) {
|
||||||
|
*stream << ",\n";
|
||||||
|
} else {
|
||||||
|
comma = true;
|
||||||
|
}
|
||||||
|
OutputJsonTestInfo(stream, test_case.name(), *test_case.GetTestInfo(i));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
*stream << "\n" << kIndent << "]\n" << Indent(4) << "}";
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prints a JSON summary of unit_test to output stream out.
|
||||||
|
void JsonUnitTestResultPrinter::PrintJsonUnitTest(std::ostream* stream,
|
||||||
|
const UnitTest& unit_test) {
|
||||||
|
const std::string kTestsuites = "testsuites";
|
||||||
|
const std::string kIndent = Indent(2);
|
||||||
|
*stream << "{\n";
|
||||||
|
|
||||||
|
OutputJsonKey(stream, kTestsuites, "tests", unit_test.reportable_test_count(),
|
||||||
|
kIndent);
|
||||||
|
OutputJsonKey(stream, kTestsuites, "failures", unit_test.failed_test_count(),
|
||||||
|
kIndent);
|
||||||
|
OutputJsonKey(stream, kTestsuites, "disabled",
|
||||||
|
unit_test.reportable_disabled_test_count(), kIndent);
|
||||||
|
OutputJsonKey(stream, kTestsuites, "errors", 0, kIndent);
|
||||||
|
if (GTEST_FLAG(shuffle)) {
|
||||||
|
OutputJsonKey(stream, kTestsuites, "random_seed", unit_test.random_seed(),
|
||||||
|
kIndent);
|
||||||
|
}
|
||||||
|
OutputJsonKey(stream, kTestsuites, "timestamp",
|
||||||
|
FormatEpochTimeInMillisAsRFC3339(unit_test.start_timestamp()),
|
||||||
|
kIndent);
|
||||||
|
OutputJsonKey(stream, kTestsuites, "time",
|
||||||
|
FormatTimeInMillisAsDuration(unit_test.elapsed_time()), kIndent,
|
||||||
|
false);
|
||||||
|
|
||||||
|
*stream << TestPropertiesAsJson(unit_test.ad_hoc_test_result(), kIndent)
|
||||||
|
<< ",\n";
|
||||||
|
|
||||||
|
OutputJsonKey(stream, kTestsuites, "name", "AllTests", kIndent);
|
||||||
|
*stream << kIndent << "\"" << kTestsuites << "\": [\n";
|
||||||
|
|
||||||
|
bool comma = false;
|
||||||
|
for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
|
||||||
|
if (unit_test.GetTestCase(i)->reportable_test_count() > 0) {
|
||||||
|
if (comma) {
|
||||||
|
*stream << ",\n";
|
||||||
|
} else {
|
||||||
|
comma = true;
|
||||||
|
}
|
||||||
|
PrintJsonTestCase(stream, *unit_test.GetTestCase(i));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
*stream << "\n" << kIndent << "]\n" << "}\n";
|
||||||
|
}
|
||||||
|
|
||||||
|
// Produces a string representing the test properties in a result as
|
||||||
|
// a JSON dictionary.
|
||||||
|
std::string JsonUnitTestResultPrinter::TestPropertiesAsJson(
|
||||||
|
const TestResult& result, const std::string& indent) {
|
||||||
|
Message attributes;
|
||||||
|
for (int i = 0; i < result.test_property_count(); ++i) {
|
||||||
|
const TestProperty& property = result.GetTestProperty(i);
|
||||||
|
attributes << ",\n" << indent << "\"" << property.key() << "\": "
|
||||||
|
<< "\"" << EscapeJson(property.value()) << "\"";
|
||||||
|
}
|
||||||
|
return attributes.GetString();
|
||||||
|
}
|
||||||
|
|
||||||
|
// End JsonUnitTestResultPrinter
|
||||||
|
|
||||||
#if GTEST_CAN_STREAM_RESULTS_
|
#if GTEST_CAN_STREAM_RESULTS_
|
||||||
|
|
||||||
// Checks if str contains '=', '&', '%' or '\n' characters. If yes,
|
// Checks if str contains '=', '&', '%' or '\n' characters. If yes,
|
||||||
@@ -4397,6 +4749,9 @@ void UnitTestImpl::ConfigureXmlOutput() {
   if (output_format == "xml") {
     listeners()->SetDefaultXmlGenerator(new XmlUnitTestResultPrinter(
         UnitTestOptions::GetAbsolutePathToOutputFile().c_str()));
+  } else if (output_format == "json") {
+    listeners()->SetDefaultXmlGenerator(new JsonUnitTestResultPrinter(
+        UnitTestOptions::GetAbsolutePathToOutputFile().c_str()));
   } else if (output_format != "") {
     GTEST_LOG_(WARNING) << "WARNING: unrecognized output format \""
                         << output_format << "\" ignored.";
@@ -5182,10 +5537,10 @@ static const char kColorEncodedHelpMessage[] =
 "      Enable/disable colored output. The default is @Gauto@D.\n"
 "  @G--" GTEST_FLAG_PREFIX_ "print_time=0@D\n"
 "      Don't print the elapsed time of each test.\n"
-"  @G--" GTEST_FLAG_PREFIX_ "output=xml@Y[@G:@YDIRECTORY_PATH@G"
+"  @G--" GTEST_FLAG_PREFIX_ "output=@Y(@Gjson@Y|@Gxml@Y)[@G:@YDIRECTORY_PATH@G"
     GTEST_PATH_SEP_ "@Y|@G:@YFILE_PATH]@D\n"
-"      Generate an XML report in the given directory or with the given file\n"
-"      name. @YFILE_PATH@D defaults to @Gtest_detail.xml@D.\n"
+"      Generate a JSON or XML report in the given directory or with the given\n"
+"      file name. @YFILE_PATH@D defaults to @Gtest_detail.xml@D.\n"
 #if GTEST_CAN_STREAM_RESULTS_
 "  @G--" GTEST_FLAG_PREFIX_ "stream_result_to=@YHOST@G:@YPORT@D\n"
 "      Stream test results to the given server.\n"
googletest/test/gtest_json_outfiles_test.py (new file, 163 lines)
@@ -0,0 +1,163 @@
#!/usr/bin/env python
|
||||||
|
# Copyright 2018, Google Inc.
|
||||||
|
# All rights reserved.
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are
|
||||||
|
# met:
|
||||||
|
#
|
||||||
|
# * Redistributions of source code must retain the above copyright
|
||||||
|
# notice, this list of conditions and the following disclaimer.
|
||||||
|
# * Redistributions in binary form must reproduce the above
|
||||||
|
# copyright notice, this list of conditions and the following disclaimer
|
||||||
|
# in the documentation and/or other materials provided with the
|
||||||
|
# distribution.
|
||||||
|
# * Neither the name of Google Inc. nor the names of its
|
||||||
|
# contributors may be used to endorse or promote products derived from
|
||||||
|
# this software without specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
"""Unit test for the gtest_json_output module."""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import gtest_test_utils
|
||||||
|
import gtest_json_test_utils
|
||||||
|
|
||||||
|
|
||||||
|
GTEST_OUTPUT_SUBDIR = 'json_outfiles'
|
||||||
|
GTEST_OUTPUT_1_TEST = 'gtest_xml_outfile1_test_'
|
||||||
|
GTEST_OUTPUT_2_TEST = 'gtest_xml_outfile2_test_'
|
||||||
|
|
||||||
|
EXPECTED_1 = {
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'timestamp': u'*',
|
||||||
|
u'name': u'AllTests',
|
||||||
|
u'testsuites': [{
|
||||||
|
u'name': u'PropertyOne',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [{
|
||||||
|
u'name': u'TestSomeProperties',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'PropertyOne',
|
||||||
|
u'SetUpProp': u'1',
|
||||||
|
u'TestSomeProperty': u'1',
|
||||||
|
u'TearDownProp': u'1',
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
|
||||||
|
EXPECTED_2 = {
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'timestamp': u'*',
|
||||||
|
u'name': u'AllTests',
|
||||||
|
u'testsuites': [{
|
||||||
|
u'name': u'PropertyTwo',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [{
|
||||||
|
u'name': u'TestSomeProperties',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'PropertyTwo',
|
||||||
|
u'SetUpProp': u'2',
|
||||||
|
u'TestSomeProperty': u'2',
|
||||||
|
u'TearDownProp': u'2',
|
||||||
|
}],
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class GTestJsonOutFilesTest(gtest_test_utils.TestCase):
|
||||||
|
"""Unit test for Google Test's JSON output functionality."""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
# We want the trailing '/' that the last "" provides in os.path.join, for
|
||||||
|
# telling Google Test to create an output directory instead of a single file
|
||||||
|
# for JSON output.
|
||||||
|
self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
|
||||||
|
GTEST_OUTPUT_SUBDIR, '')
|
||||||
|
self.DeleteFilesAndDir()
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
self.DeleteFilesAndDir()
|
||||||
|
|
||||||
|
def DeleteFilesAndDir(self):
|
||||||
|
try:
|
||||||
|
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + '.json'))
|
||||||
|
except os.error:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + '.json'))
|
||||||
|
except os.error:
|
||||||
|
pass
|
||||||
|
try:
|
||||||
|
os.rmdir(self.output_dir_)
|
||||||
|
except os.error:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def testOutfile1(self):
|
||||||
|
self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_1)
|
||||||
|
|
||||||
|
def testOutfile2(self):
|
||||||
|
self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_2)
|
||||||
|
|
||||||
|
def _TestOutFile(self, test_name, expected):
|
||||||
|
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
|
||||||
|
command = [gtest_prog_path, '--gtest_output=json:%s' % self.output_dir_]
|
||||||
|
p = gtest_test_utils.Subprocess(command,
|
||||||
|
working_dir=gtest_test_utils.GetTempDir())
|
||||||
|
self.assert_(p.exited)
|
||||||
|
self.assertEquals(0, p.exit_code)
|
||||||
|
|
||||||
|
# TODO(wan@google.com): libtool causes the built test binary to be
|
||||||
|
# named lt-gtest_xml_outfiles_test_ instead of
|
||||||
|
# gtest_xml_outfiles_test_. To account for this possibility, we
|
||||||
|
# allow both names in the following code. We should remove this
|
||||||
|
# hack when Chandler Carruth's libtool replacement tool is ready.
|
||||||
|
output_file_name1 = test_name + '.json'
|
||||||
|
output_file1 = os.path.join(self.output_dir_, output_file_name1)
|
||||||
|
output_file_name2 = 'lt-' + output_file_name1
|
||||||
|
output_file2 = os.path.join(self.output_dir_, output_file_name2)
|
||||||
|
self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
|
||||||
|
output_file1)
|
||||||
|
|
||||||
|
if os.path.isfile(output_file1):
|
||||||
|
with open(output_file1) as f:
|
||||||
|
actual = json.load(f)
|
||||||
|
else:
|
||||||
|
with open(output_file2) as f:
|
||||||
|
actual = json.load(f)
|
||||||
|
self.assertEqual(expected, gtest_json_test_utils.normalize(actual))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
os.environ['GTEST_STACK_TRACE_DEPTH'] = '0'
|
||||||
|
gtest_test_utils.Main()
|
googletest/test/gtest_json_output_unittest.py (new file, 612 lines)
@@ -0,0 +1,612 @@
#!/usr/bin/env python
|
||||||
|
# Copyright 2018, Google Inc.
|
||||||
|
# All rights reserved.
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are
|
||||||
|
# met:
|
||||||
|
#
|
||||||
|
# * Redistributions of source code must retain the above copyright
|
||||||
|
# notice, this list of conditions and the following disclaimer.
|
||||||
|
# * Redistributions in binary form must reproduce the above
|
||||||
|
# copyright notice, this list of conditions and the following disclaimer
|
||||||
|
# in the documentation and/or other materials provided with the
|
||||||
|
# distribution.
|
||||||
|
# * Neither the name of Google Inc. nor the names of its
|
||||||
|
# contributors may be used to endorse or promote products derived from
|
||||||
|
# this software without specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
"""Unit test for the gtest_json_output module."""
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import errno
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import gtest_test_utils
|
||||||
|
import gtest_json_test_utils
|
||||||
|
|
||||||
|
|
||||||
|
GTEST_FILTER_FLAG = '--gtest_filter'
|
||||||
|
GTEST_LIST_TESTS_FLAG = '--gtest_list_tests'
|
||||||
|
GTEST_OUTPUT_FLAG = '--gtest_output'
|
||||||
|
GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.json'
|
||||||
|
GTEST_PROGRAM_NAME = 'gtest_xml_output_unittest_'
|
||||||
|
|
||||||
|
SUPPORTS_STACK_TRACES = False
|
||||||
|
|
||||||
|
if SUPPORTS_STACK_TRACES:
|
||||||
|
STACK_TRACE_TEMPLATE = '\nStack trace:\n*'
|
||||||
|
else:
|
||||||
|
STACK_TRACE_TEMPLATE = ''
|
||||||
|
|
||||||
|
EXPECTED_NON_EMPTY = {
|
||||||
|
u'tests': 23,
|
||||||
|
u'failures': 4,
|
||||||
|
u'disabled': 2,
|
||||||
|
u'errors': 0,
|
||||||
|
u'timestamp': u'*',
|
||||||
|
u'time': u'*',
|
||||||
|
u'ad_hoc_property': u'42',
|
||||||
|
u'name': u'AllTests',
|
||||||
|
u'testsuites': [
|
||||||
|
{
|
||||||
|
u'name': u'SuccessfulTest',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'Succeeds',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'SuccessfulTest'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'FailedTest',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 1,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'Fails',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'FailedTest',
|
||||||
|
u'failures': [
|
||||||
|
{
|
||||||
|
u'failure':
|
||||||
|
u'gtest_xml_output_unittest_.cc:*\n'
|
||||||
|
u'Expected equality of these values:\n'
|
||||||
|
u' 1\n 2' + STACK_TRACE_TEMPLATE,
|
||||||
|
u'type': u''
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'DisabledTest',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 1,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'DISABLED_test_not_run',
|
||||||
|
u'status': u'NOTRUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'DisabledTest'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'MixedResultTest',
|
||||||
|
u'tests': 3,
|
||||||
|
u'failures': 1,
|
||||||
|
u'disabled': 1,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'Succeeds',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'MixedResultTest'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'Fails',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'MixedResultTest',
|
||||||
|
u'failures': [
|
||||||
|
{
|
||||||
|
u'failure':
|
||||||
|
u'gtest_xml_output_unittest_.cc:*\n'
|
||||||
|
u'Expected equality of these values:\n'
|
||||||
|
u' 1\n 2' + STACK_TRACE_TEMPLATE,
|
||||||
|
u'type': u''
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'failure':
|
||||||
|
u'gtest_xml_output_unittest_.cc:*\n'
|
||||||
|
u'Expected equality of these values:\n'
|
||||||
|
u' 2\n 3' + STACK_TRACE_TEMPLATE,
|
||||||
|
u'type': u''
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'DISABLED_test',
|
||||||
|
u'status': u'NOTRUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'MixedResultTest'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'XmlQuotingTest',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 1,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'OutputsCData',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'XmlQuotingTest',
|
||||||
|
u'failures': [
|
||||||
|
{
|
||||||
|
u'failure':
|
||||||
|
u'gtest_xml_output_unittest_.cc:*\n'
|
||||||
|
u'Failed\nXML output: <?xml encoding="utf-8">'
|
||||||
|
u'<top><![CDATA[cdata text]]></top>' +
|
||||||
|
STACK_TRACE_TEMPLATE,
|
||||||
|
u'type': u''
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'InvalidCharactersTest',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 1,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'InvalidCharactersInMessage',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'InvalidCharactersTest',
|
||||||
|
u'failures': [
|
||||||
|
{
|
||||||
|
u'failure':
|
||||||
|
u'gtest_xml_output_unittest_.cc:*\n'
|
||||||
|
u'Failed\nInvalid characters in brackets'
|
||||||
|
u' [\x01\x02]' + STACK_TRACE_TEMPLATE,
|
||||||
|
u'type': u''
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'PropertyRecordingTest',
|
||||||
|
u'tests': 4,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'SetUpTestCase': u'yes',
|
||||||
|
u'TearDownTestCase': u'aye',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'OneProperty',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'PropertyRecordingTest',
|
||||||
|
u'key_1': u'1'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'IntValuedProperty',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'PropertyRecordingTest',
|
||||||
|
u'key_int': u'1'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'ThreeProperties',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'PropertyRecordingTest',
|
||||||
|
u'key_1': u'1',
|
||||||
|
u'key_2': u'2',
|
||||||
|
u'key_3': u'3'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'TwoValuesForOneKeyUsesLastValue',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'PropertyRecordingTest',
|
||||||
|
u'key_1': u'2'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'NoFixtureTest',
|
||||||
|
u'tests': 3,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'RecordProperty',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'NoFixtureTest',
|
||||||
|
u'key': u'1'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'ExternalUtilityThatCallsRecordIntValuedProperty',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'NoFixtureTest',
|
||||||
|
u'key_for_utility_int': u'1'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name':
|
||||||
|
u'ExternalUtilityThatCallsRecordStringValuedProperty',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'NoFixtureTest',
|
||||||
|
u'key_for_utility_string': u'1'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'TypedTest/0',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'HasTypeParamAttribute',
|
||||||
|
u'type_param': u'int',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'TypedTest/0'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'TypedTest/1',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'HasTypeParamAttribute',
|
||||||
|
u'type_param': u'long',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'TypedTest/1'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'Single/TypeParameterizedTestCase/0',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'HasTypeParamAttribute',
|
||||||
|
u'type_param': u'int',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'Single/TypeParameterizedTestCase/0'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'Single/TypeParameterizedTestCase/1',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'HasTypeParamAttribute',
|
||||||
|
u'type_param': u'long',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'Single/TypeParameterizedTestCase/1'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'Single/ValueParamTest',
|
||||||
|
u'tests': 4,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [
|
||||||
|
{
|
||||||
|
u'name': u'HasValueParamAttribute/0',
|
||||||
|
u'value_param': u'33',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'Single/ValueParamTest'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'HasValueParamAttribute/1',
|
||||||
|
u'value_param': u'42',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'Single/ValueParamTest'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'AnotherTestThatHasValueParamAttribute/0',
|
||||||
|
u'value_param': u'33',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'Single/ValueParamTest'
|
||||||
|
},
|
||||||
|
{
|
||||||
|
u'name': u'AnotherTestThatHasValueParamAttribute/1',
|
||||||
|
u'value_param': u'42',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'Single/ValueParamTest'
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
|
||||||
|
EXPECTED_FILTERED = {
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'timestamp': u'*',
|
||||||
|
u'name': u'AllTests',
|
||||||
|
u'ad_hoc_property': u'42',
|
||||||
|
u'testsuites': [{
|
||||||
|
u'name': u'SuccessfulTest',
|
||||||
|
u'tests': 1,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'testsuite': [{
|
||||||
|
u'name': u'Succeeds',
|
||||||
|
u'status': u'RUN',
|
||||||
|
u'time': u'*',
|
||||||
|
u'classname': u'SuccessfulTest',
|
||||||
|
}]
|
||||||
|
}],
|
||||||
|
}
|
||||||
|
|
||||||
|
EXPECTED_EMPTY = {
|
||||||
|
u'tests': 0,
|
||||||
|
u'failures': 0,
|
||||||
|
u'disabled': 0,
|
||||||
|
u'errors': 0,
|
||||||
|
u'time': u'*',
|
||||||
|
u'timestamp': u'*',
|
||||||
|
u'name': u'AllTests',
|
||||||
|
u'testsuites': [],
|
||||||
|
}
|
||||||
|
|
||||||
|
GTEST_PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath(GTEST_PROGRAM_NAME)
|
||||||
|
|
||||||
|
SUPPORTS_TYPED_TESTS = 'TypedTest' in gtest_test_utils.Subprocess(
|
||||||
|
[GTEST_PROGRAM_PATH, GTEST_LIST_TESTS_FLAG], capture_stderr=False).output
|
||||||
|
|
||||||
|
|
||||||
|
class GTestJsonOutputUnitTest(gtest_test_utils.TestCase):
|
||||||
|
"""Unit test for Google Test's JSON output functionality.
|
||||||
|
"""
|
||||||
|
|
||||||
|
# This test currently breaks on platforms that do not support typed and
|
||||||
|
# type-parameterized tests, so we don't run it under them.
|
||||||
|
if SUPPORTS_TYPED_TESTS:
|
||||||
|
|
||||||
|
def testNonEmptyJsonOutput(self):
|
||||||
|
"""Verifies JSON output for a Google Test binary with non-empty output.
|
||||||
|
|
||||||
|
Runs a test program that generates a non-empty JSON output, and
|
||||||
|
tests that the JSON output is expected.
|
||||||
|
"""
|
||||||
|
self._TestJsonOutput(GTEST_PROGRAM_NAME, EXPECTED_NON_EMPTY, 1)
|
||||||
|
|
||||||
|
def testEmptyJsonOutput(self):
|
||||||
|
"""Verifies JSON output for a Google Test binary without actual tests.
|
||||||
|
|
||||||
|
Runs a test program that generates an empty JSON output, and
|
||||||
|
tests that the JSON output is expected.
|
||||||
|
"""
|
||||||
|
|
||||||
|
self._TestJsonOutput('gtest_no_test_unittest', EXPECTED_EMPTY, 0)
|
||||||
|
|
||||||
|
def testTimestampValue(self):
|
||||||
|
"""Checks whether the timestamp attribute in the JSON output is valid.
|
||||||
|
|
||||||
|
Runs a test program that generates an empty JSON output, and checks if
|
||||||
|
the timestamp attribute in the testsuites tag is valid.
|
||||||
|
"""
|
||||||
|
actual = self._GetJsonOutput('gtest_no_test_unittest', [], 0)
|
||||||
|
date_time_str = actual['timestamp']
|
||||||
|
# datetime.strptime() is only available in Python 2.5+ so we have to
|
||||||
|
# parse the expected datetime manually.
|
||||||
|
match = re.match(r'(\d+)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)', date_time_str)
|
||||||
|
self.assertTrue(
|
||||||
|
match,
|
||||||
|
'JSON datetime string %s has incorrect format' % date_time_str)
|
||||||
|
date_time_from_json = datetime.datetime(
|
||||||
|
year=int(match.group(1)), month=int(match.group(2)),
|
||||||
|
day=int(match.group(3)), hour=int(match.group(4)),
|
||||||
|
minute=int(match.group(5)), second=int(match.group(6)))
|
||||||
|
|
||||||
|
time_delta = abs(datetime.datetime.now() - date_time_from_json)
|
||||||
|
# timestamp value should be near the current local time
|
||||||
|
self.assertTrue(time_delta < datetime.timedelta(seconds=600),
|
||||||
|
'time_delta is %s' % time_delta)
|
||||||
|
|
||||||
|
def testDefaultOutputFile(self):
|
||||||
|
"""Verifies the default output file name.
|
||||||
|
|
||||||
|
Confirms that Google Test produces a JSON output file with the expected
|
||||||
|
default name if no name is explicitly specified.
|
||||||
|
"""
|
||||||
|
output_file = os.path.join(gtest_test_utils.GetTempDir(),
|
||||||
|
GTEST_DEFAULT_OUTPUT_FILE)
|
||||||
|
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(
|
||||||
|
'gtest_no_test_unittest')
|
||||||
|
try:
|
||||||
|
os.remove(output_file)
|
||||||
|
except OSError:
|
||||||
|
e = sys.exc_info()[1]
|
||||||
|
if e.errno != errno.ENOENT:
|
||||||
|
raise
|
||||||
|
|
||||||
|
p = gtest_test_utils.Subprocess(
|
||||||
|
[gtest_prog_path, '%s=json' % GTEST_OUTPUT_FLAG],
|
||||||
|
working_dir=gtest_test_utils.GetTempDir())
|
||||||
|
self.assert_(p.exited)
|
||||||
|
self.assertEquals(0, p.exit_code)
|
||||||
|
self.assert_(os.path.isfile(output_file))
|
||||||
|
|
||||||
|
def testSuppressedJsonOutput(self):
|
||||||
|
"""Verifies that no JSON output is generated.
|
||||||
|
|
||||||
|
Tests that no JSON file is generated if the default JSON listener is
|
||||||
|
shut down before RUN_ALL_TESTS is invoked.
|
||||||
|
"""
|
||||||
|
|
||||||
|
json_path = os.path.join(gtest_test_utils.GetTempDir(),
|
||||||
|
GTEST_PROGRAM_NAME + 'out.json')
|
||||||
|
if os.path.isfile(json_path):
|
||||||
|
os.remove(json_path)
|
||||||
|
|
||||||
|
command = [GTEST_PROGRAM_PATH,
|
||||||
|
'%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path),
|
||||||
|
'--shut_down_xml']
|
||||||
|
p = gtest_test_utils.Subprocess(command)
|
||||||
|
if p.terminated_by_signal:
|
||||||
|
# p.signal is available only if p.terminated_by_signal is True.
|
||||||
|
self.assertFalse(
|
||||||
|
p.terminated_by_signal,
|
||||||
|
'%s was killed by signal %d' % (GTEST_PROGRAM_NAME, p.signal))
|
||||||
|
else:
|
||||||
|
self.assert_(p.exited)
|
||||||
|
self.assertEquals(1, p.exit_code,
|
||||||
|
"'%s' exited with code %s, which doesn't match "
|
||||||
|
'the expected exit code %s.'
|
||||||
|
% (command, p.exit_code, 1))
|
||||||
|
|
||||||
|
self.assert_(not os.path.isfile(json_path))
|
||||||
|
|
||||||
|
def testFilteredTestJsonOutput(self):
|
||||||
|
"""Verifies JSON output when a filter is applied.
|
||||||
|
|
||||||
|
Runs a test program that executes only some tests and verifies that
|
||||||
|
non-selected tests do not show up in the JSON output.
|
||||||
|
"""
|
||||||
|
|
||||||
|
self._TestJsonOutput(GTEST_PROGRAM_NAME, EXPECTED_FILTERED, 0,
|
||||||
|
extra_args=['%s=SuccessfulTest.*' % GTEST_FILTER_FLAG])
|
||||||
|
|
||||||
|
def _GetJsonOutput(self, gtest_prog_name, extra_args, expected_exit_code):
|
||||||
|
"""Returns the JSON output generated by running the program gtest_prog_name.
|
||||||
|
|
||||||
|
Furthermore, the program's exit code must be expected_exit_code.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
gtest_prog_name: Google Test binary name.
|
||||||
|
extra_args: extra arguments to binary invocation.
|
||||||
|
expected_exit_code: program's exit code.
|
||||||
|
"""
|
||||||
|
json_path = os.path.join(gtest_test_utils.GetTempDir(),
|
||||||
|
gtest_prog_name + 'out.json')
|
||||||
|
gtest_prog_path = gtest_test_utils.GetTestExecutablePath(gtest_prog_name)
|
||||||
|
|
||||||
|
command = (
|
||||||
|
[gtest_prog_path, '%s=json:%s' % (GTEST_OUTPUT_FLAG, json_path)] +
|
||||||
|
extra_args
|
||||||
|
)
|
||||||
|
p = gtest_test_utils.Subprocess(command)
|
||||||
|
if p.terminated_by_signal:
|
||||||
|
self.assert_(False,
|
||||||
|
'%s was killed by signal %d' % (gtest_prog_name, p.signal))
|
||||||
|
else:
|
||||||
|
self.assert_(p.exited)
|
||||||
|
self.assertEquals(expected_exit_code, p.exit_code,
|
||||||
|
"'%s' exited with code %s, which doesn't match "
|
||||||
|
'the expected exit code %s.'
|
||||||
|
% (command, p.exit_code, expected_exit_code))
|
||||||
|
with open(json_path) as f:
|
||||||
|
actual = json.load(f)
|
||||||
|
return actual
|
||||||
|
|
||||||
|
def _TestJsonOutput(self, gtest_prog_name, expected,
|
||||||
|
expected_exit_code, extra_args=None):
|
||||||
|
"""Checks the JSON output generated by the Google Test binary.
|
||||||
|
|
||||||
|
Asserts that the JSON document generated by running the program
|
||||||
|
gtest_prog_name matches expected_json, a string containing another
|
||||||
|
JSON document. Furthermore, the program's exit code must be
|
||||||
|
expected_exit_code.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
gtest_prog_name: Google Test binary name.
|
||||||
|
expected: expected output.
|
||||||
|
expected_exit_code: program's exit code.
|
||||||
|
extra_args: extra arguments to binary invocation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
actual = self._GetJsonOutput(gtest_prog_name, extra_args or [],
|
||||||
|
expected_exit_code)
|
||||||
|
self.assertEqual(expected, gtest_json_test_utils.normalize(actual))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
os.environ['GTEST_STACK_TRACE_DEPTH'] = '1'
|
||||||
|
gtest_test_utils.Main()
|
googletest/test/gtest_json_test_utils.py (new file, 60 lines)
@@ -0,0 +1,60 @@
# Copyright 2018, Google Inc.
|
||||||
|
# All rights reserved.
|
||||||
|
#
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are
|
||||||
|
# met:
|
||||||
|
#
|
||||||
|
# * Redistributions of source code must retain the above copyright
|
||||||
|
# notice, this list of conditions and the following disclaimer.
|
||||||
|
# * Redistributions in binary form must reproduce the above
|
||||||
|
# copyright notice, this list of conditions and the following disclaimer
|
||||||
|
# in the documentation and/or other materials provided with the
|
||||||
|
# distribution.
|
||||||
|
# * Neither the name of Google Inc. nor the names of its
|
||||||
|
# contributors may be used to endorse or promote products derived from
|
||||||
|
# this software without specific prior written permission.
|
||||||
|
#
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||||
|
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||||
|
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||||
|
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||||
|
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||||
|
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
|
||||||
|
"""Unit test utilities for gtest_json_output."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
def normalize(obj):
|
||||||
|
"""Normalize output object.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
obj: Google Test's JSON output object to normalize.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Normalized output without any references to transient information that may
|
||||||
|
change from run to run.
|
||||||
|
"""
|
||||||
|
def _normalize(key, value):
|
||||||
|
if key == 'time':
|
||||||
|
return re.sub(r'^\d+(\.\d+)?s$', u'*', value)
|
||||||
|
elif key == 'timestamp':
|
||||||
|
return re.sub(r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\dZ$', '*', value)
|
||||||
|
elif key == 'failure':
|
||||||
|
value = re.sub(r'^.*[/\\](.*:)\d+\n', '\\1*\n', value)
|
||||||
|
return re.sub(r'Stack trace:\n(.|\n)*', 'Stack trace:\n*', value)
|
||||||
|
else:
|
||||||
|
return normalize(value)
|
||||||
|
if isinstance(obj, dict):
|
||||||
|
return {k: _normalize(k, v) for k, v in obj.items()}
|
||||||
|
if isinstance(obj, list):
|
||||||
|
return [normalize(x) for x in obj]
|
||||||
|
else:
|
||||||
|
return obj