@@ -35,18 +35,34 @@
 #include "conformance_test.h"
 #include <google/protobuf/stubs/common.h>
 #include <google/protobuf/stubs/stringprintf.h>
+#include <google/protobuf/text_format.h>
+#include <google/protobuf/util/json_util.h>
+#include <google/protobuf/util/message_differencer.h>
+#include <google/protobuf/util/type_resolver_util.h>
 #include <google/protobuf/wire_format_lite.h>
 
 using conformance::ConformanceRequest;
 using conformance::ConformanceResponse;
 using conformance::TestAllTypes;
+using conformance::WireFormat;
 using google::protobuf::Descriptor;
+using google::protobuf::DescriptorPool;
 using google::protobuf::FieldDescriptor;
 using google::protobuf::internal::WireFormatLite;
+using google::protobuf::TextFormat;
+using google::protobuf::util::JsonToBinaryString;
+using google::protobuf::util::MessageDifferencer;
+using google::protobuf::util::NewTypeResolverForDescriptorPool;
+using google::protobuf::util::Status;
 using std::string;
 
 namespace {
 
+static const char kTypeUrlPrefix[] = "type.googleapis.com";
+
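+// Builds the type URL for a message type, e.g.
+// "type.googleapis.com/conformance.TestAllTypes"; the JSON converter uses it
+// to look up the message descriptor.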
+static string GetTypeUrl(const Descriptor* message) {
+  return string(kTypeUrlPrefix) + "/" + message->full_name();
+}
+
 /* Routines for building arbitrary protos *************************************/
 
 // We would use CodedOutputStream except that we want more freedom to build
@@ -162,9 +178,13 @@ void ConformanceTestSuite::ReportSuccess(const string& test_name) {
 }
 
 void ConformanceTestSuite::ReportFailure(const string& test_name,
+                                         const ConformanceRequest& request,
+                                         const ConformanceResponse& response,
                                          const char* fmt, ...) {
   if (expected_to_fail_.erase(test_name) == 1) {
-    StringAppendF(&output_, "FAILED AS EXPECTED, test=%s: ", test_name.c_str());
+    expected_failures_++;
+    if (!verbose_)
+      return;
   } else {
     StringAppendF(&output_, "ERROR, test=%s: ", test_name.c_str());
     unexpected_failing_tests_.insert(test_name);
@@ -173,7 +193,20 @@ void ConformanceTestSuite::ReportFailure(const string& test_name,
   va_start(args, fmt);
   StringAppendV(&output_, fmt, args);
   va_end(args);
-  failures_++;
+  StringAppendF(&output_, " request=%s, response=%s\n",
+                request.ShortDebugString().c_str(),
+                response.ShortDebugString().c_str());
+}
+
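+// Records a test that the program under test chose to skip; skipped tests are
+// listed in the final report rather than counted as failures.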
+void ConformanceTestSuite::ReportSkip(const string& test_name,
+                                      const ConformanceRequest& request,
+                                      const ConformanceResponse& response) {
+  if (verbose_) {
+    StringAppendF(&output_, "SKIPPED, test=%s request=%s, response=%s\n",
+                  test_name.c_str(), request.ShortDebugString().c_str(),
+                  response.ShortDebugString().c_str());
+  }
+  skipped_.insert(test_name);
 }
 
 void ConformanceTestSuite::RunTest(const string& test_name,
@@ -202,26 +235,117 @@ void ConformanceTestSuite::RunTest(const string& test_name,
   }
 }
 
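+// Runs a single valid-input test: feeds |input| (encoded in |input_format|)
+// to the program under test, requests |requested_output|, and checks that the
+// output is equivalent to |equivalent_text_format|, a text-format
+// TestAllTypes used as the reference message.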
+void ConformanceTestSuite::RunValidInputTest(
+    const string& test_name, const string& input, WireFormat input_format,
+    const string& equivalent_text_format, WireFormat requested_output) {
+  TestAllTypes reference_message;
+  GOOGLE_CHECK(
+      TextFormat::ParseFromString(equivalent_text_format, &reference_message));
+
+  ConformanceRequest request;
+  ConformanceResponse response;
+
+  switch (input_format) {
+    case conformance::PROTOBUF:
+      request.set_protobuf_payload(input);
+      break;
+
+    case conformance::JSON:
+      request.set_json_payload(input);
+      break;
+
+    case conformance::UNSPECIFIED:
+      GOOGLE_LOG(FATAL) << "Unspecified input format";
+
+  }
+
+  request.set_requested_output_format(requested_output);
+
+  RunTest(test_name, request, &response);
+
+  TestAllTypes test_message;
+
+  switch (response.result_case()) {
+    case ConformanceResponse::kParseError:
+    case ConformanceResponse::kRuntimeError:
+      ReportFailure(test_name, request, response,
+                    "Failed to parse input or produce output.");
+      return;
+
+ case ConformanceResponse::kSkipped:
|
|
|
+ ReportSkip(test_name, request, response);
|
|
|
+ return;
|
|
|
+
|
|
|
+ case ConformanceResponse::kJsonPayload: {
|
|
|
+ if (requested_output != conformance::JSON) {
|
|
|
+ ReportFailure(
|
|
|
+ test_name, request, response,
|
|
|
+ "Test was asked for protobuf output but provided JSON instead.");
|
|
|
+ return;
|
|
|
+ }
|
|
|
+ string binary_protobuf;
|
|
|
+ Status status =
|
|
|
+ JsonToBinaryString(type_resolver_.get(), type_url_,
|
|
|
+ response.json_payload(), &binary_protobuf);
|
|
|
+ if (!status.ok()) {
|
|
|
+ ReportFailure(test_name, request, response,
|
|
|
+ "JSON output we received from test was unparseable.");
|
|
|
+ return;
|
|
|
+ }
|
|
|
+
|
|
|
+ GOOGLE_CHECK(test_message.ParseFromString(binary_protobuf));
|
|
|
+ break;
|
|
|
+ }
|
|
|
+
|
|
|
+ case ConformanceResponse::kProtobufPayload: {
|
|
|
+ if (requested_output != conformance::PROTOBUF) {
|
|
|
+ ReportFailure(
|
|
|
+ test_name, request, response,
|
|
|
+ "Test was asked for JSON output but provided protobuf instead.");
|
|
|
+ return;
|
|
|
+ }
|
|
|
+
|
|
|
+ if (!test_message.ParseFromString(response.protobuf_payload())) {
|
|
|
+ ReportFailure(test_name, request, response,
|
|
|
+ "Protobuf output we received from test was unparseable.");
|
|
|
+ return;
|
|
|
+ }
|
|
|
+
|
|
|
+ break;
|
|
|
+    }
+
+    default:
+      GOOGLE_LOG(FATAL) << test_name << ": unexpected result_case.";
+  }
+
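+  // Whichever payload the program returned has been decoded into test_message
+  // above; compare it field-by-field against the reference message.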
+  MessageDifferencer differencer;
+  string differences;
+  differencer.ReportDifferencesToString(&differences);
+
+  if (differencer.Equals(reference_message, test_message)) {
+    ReportSuccess(test_name);
+  } else {
+    ReportFailure(test_name, request, response,
+                  "Output was not equivalent to reference message: %s.",
+                  differences.c_str());
+  }
+}
+
 // Expect that this precise protobuf will cause a parse error.
 void ConformanceTestSuite::ExpectParseFailureForProto(
     const string& proto, const string& test_name) {
   ConformanceRequest request;
   ConformanceResponse response;
   request.set_protobuf_payload(proto);
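+  // Prefix the test name with its input format so names from different input
+  // formats cannot collide.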
+  string effective_test_name = "ProtobufInput." + test_name;
 
   // We don't expect output, but if the program erroneously accepts the protobuf
   // we let it send its response as this. We must not leave it unspecified.
-  request.set_requested_output(ConformanceRequest::PROTOBUF);
+  request.set_requested_output_format(conformance::PROTOBUF);
 
-  RunTest(test_name, request, &response);
+  RunTest(effective_test_name, request, &response);
   if (response.result_case() == ConformanceResponse::kParseError) {
-    ReportSuccess(test_name);
+    ReportSuccess(effective_test_name);
   } else {
-    ReportFailure(test_name,
-                  "Should have failed to parse, but didn't. Request: %s, "
-                  "response: %s\n",
-                  request.ShortDebugString().c_str(),
-                  response.ShortDebugString().c_str());
+    ReportFailure(effective_test_name, request, response,
+                  "Should have failed to parse, but didn't.");
   }
 }
 
@@ -235,6 +359,16 @@ void ConformanceTestSuite::ExpectHardParseFailureForProto(
   return ExpectParseFailureForProto(proto, test_name);
 }
 
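+// Runs each JSON test case twice, once requesting protobuf output and once
+// requesting JSON output, so both conversion directions are exercised.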
+void ConformanceTestSuite::RunValidJsonTest(
+    const string& test_name, const string& input_json,
+    const string& equivalent_text_format) {
+  RunValidInputTest("JsonInput." + test_name + ".ProtobufOutput", input_json,
+                    conformance::JSON, equivalent_text_format,
+                    conformance::PROTOBUF);
+  RunValidInputTest("JsonInput." + test_name + ".JsonOutput", input_json,
+                    conformance::JSON, equivalent_text_format,
+                    conformance::JSON);
+}
+
 void ConformanceTestSuite::TestPrematureEOFForType(FieldDescriptor::Type type) {
   // Incomplete values for each wire type.
   static const string incompletes[6] = {
@@ -333,11 +467,12 @@ bool ConformanceTestSuite::CheckSetEmpty(const set<string>& set_to_check,
     return true;
   } else {
     StringAppendF(&output_, "\n");
-    StringAppendF(&output_, "ERROR: %s:\n", msg);
+    StringAppendF(&output_, "%s:\n", msg);
     for (set<string>::const_iterator iter = set_to_check.begin();
          iter != set_to_check.end(); ++iter) {
-      StringAppendF(&output_, "%s\n", iter->c_str());
+      StringAppendF(&output_, "  %s\n", iter->c_str());
     }
+    StringAppendF(&output_, "\n");
     return false;
   }
 }
@@ -345,23 +480,25 @@ bool ConformanceTestSuite::CheckSetEmpty(const set<string>& set_to_check,
 bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
                                     std::string* output) {
   runner_ = runner;
-  output_.clear();
   successes_ = 0;
-  failures_ = 0;
+  expected_failures_ = 0;
+  skipped_.clear();
   test_names_.clear();
   unexpected_failing_tests_.clear();
   unexpected_succeeding_tests_.clear();
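+  // JsonToBinaryString() needs a TypeResolver and the TestAllTypes type URL
+  // to convert JSON output from the program under test back into binary form.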
+  type_resolver_.reset(NewTypeResolverForDescriptorPool(
+      kTypeUrlPrefix, DescriptorPool::generated_pool()));
+  type_url_ = GetTypeUrl(TestAllTypes::descriptor());
+
+  output_ = "\nCONFORMANCE TEST BEGIN ====================================\n\n";
 
   for (int i = 1; i <= FieldDescriptor::MAX_TYPE; i++) {
     if (i == FieldDescriptor::TYPE_GROUP) continue;
     TestPrematureEOFForType(static_cast<FieldDescriptor::Type>(i));
   }
 
-  StringAppendF(&output_, "\n");
-  StringAppendF(&output_,
-                "CONFORMANCE SUITE FINISHED: completed %d tests, %d successes, "
-                "%d failures.\n",
-                successes_ + failures_, successes_, failures_);
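+  // JSON conformance tests.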
+  RunValidJsonTest("HelloWorld", "{\"optionalString\":\"Hello, World!\"}",
+                   "optional_string: 'Hello, World!'");
 
   bool ok =
       CheckSetEmpty(expected_to_fail_,
@@ -377,6 +514,17 @@ bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
                     "These tests succeeded, even though they were listed in "
                     "the failure list. Remove them from the failure list");
 
+  CheckSetEmpty(skipped_,
+                "These tests were skipped (probably because support for some "
+                "features is not implemented)");
+
+  StringAppendF(&output_,
+                "CONFORMANCE SUITE %s: %d successes, %d skipped, "
+                "%d expected failures, %d unexpected failures.\n",
+                ok ? "PASSED" : "FAILED", successes_,
+                static_cast<int>(skipped_.size()), expected_failures_,
+                static_cast<int>(unexpected_failing_tests_.size()));
+  StringAppendF(&output_, "\n");
+
   output->assign(output_);
 
   return ok;