
Merge pull request #1 from google/beta-3

Beta 3
Jie Luo, 9 years ago
parent
commit
e1f588ae3c
100 changed files with 15015 additions and 1683 deletions
  1. .gitignore (+11 -0)
  2. .travis.yml (+29 -1)
  3. BUILD (+302 -47)
  4. CHANGES.txt (+177 -8)
  5. INSTALL.txt (+0 -237)
  6. LICENSE (+1 -1)
  7. Makefile.am (+266 -187)
  8. Protobuf.podspec (+4 -2)
  9. README.md (+45 -161)
  10. WORKSPACE (+45 -8)
  11. appveyor.bat (+2 -2)
  12. appveyor.yml (+32 -32)
  13. autogen.sh (+13 -1)
  14. benchmarks/Makefile.am (+69 -0)
  15. benchmarks/README.md (+28 -0)
  16. benchmarks/benchmark_messages_proto2.proto (+11 -8)
  17. benchmarks/benchmark_messages_proto3.proto (+76 -0)
  18. benchmarks/benchmarks.proto (+63 -0)
  19. benchmarks/generate_datasets.cc (+117 -0)
  20. cmake/CMakeLists.txt (+72 -42)
  21. cmake/README.md (+13 -12)
  22. cmake/extract_includes.bat.in (+3 -1)
  23. cmake/libprotobuf-lite.cmake (+9 -3)
  24. cmake/libprotobuf.cmake (+10 -3)
  25. cmake/libprotoc.cmake (+12 -3)
  26. cmake/tests.cmake (+18 -0)
  27. configure.ac (+13 -3)
  28. conformance/ConformanceJava.java (+26 -4)
  29. conformance/ConformanceJavaLite.java (+125 -0)
  30. conformance/Makefile.am (+218 -15)
  31. conformance/conformance.proto (+61 -0)
  32. conformance/conformance_cpp.cc (+15 -2)
  33. conformance/conformance_objc.m (+179 -0)
  34. conformance/conformance_python.py (+130 -0)
  35. conformance/conformance_ruby.rb (+12 -4)
  36. conformance/conformance_test.cc (+1495 -23)
  37. conformance/conformance_test.h (+21 -1)
  38. conformance/conformance_test_runner.cc (+74 -14)
  39. conformance/failure_list_cpp.txt (+85 -0)
  40. conformance/failure_list_csharp.txt (+16 -0)
  41. conformance/failure_list_java.txt (+49 -0)
  42. conformance/failure_list_objc.txt (+4 -0)
  43. conformance/failure_list_python-post26.txt (+2 -0)
  44. conformance/failure_list_python.txt (+85 -0)
  45. conformance/failure_list_python_cpp.txt (+110 -0)
  46. conformance/failure_list_ruby.txt (+199 -2)
  47. conformance/third_party/jsoncpp/json.h (+2075 -0)
  48. conformance/third_party/jsoncpp/jsoncpp.cpp (+5192 -0)
  49. csharp/.gitignore (+1 -0)
  50. csharp/CHANGES.txt (+1 -1)
  51. csharp/Google.Protobuf.Tools.nuspec (+37 -0)
  52. csharp/README.md (+32 -16)
  53. csharp/generate_protos.sh (+2 -12)
  54. csharp/keys/Google.Protobuf.snk (BIN)
  55. csharp/keys/README.md (+5 -1)
  56. csharp/protos/unittest_issues.proto (+8 -1)
  57. csharp/src/AddressBook/AddPerson.cs (+3 -3)
  58. csharp/src/AddressBook/Addressbook.cs (+25 -22)
  59. csharp/src/AddressBook/ListPeople.cs (+3 -3)
  60. csharp/src/AddressBook/SampleUsage.cs (+5 -2)
  61. csharp/src/Google.Protobuf.Conformance/Conformance.cs (+168 -119)
  62. csharp/src/Google.Protobuf.Conformance/Program.cs (+41 -25)
  63. csharp/src/Google.Protobuf.Test/CodedInputStreamTest.cs (+72 -4)
  64. csharp/src/Google.Protobuf.Test/CodedOutputStreamTest.cs (+28 -0)
  65. csharp/src/Google.Protobuf.Test/Collections/MapFieldTest.cs (+2 -54)
  66. csharp/src/Google.Protobuf.Test/FieldCodecTest.cs (+1 -1)
  67. csharp/src/Google.Protobuf.Test/GeneratedMessageTest.cs (+89 -21)
  68. csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj (+6 -1)
  69. csharp/src/Google.Protobuf.Test/IssuesTest.cs (+19 -0)
  70. csharp/src/Google.Protobuf.Test/JsonFormatterTest.cs (+156 -49)
  71. csharp/src/Google.Protobuf.Test/JsonParserTest.cs (+936 -0)
  72. csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs (+408 -0)
  73. csharp/src/Google.Protobuf.Test/Reflection/DescriptorsTest.cs (+21 -24)
  74. csharp/src/Google.Protobuf.Test/Reflection/FieldAccessTest.cs (+2 -2)
  75. csharp/src/Google.Protobuf.Test/Reflection/TypeRegistryTest.cs (+94 -0)
  76. csharp/src/Google.Protobuf.Test/SampleMessages.cs (+8 -8)
  77. csharp/src/Google.Protobuf.Test/TestCornerCases.cs (+2 -2)
  78. csharp/src/Google.Protobuf.Test/TestProtos/ForeignMessagePartial.cs (+45 -0)
  79. csharp/src/Google.Protobuf.Test/TestProtos/MapUnittestProto3.cs (+30 -30)
  80. csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportProto3.cs (+12 -12)
  81. csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportPublicProto3.cs (+7 -7)
  82. csharp/src/Google.Protobuf.Test/TestProtos/UnittestIssues.cs (+217 -50)
  83. csharp/src/Google.Protobuf.Test/TestProtos/UnittestProto3.cs (+159 -155)
  84. csharp/src/Google.Protobuf.Test/TestProtos/UnittestWellKnownTypes.cs (+67 -28)
  85. csharp/src/Google.Protobuf.Test/WellKnownTypes/AnyTest.cs (+50 -0)
  86. csharp/src/Google.Protobuf.Test/WellKnownTypes/DurationTest.cs (+33 -5)
  87. csharp/src/Google.Protobuf.Test/WellKnownTypes/FieldMaskTest.cs (+62 -0)
  88. csharp/src/Google.Protobuf.Test/WellKnownTypes/TimestampTest.cs (+31 -0)
  89. csharp/src/Google.Protobuf.Test/WellKnownTypes/WrappersTest.cs (+80 -8)
  90. csharp/src/Google.Protobuf.sln (+3 -3)
  91. csharp/src/Google.Protobuf/ByteString.cs (+3 -3)
  92. csharp/src/Google.Protobuf/CodedInputStream.cs (+69 -13)
  93. csharp/src/Google.Protobuf/CodedOutputStream.cs (+60 -7)
  94. csharp/src/Google.Protobuf/Collections/MapField.cs (+32 -44)
  95. csharp/src/Google.Protobuf/Collections/RepeatedField.cs (+8 -8)
  96. csharp/src/Google.Protobuf/FieldCodec.cs (+77 -90)
  97. csharp/src/Google.Protobuf/Google.Protobuf.csproj (+24 -4)
  98. csharp/src/Google.Protobuf/Google.Protobuf.nuspec (+35 -18)
  99. csharp/src/Google.Protobuf/ICustomDiagnosticMessage.cs (+69 -0)
  100. csharp/src/Google.Protobuf/InvalidJsonException.cs (+53 -0)

.gitignore (+11 -0)

@@ -58,6 +58,7 @@ python/.eggs/
 python/.tox
 python/build/
 python/google/protobuf/compiler/
+python/google/protobuf/util/
 
 src/protoc
 src/unittest_proto_middleman
@@ -81,6 +82,7 @@ javanano/target
 
 # Windows native output.
 cmake/build
+build_msvc
 
 # NuGet packages: we want the repository configuration, but not the
 # packages themselves.
@@ -100,7 +102,16 @@ objectivec/ProtocolBuffers_iOS.xcodeproj/xcuserdata/
 conformance/.libs/
 conformance/com/
 conformance/conformance-cpp
+conformance/conformance-csharp
+conformance/conformance-java
+conformance/conformance-objc
 conformance/conformance-test-runner
 conformance/conformance.pb.cc
 conformance/conformance.pb.h
+conformance/Conformance.pbobjc.h
+conformance/Conformance.pbobjc.m
+conformance/conformance.rb
+conformance/google/
+conformance/javac_middleman
+conformance/lite/
 conformance/protoc_middleman

.travis.yml (+29 -1)

@@ -7,18 +7,22 @@ language: cpp
 os:
   - linux
   - osx
+# The Objective C build needs Xcode 7.0 or later.
+osx_image: xcode7.2
 script:
-  - ./travis.sh $CONFIG
+  - ./tests.sh $CONFIG
 env:
   - CONFIG=cpp
   - CONFIG=cpp_distcheck
   - CONFIG=csharp
+  - CONFIG=golang
   - CONFIG=java_jdk6
   - CONFIG=java_jdk7
   - CONFIG=java_oracle7
   - CONFIG=javanano_jdk6
   - CONFIG=javanano_jdk7
   - CONFIG=javanano_oracle7
+  - CONFIG=javascript
   - CONFIG=python
   - CONFIG=python_cpp
   - CONFIG=ruby19
@@ -46,11 +50,35 @@ matrix:
     # which doesn't work on OS X.
     - os: osx
       env: CONFIG=csharp
+    # Requires installing golang, currently travis.sh is doing that with apt-get
+    # which doesn't work on OS X.
+    - os: osx
+      env: CONFIG=golang
+  # Add into the matrix OS X tests of Objective C (needs Xcode, so it won't
+  # work on other platforms). These are split so it doesn't take as long to run.
+  include:
+    - os: osx
+      env: CONFIG=objectivec_ios
+    - os: osx
+      env: CONFIG=objectivec_osx
   allow_failures:
     # These currently do not work on OS X but are being worked on by @haberman.
     - os: osx
       env: CONFIG=ruby22
     - os: osx
       env: CONFIG=jruby
+    # https://github.com/google/protobuf/issues/1253 - Started failing when
+    # we moved to an OS X image that is 10.11.
+    - os: osx
+      env: CONFIG=python_cpp
+    # xctool 0.2.8 seems to have a bug where it randomly kills tests saying
+    # they failed.
+    #   https://github.com/facebook/xctool/issues/619
+    #   https://github.com/google/protobuf/issues/1232
+    # travis updated their images to include 0.2.8:
+    #   https://blog.travis-ci.com/2016-03-23-xcode-image-updates
+    # Mark the iOS test as flakey so these failures don't turn things red.
+    - os: osx
+      env: CONFIG=objectivec_ios
 notifications:
   email: false

BUILD (+302 -47)

@@ -15,8 +15,57 @@ COPTS = [
     "-Wno-error=unused-function",
 ]
 
-# Bazel should provide portable link_opts for pthread.
-LINK_OPTS = ["-lpthread"]
+config_setting(
+    name = "android",
+    values = {
+        "crosstool_top": "//external:android/crosstool",
+    },
+)
+
+# Android builds do not need to link in a separate pthread library.
+LINK_OPTS = select({
+    ":android": [],
+    "//conditions:default": ["-lpthread"],
+})
+
+load(
+    "protobuf",
+    "cc_proto_library",
+    "py_proto_library",
+    "internal_gen_well_known_protos_java",
+    "internal_protobuf_py_tests",
+)
+
+config_setting(
+    name = "ios_armv7",
+    values = {
+        "ios_cpu": "armv7",
+    },
+)
+
+config_setting(
+    name = "ios_armv7s",
+    values = {
+        "ios_cpu": "armv7s",
+    },
+)
+
+config_setting(
+    name = "ios_arm64",
+    values = {
+        "ios_cpu": "arm64",
+    },
+)
+
+IOS_ARM_COPTS = COPTS + [
+    "-DOS_IOS",
+    "-miphoneos-version-min=7.0",
+    "-arch armv7",
+    "-arch armv7s",
+    "-arch arm64",
+    "-D__thread=",
+    "-isysroot /Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS9.2.sdk/",
+]
 
 cc_library(
     name = "protobuf_lite",
@@ -46,7 +95,13 @@ cc_library(
         "src/google/protobuf/stubs/time.cc",
         "src/google/protobuf/wire_format_lite.cc",
     ],
-    copts = COPTS,
+    hdrs = glob(["src/google/protobuf/**/*.h"]),
+    copts = select({
+        ":ios_armv7": IOS_ARM_COPTS,
+        ":ios_armv7s": IOS_ARM_COPTS,
+        ":ios_arm64": IOS_ARM_COPTS,
+        "//conditions:default": COPTS,
+    }),
     includes = ["src/"],
     linkopts = LINK_OPTS,
     visibility = ["//visibility:public"],
@@ -97,6 +152,7 @@ cc_library(
         "src/google/protobuf/util/internal/json_objectwriter.cc",
         "src/google/protobuf/util/internal/json_stream_parser.cc",
         "src/google/protobuf/util/internal/object_writer.cc",
+        "src/google/protobuf/util/internal/proto_writer.cc",
         "src/google/protobuf/util/internal/protostream_objectsource.cc",
         "src/google/protobuf/util/internal/protostream_objectwriter.cc",
         "src/google/protobuf/util/internal/type_info.cc",
@@ -109,7 +165,13 @@ cc_library(
         "src/google/protobuf/wire_format.cc",
         "src/google/protobuf/wrappers.pb.cc",
     ],
-    copts = COPTS,
+    hdrs = glob(["src/**/*.h"]),
+    copts = select({
+        ":ios_armv7": IOS_ARM_COPTS,
+        ":ios_armv7s": IOS_ARM_COPTS,
+        ":ios_arm64": IOS_ARM_COPTS,
+        "//conditions:default": COPTS,
+    }),
     includes = ["src/"],
     linkopts = LINK_OPTS,
     visibility = ["//visibility:public"],
@@ -124,7 +186,7 @@ objc_library(
     visibility = ["//visibility:public"],
 )
 
-WELL_KNOWN_PROTOS = [
+RELATIVE_WELL_KNOWN_PROTOS = [
     # AUTOGEN(well_known_protos)
     "google/protobuf/any.proto",
     "google/protobuf/api.proto",
@@ -140,6 +202,24 @@ WELL_KNOWN_PROTOS = [
     "google/protobuf/wrappers.proto",
 ]
 
+WELL_KNOWN_PROTOS = ["src/" + s for s in RELATIVE_WELL_KNOWN_PROTOS]
+
+filegroup(
+    name = "well_known_protos",
+    srcs = WELL_KNOWN_PROTOS,
+    visibility = ["//visibility:public"],
+)
+
+cc_proto_library(
+    name = "cc_wkt_protos",
+    srcs = WELL_KNOWN_PROTOS,
+    include = "src",
+    default_runtime = ":protobuf",
+    internal_bootstrap_hack = 1,
+    protoc = ":protoc",
+    visibility = ["//visibility:public"],
+)
+
 ################################################################################
 # Protocol Buffers Compiler
 ################################################################################
@@ -173,11 +253,11 @@ cc_library(
         "src/google/protobuf/compiler/csharp/csharp_message.cc",
         "src/google/protobuf/compiler/csharp/csharp_message_field.cc",
         "src/google/protobuf/compiler/csharp/csharp_primitive_field.cc",
+        "src/google/protobuf/compiler/csharp/csharp_reflection_class.cc",
         "src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc",
         "src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc",
         "src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc",
         "src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc",
-        "src/google/protobuf/compiler/csharp/csharp_umbrella_class.cc",
         "src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc",
         "src/google/protobuf/compiler/java/java_context.cc",
         "src/google/protobuf/compiler/java/java_doc_comment.cc",
@@ -186,6 +266,7 @@ cc_library(
         "src/google/protobuf/compiler/java/java_enum_field_lite.cc",
         "src/google/protobuf/compiler/java/java_enum_lite.cc",
         "src/google/protobuf/compiler/java/java_extension.cc",
+        "src/google/protobuf/compiler/java/java_extension_lite.cc",
         "src/google/protobuf/compiler/java/java_field.cc",
         "src/google/protobuf/compiler/java/java_file.cc",
         "src/google/protobuf/compiler/java/java_generator.cc",
@@ -219,6 +300,7 @@ cc_library(
         "src/google/protobuf/compiler/javanano/javanano_message.cc",
         "src/google/protobuf/compiler/javanano/javanano_message_field.cc",
         "src/google/protobuf/compiler/javanano/javanano_primitive_field.cc",
+        "src/google/protobuf/compiler/js/js_generator.cc",
         "src/google/protobuf/compiler/objectivec/objectivec_enum.cc",
         "src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc",
         "src/google/protobuf/compiler/objectivec/objectivec_extension.cc",
@@ -253,33 +335,11 @@ cc_binary(
     deps = [":protoc_lib"],
 )
 
-################################################################################
-# Java support
-################################################################################
-genrule(
-    name = "generate_java_descriptor_proto",
-    tools = [":protoc"],
-    srcs = [ "src/google/protobuf/descriptor.proto", ],
-    outs = [ "com/google/protobuf/DescriptorProtos.java" ],
-    cmd = "$(location :protoc) --java_out=$(@D)/../../.. $<",
-)
-
-java_library(
-    name = "java_proto",
-    visibility = ["//visibility:public"],
-    srcs = glob([
-        "java/src/main/java/com/google/protobuf/*.java"
-    ]) + [
-      ":generate_java_descriptor_proto",
-    ]
-)
-
-
 ################################################################################
 # Tests
 ################################################################################
 
-LITE_TEST_PROTOS = [
+RELATIVE_LITE_TEST_PROTOS = [
     # AUTOGEN(lite_test_protos)
     "google/protobuf/map_lite_unittest.proto",
     "google/protobuf/unittest_import_lite.proto",
@@ -288,7 +348,9 @@ LITE_TEST_PROTOS = [
     "google/protobuf/unittest_no_arena_lite.proto",
 ]
 
-TEST_PROTOS = [
+LITE_TEST_PROTOS = ["src/" + s for s in RELATIVE_LITE_TEST_PROTOS]
+
+RELATIVE_TEST_PROTOS = [
     # AUTOGEN(test_protos)
     "google/protobuf/any_test.proto",
     "google/protobuf/compiler/cpp/cpp_test_bad_identifiers.proto",
@@ -315,6 +377,8 @@ TEST_PROTOS = [
     "google/protobuf/unittest_preserve_unknown_enum.proto",
     "google/protobuf/unittest_preserve_unknown_enum2.proto",
     "google/protobuf/unittest_proto3_arena.proto",
+    "google/protobuf/unittest_proto3_arena_lite.proto",
+    "google/protobuf/unittest_proto3_lite.proto",
     "google/protobuf/unittest_well_known_types.proto",
     "google/protobuf/util/internal/testdata/anys.proto",
     "google/protobuf/util/internal/testdata/books.proto",
@@ -326,24 +390,18 @@ TEST_PROTOS = [
     "google/protobuf/util/internal/testdata/struct.proto",
     "google/protobuf/util/internal/testdata/timestamp_duration.proto",
     "google/protobuf/util/json_format_proto3.proto",
+    "google/protobuf/util/message_differencer_unittest.proto",
 ]
 
-PROTOS = LITE_TEST_PROTOS + TEST_PROTOS
-
-INPUTS = PROTOS + WELL_KNOWN_PROTOS
-
-OUTPUTS = ["src/" + x[:-5] + "pb.h" for x in PROTOS] + \
-          ["src/" + x[:-5] + "pb.cc" for x in PROTOS]
+TEST_PROTOS = ["src/" + s for s in RELATIVE_TEST_PROTOS]
 
-genrule(
-    name = "gen_test_protos",
-    srcs = ["src/" + x for x in INPUTS],
-    outs = OUTPUTS,
-    cmd =
-        "$(location :protoc) --cpp_out=$(@D)/src" +
-        "".join([" -I" + x + "=$(location src/" + x + ")" for x in INPUTS]) +
-        "".join([" $(location src/" + x + ")" for x in PROTOS]),
-    tools = [":protoc"],
+cc_proto_library(
+    name = "cc_test_protos",
+    srcs = LITE_TEST_PROTOS + TEST_PROTOS,
+    include = "src",
+    default_runtime = ":protobuf",
+    protoc = ":protoc",
+    deps = [":cc_wkt_protos"],
 )
 
 COMMON_TEST_SRCS = [
@@ -372,7 +430,7 @@ cc_binary(
 
 cc_test(
     name = "protobuf_test",
-    srcs = OUTPUTS + COMMON_TEST_SRCS + [
+    srcs = COMMON_TEST_SRCS + [
         # AUTOGEN(test_srcs)
         "src/google/protobuf/any_test.cc",
         "src/google/protobuf/arena_unittest.cc",
@@ -381,6 +439,7 @@ cc_test(
         "src/google/protobuf/compiler/cpp/cpp_bootstrap_unittest.cc",
         "src/google/protobuf/compiler/cpp/cpp_plugin_unittest.cc",
         "src/google/protobuf/compiler/cpp/cpp_unittest.cc",
+        "src/google/protobuf/compiler/cpp/metadata_test.cc",
         "src/google/protobuf/compiler/csharp/csharp_generator_unittest.cc",
         "src/google/protobuf/compiler/importer_unittest.cc",
         "src/google/protobuf/compiler/java/java_doc_comment_unittest.cc",
@@ -405,7 +464,9 @@ cc_test(
         "src/google/protobuf/message_unittest.cc",
         "src/google/protobuf/no_field_presence_test.cc",
         "src/google/protobuf/preserve_unknown_enum_test.cc",
+        "src/google/protobuf/proto3_arena_lite_unittest.cc",
         "src/google/protobuf/proto3_arena_unittest.cc",
+        "src/google/protobuf/proto3_lite_unittest.cc",
         "src/google/protobuf/reflection_ops_unittest.cc",
         "src/google/protobuf/repeated_field_reflection_unittest.cc",
         "src/google/protobuf/repeated_field_unittest.cc",
@@ -433,6 +494,7 @@ cc_test(
         "src/google/protobuf/util/internal/protostream_objectwriter_test.cc",
         "src/google/protobuf/util/internal/type_info_test_helper.cc",
         "src/google/protobuf/util/json_util_test.cc",
+        "src/google/protobuf/util/message_differencer_unittest.cc",
         "src/google/protobuf/util/time_util_test.cc",
         "src/google/protobuf/util/type_resolver_util_test.cc",
         "src/google/protobuf/well_known_types_unittest.cc",
@@ -441,14 +503,207 @@ cc_test(
     copts = COPTS,
     data = [
         ":test_plugin",
-    ],
+    ] + glob([
+        "src/google/protobuf/**/*",
+    ]),
     includes = [
         "src/",
     ],
     linkopts = LINK_OPTS,
     deps = [
+        ":cc_test_protos",
         ":protobuf",
         ":protoc_lib",
         "//external:gtest_main",
     ],
 )
+
+################################################################################
+# Java support
+################################################################################
+internal_gen_well_known_protos_java(
+    srcs = WELL_KNOWN_PROTOS,
+)
+
+java_library(
+    name = "protobuf_java",
+    srcs = glob([
+        "java/core/src/main/java/com/google/protobuf/*.java",
+    ]) + [
+        ":gen_well_known_protos_java",
+    ],
+    visibility = ["//visibility:public"],
+)
+
+java_library(
+    name = "protobuf_java_util",
+    srcs = glob([
+        "java/util/src/main/java/com/google/protobuf/util/*.java",
+    ]),
+    deps = [
+        "protobuf_java",
+        "//external:gson",
+        "//external:guava",
+    ],
+    visibility = ["//visibility:public"],
+)
+
+################################################################################
+# Python support
+################################################################################
+
+py_library(
+    name = "python_srcs",
+    srcs = glob(
+        [
+            "python/google/protobuf/*.py",
+            "python/google/protobuf/**/*.py",
+        ],
+        exclude = [
+            "python/google/protobuf/internal/*_test.py",
+            "python/google/protobuf/internal/test_util.py",
+        ],
+    ),
+    srcs_version = "PY2AND3",
+    imports = ["python"],
+)
+
+cc_binary(
+    name = "internal/_api_implementation.so",
+    srcs = ["python/google/protobuf/internal/api_implementation.cc"],
+    copts = COPTS + [
+        "-DPYTHON_PROTO2_CPP_IMPL_V2",
+    ],
+    linkshared = 1,
+    linkstatic = 1,
+    deps = select({
+        "//conditions:default": [],
+        ":use_fast_cpp_protos": ["//external:python_headers"],
+    }),
+)
+
+cc_binary(
+    name = "pyext/_message.so",
+    srcs = glob([
+        "python/google/protobuf/pyext/*.cc",
+        "python/google/protobuf/pyext/*.h",
+    ]),
+    copts = COPTS + [
+        "-DGOOGLE_PROTOBUF_HAS_ONEOF=1",
+    ] + select({
+        "//conditions:default": [],
+        ":allow_oversize_protos": ["-DPROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS=1"],
+    }),
+    includes = [
+        "python/",
+        "src/",
+    ],
+    linkshared = 1,
+    linkstatic = 1,
+    deps = [
+        ":protobuf",
+    ] + select({
+        "//conditions:default": [],
+        ":use_fast_cpp_protos": ["//external:python_headers"],
+    }),
+)
+
+config_setting(
+    name = "use_fast_cpp_protos",
+    values = {
+        "define": "use_fast_cpp_protos=true",
+    },
+)
+
+config_setting(
+    name = "allow_oversize_protos",
+    values = {
+        "define": "allow_oversize_protos=true",
+    },
+)
+
+py_proto_library(
+    name = "protobuf_python",
+    srcs = WELL_KNOWN_PROTOS,
+    include = "src",
+    data = select({
+        "//conditions:default": [],
+        ":use_fast_cpp_protos": [
+            ":internal/_api_implementation.so",
+            ":pyext/_message.so",
+        ],
+    }),
+    default_runtime = "",
+    protoc = ":protoc",
+    py_libs = [
+        ":python_srcs",
+        "//external:six"
+    ],
+    srcs_version = "PY2AND3",
+    visibility = ["//visibility:public"],
+)
+
+py_proto_library(
+    name = "python_common_test_protos",
+    srcs = LITE_TEST_PROTOS + TEST_PROTOS,
+    include = "src",
+    default_runtime = "",
+    protoc = ":protoc",
+    srcs_version = "PY2AND3",
+    deps = [":protobuf_python"],
+)
+
+py_proto_library(
+    name = "python_specific_test_protos",
+    srcs = glob([
+        "python/google/protobuf/internal/*.proto",
+        "python/google/protobuf/internal/import_test_package/*.proto",
+    ]),
+    include = "python",
+    default_runtime = ":protobuf_python",
+    protoc = ":protoc",
+    srcs_version = "PY2AND3",
+    deps = [":python_common_test_protos"],
+)
+
+py_library(
+    name = "python_tests",
+    srcs = glob(
+        [
+            "python/google/protobuf/internal/*_test.py",
+            "python/google/protobuf/internal/test_util.py",
+        ],
+    ),
+    imports = ["python"],
+    srcs_version = "PY2AND3",
+    deps = [
+        ":protobuf_python",
+        ":python_common_test_protos",
+        ":python_specific_test_protos",
+    ],
+)
+
+internal_protobuf_py_tests(
+    name = "python_tests_batch",
+    data = glob([
+        "src/google/protobuf/**/*",
+    ]),
+    modules = [
+        "descriptor_database_test",
+        "descriptor_pool_test",
+        "descriptor_test",
+        "generator_test",
+        "json_format_test",
+        "message_factory_test",
+        "message_test",
+        "proto_builder_test",
+        "reflection_test",
+        "service_reflection_test",
+        "symbol_database_test",
+        "text_encoding_test",
+        "text_format_test",
+        "unknown_fields_test",
+        "wire_format_test",
+    ],
+    deps = [":python_tests"],
+)

CHANGES.txt (+177 -8)

@@ -1,3 +1,172 @@
+2016-05-10 version 3.0.0-beta-3 (C++/Java/Python/Ruby/Nano/Objective-C/C#/JavaScript)
+  General
+  * Supported Proto3 lite-runtime in C++/Java for mobile platforms.
+  * Any type now supports APIs to specify prefixes other than
+    type.googleapis.com
+
+  C++ (Beta)
+  * Improved hash maps. TODO(gpike)
+  * Arenas
+      - Several inlined methods in Arena were moved to out-of-line to improve
+        build performance and code size.
+      - Added SpaceAllocatedAndUsed() to report both space used and allocated
+      - Added convenient class UnsafeArenaAllocatedRepeatedPtrFieldBackInserter
+  * Any
+      - Allow custom type URL prefixes in Any packing.
+      - TextFormat now expand the Any type rather than printing bytes.
+  * Performance optimizations and various bug fixes.
+
+  Java (Beta)
+  * Introduced an ExperimentalApi annotation. Annotated APIs are experimental
+    and are subject to change in a backward incompatible way in future releases.
+  * Introduced zero-copy serialization as an ExperimentalApi
+      - Introduction of the `ByteOutput` interface. This is similar to
+        `OutputStream` but provides semantics for lazy writing (i.e. no
+        immediate copy required) of fields that are considered to be immutable.
+      - `ByteString` now supports writing to a `ByteOutput`, which will directly
+        expose the internals of the `ByteString` (i.e. `byte[]` or `ByteBuffer`)
+        to the `ByteOutput` without copying.
+      - `CodedOutputStream` now supports writing to a `ByteOutput`. `ByteString`
+        instances that are too large to fit in the internal buffer will be
+        (lazily) written to the `ByteOutput` directly.
+      - This allows applications using large `ByteString` fields to avoid
+        duplication of these fields entirely. Such an application can supply a
+        `ByteOutput` that chains together the chunks received from
+        `CodedOutputStream` before forwarding them onto the IO system.
+  * Other related changes to `CodedOutputStream`
+      - Additional use of `sun.misc.Unsafe` where possible to perform fast
+        access to `byte[]` and `ByteBuffer` values and avoiding unnecessary
+        range checking.
+      - `ByteBuffer`-backed `CodedOutputStream` now writes directly to the
+        `ByteBuffer` rather than to an intermediate array.
+  * Improved the performance for lite-runtime. TODO(dweis)
+  * Various bug fixes and small feature enhancement.
+      - Fixed stack overflow when in hashCode() for infinite recursive oneofs.
+      - Fixed the lazy field parsing in lite to merge rather than overwrite.
+      - TextFormat now supports reporting line/column numbers on errors.
+
+  Python (Beta)
+  * TODO(jieluo)
+
+  Objective-C (Beta)
+  * Proto comments now come over as HeaderDoc comments in the generated sources
+    so Xcode can pick them up and display them.
+  * The library headers have been updated to use HeaderDoc comments so Xcode can
+    pick them up and display them.
+  * The per message and per field overhead in both generated code and runtime
+    object sizes was reduced.
+  * Generated code now include deprecated annotations when the proto file
+    included them.
+
+  C# (Beta)
+  In general: some changes are breaking, which require regenerating messages.
+  Most user-written code will not be impacted *except* for the renaming of enum
+  values.
+
+  * Allow custom type URL prefixes in `Any` packing, and ignore them when
+    unpacking
+  * `protoc` is now in a separate NuGet package (Google.Protobuf.Tools)
+  * New option: `internal_access` to generate internal classes
+  * Enum values are now PascalCased, and if there's a prefix which matches the
+    name of the enum, that is removed (so an enum `COLOR` with a value
+    `COLOR_BLUE` would generate a value of just `Blue`). An option
+    (`legacy_enum_values`) is temporarily available to disable this, but the
+    option will be removed for GA.
+  * `json_name` option is now honored
+  * If group tags are encountered when parsing, they are validated more
+    thoroughly (although we don't support actual groups)
+  * NuGet dependencies are better specified
+  * Breaking: `Preconditions` is renamed to `ProtoPreconditions`
+  * Breaking: `GeneratedCodeInfo` is renamed to `GeneratedClrTypeInfo`
+  * `JsonFormatter` now allows writing to a `TextWriter`
+  * New interface, `ICustomDiagnosticMessage` to allow more compact
+    representations from `ToString`
+  * `CodedInputStream` and `CodedOutputStream` now implement `IDisposable`,
+    which simply disposes of the streams they were constructed with
+  * Map fields no longer support null values (in line with other languages)
+  * Improvements in JSON formatting and parsing
+
+  Javascript (Alpha)
+  * TODO(haberman)
+
+  C++/Java Lite (Alpha)
+    A new "lite" generator parameter was introduced in the protoc for C++ and
+    Java for Proto3 syntax messages. Example usage:
+
+     ./protoc --cpp_out=lite:$OUTPUT_PATH foo.proto
+
+    The protoc will treat the current input and all the transitive dependencies
+    as LITE. The same generator parameter must be used to generate the
+    dependencies.
+
+    In Proto3 syntax files, "optimized_for=LITE_RUNTIME" is no longer supported.
+
+
+2015-12-30 version 3.0.0-beta-2 (C++/Java/Python/Ruby/Nano/Objective-C/C#/JavaScript)
+  General
+  * Introduced a new language implementation: JavaScript.
+  * Added a new field option "json_name". By default proto field names are
+    converted to "lowerCamelCase" in proto3 JSON format. This option can be
+    used to override this behavior and specify a different JSON name for the
+    field.
+  * Added conformance tests to ensure implementations are following proto3 JSON
+    specification.
+
+  C++ (Beta)
+  * Various bug fixes and improvements to the JSON support utility:
+      - Duplicate map keys in JSON are now rejected (i.e., translation will
+        fail).
+      - Fixed wire-format for google.protobuf.Value/ListValue.
+      - Fixed precision loss when converting google.protobuf.Timestamp.
+      - Fixed a bug when parsing invalid UTF-8 code points.
+      - Fixed a memory leak.
+      - Reduced call stack usage.
+
+  Java (Beta)
+  * Cleaned up some unused methods on CodedOutputStream.
+  * Presized lists for packed fields during parsing in the lite runtime to
+    reduce allocations and improve performance.
+  * Improved the performance of unknown fields in the lite runtime.
+  * Introduced UnsafeByteStrings to support zero-copy ByteString creation.
+  * Various bug fixes and improvements to the JSON support utility:
+      - Fixed a thread-safety bug.
+      - Added a new option “preservingProtoFieldNames” to JsonFormat.
+      - Added a new option “includingDefaultValueFields” to JsonFormat.
+      - Updated the JSON utility to comply with proto3 JSON specification.
+
+  Python (Beta)
+  * Added proto3 JSON format utility. It includes support for all field types
+    and a few well-known types except for Any and Struct.
+  * Added runtime support for Any, Timestamp, Duration and FieldMask.
+  * [ ] is now accepted for repeated scalar fields in text format parser.
+  * Map fields now have proper O(1) performance for lookup/insert/delete
+    when using the Python/C++ implementation. They were previously using O(n)
+    search-based algorithms because the C++ reflection interface didn't
+    support true map operations.
+
+  Objective-C (Beta)
+  * Various bug-fixes and code tweaks to pass more strict compiler warnings.
+  * Now has conformance test coverage and is passing all tests.
+
+  C# (Beta)
+  * Various bug-fixes.
+  * Code generation: Files generated in directories based on namespace.
+  * Code generation: Include comments from .proto files in XML doc
+    comments (naively)
+  * Code generation: Change organization/naming of "reflection class" (access
+    to file descriptor)
+  * Code generation and library: Add Parser property to MessageDescriptor,
+    and introduce a non-generic parser type.
+  * Library: Added TypeRegistry to support JSON parsing/formatting of Any.
+  * Library: Added Any.Pack/Unpack support.
+  * Library: Implemented JSON parsing.
+
+  Javascript (Alpha)
+  * Added proto3 support for JavaScript. The runtime is written in pure
+    JavaScript and works in browsers and in Node.js. To generate JavaScript
+    code for your proto, invoke protoc with "--js_out". See js/README.md
+    for more build instructions.
+
 2015-08-26 version 3.0.0-beta-1 (C++/Java/Python/Ruby/Nano/Objective-C/C#)
   About Beta
   * This is the first beta release of protobuf v3.0.0. Not all languages
@@ -115,7 +284,7 @@
     still disable packed serialization by setting packed to false for now.
   * Added well-known type protos (any.proto, empty.proto, timestamp.proto,
     duration.proto, etc.). Users can import and use these protos just like
-    regular proto files. Addtional runtime support will be added for them in
+    regular proto files. Additional runtime support will be added for them in
     future releases (in the form of utility helper functions, or having them
     replaced by language specific types in generated code).
   * Added a "reserved" keyword in both proto2 and proto3 syntax. User can use
@@ -157,7 +326,7 @@
       https://github.com/jskeet/protobuf-csharp-port. The original project was
       frozen and all the new development will happen here.
     * Codegen plugin for C# was completely rewritten to C++ and is now an
-      intergral part of protoc.
+      integral part of protoc.
     * Some refactorings and cleanup has been applied to the C# runtime library.
     * Only proto2 is supported in C# at the moment, proto3 support is in
       progress and will likely bring significant breaking changes to the API.
@@ -321,7 +490,7 @@
 
     This release (v3.0.0-alpha-1) includes partial proto3 support for C++ and
     Java. Items 6 (well-known types) and 7 (JSON format) in the above feature
-    list are not impelmented.
+    list are not implemented.
 
     A new notion "syntax" is introduced to specify whether a .proto file
     uses proto2 or proto3:
@@ -422,7 +591,7 @@
       }
   * Files, services, enums, messages, methods and enum values can be marked
     as deprecated now.
-  * Added Support for list values, including lists of mesaages, when
+  * Added Support for list values, including lists of messages, when
     parsing text-formatted protos in C++ and Java.
       For example:  foo: [1, 2, 3]
 
@@ -502,7 +671,7 @@
 
   Python
   * Added support for dynamic message creation. DescriptorDatabase,
-    DescriptorPool, and MessageFactory work like their C++ couterparts to
+    DescriptorPool, and MessageFactory work like their C++ counterparts to
     simplify Descriptor construction from *DescriptorProtos, and MessageFactory
     provides a message instance from a Descriptor.
   * Added pickle support for protobuf messages.
@@ -516,7 +685,7 @@
 2011-05-01 version 2.4.1:
 
   C++
-  * Fixed the frendship problem for old compilers to make the library now gcc 3
+  * Fixed the friendship problem for old compilers to make the library now gcc 3
     compatible again.
   * Fixed vcprojects/extract_includes.bat to extract compiler/plugin.h.
 
@@ -783,7 +952,7 @@
   * Fixed tendency for TextFormat's parsing to overflow the stack when
     parsing large string values.  The underlying problem is with Java's
     regex implementation (which unfortunately uses recursive backtracking
-    rather than building an NFA).  Worked around by making use of possesive
+    rather than building an NFA).  Worked around by making use of possessive
     quantifiers.
   * Generated service classes now also generate pure interfaces.  For a service
     Foo, Foo.Interface is a pure interface containing all of the service's
@@ -797,7 +966,7 @@
     RPC implementations will have to implement the new interfaces in order to
     support blocking mode.
   * New I/O methods parseDelimitedFrom(), mergeDelimitedFrom(), and
-    writeDelimitedTo() read and write "delemited" messages from/to a stream,
+    writeDelimitedTo() read and write "delimited" messages from/to a stream,
     meaning that the message size precedes the data.  This way, you can write
     multiple messages to a stream without having to worry about delimiting
     them yourself.
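
The beta-3 notes above describe new Any APIs that accept a custom type URL prefix in place of the default type.googleapis.com. Below is a minimal C++ sketch of that usage, assuming the two-argument PackFrom overload described in the notes and using google.protobuf.Duration as a stand-in payload; the prefix "types.example.com" is purely illustrative.

    // Sketch only: pack an Any with a custom type URL prefix, then unpack it.
    #include <iostream>

    #include <google/protobuf/any.pb.h>
    #include <google/protobuf/duration.pb.h>

    int main() {
      google::protobuf::Duration payload;
      payload.set_seconds(90);

      google::protobuf::Any any;
      // The optional second argument replaces the default
      // "type.googleapis.com" prefix in the resulting type URL.
      any.PackFrom(payload, "types.example.com");
      std::cout << any.type_url() << std::endl;
      // -> types.example.com/google.protobuf.Duration

      // Unpacking matches on the fully qualified type name after the last '/',
      // so a custom prefix does not prevent UnpackTo from succeeding.
      google::protobuf::Duration restored;
      if (any.UnpackTo(&restored)) {
        std::cout << restored.seconds() << std::endl;  // 90
      }
      return 0;
    }

Compile against a protobuf C++ runtime that includes the beta-3 Any changes; the well-known-type headers (any.pb.h, duration.pb.h) are generated as part of the normal C++ build.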

INSTALL.txt (+0 -237)

@@ -1,237 +0,0 @@
-This file contains detailed but generic information on building and
-installing the C++ part of this project.  For shorter instructions,
-as well as instructions for compiling and installing the Java or
-Python parts, see README.
-
-======================================================================
-
-Copyright 1994, 1995, 1996, 1999, 2000, 2001, 2002 Free Software
-Foundation, Inc.
-
-   This file is free documentation; the Free Software Foundation gives
-unlimited permission to copy, distribute and modify it.
-
-
-Basic Installation
-==================
-
-   These are generic installation instructions.
-
-   The `configure' shell script attempts to guess correct values for
-various system-dependent variables used during compilation.  It uses
-those values to create a `Makefile' in each directory of the package.
-It may also create one or more `.h' files containing system-dependent
-definitions.  Finally, it creates a shell script `config.status' that
-you can run in the future to recreate the current configuration, and a
-file `config.log' containing compiler output (useful mainly for
-debugging `configure').
-
-   It can also use an optional file (typically called `config.cache'
-and enabled with `--cache-file=config.cache' or simply `-C') that saves
-the results of its tests to speed up reconfiguring.  (Caching is
-disabled by default to prevent problems with accidental use of stale
-cache files.)
-
-   If you need to do unusual things to compile the package, please try
-to figure out how `configure' could check whether to do them, and mail
-diffs or instructions to the address given in the `README' so they can
-be considered for the next release.  If you are using the cache, and at
-some point `config.cache' contains results you don't want to keep, you
-may remove or edit it.
-
-   The file `configure.ac' (or `configure.in') is used to create
-`configure' by a program called `autoconf'.  You only need
-`configure.ac' if you want to change it or regenerate `configure' using
-a newer version of `autoconf'.
-
-The simplest way to compile this package is:
-
-  1. `cd' to the directory containing the package's source code and type
-     `./configure' to configure the package for your system.  If you're
-     using `csh' on an old version of System V, you might need to type
-     `sh ./configure' instead to prevent `csh' from trying to execute
-     `configure' itself.
-
-     Running `configure' takes awhile.  While running, it prints some
-     messages telling which features it is checking for.
-
-  2. Type `make' to compile the package.
-
-  3. Optionally, type `make check' to run any self-tests that come with
-     the package.
-
-  4. Type `make install' to install the programs and any data files and
-     documentation.
-
-  5. You can remove the program binaries and object files from the
-     source code directory by typing `make clean'.  To also remove the
-     files that `configure' created (so you can compile the package for
-     a different kind of computer), type `make distclean'.  There is
-     also a `make maintainer-clean' target, but that is intended mainly
-     for the package's developers.  If you use it, you may have to get
-     all sorts of other programs in order to regenerate files that came
-     with the distribution.
-
-Compilers and Options
-=====================
-
-   Some systems require unusual options for compilation or linking that
-the `configure' script does not know about.  Run `./configure --help'
-for details on some of the pertinent environment variables.
-
-   You can give `configure' initial values for configuration parameters
-by setting variables in the command line or in the environment.  Here
-is an example:
-
-     ./configure CC=c89 CFLAGS=-O2 LIBS=-lposix
-
-   *Note Defining Variables::, for more details.
-
-Compiling For Multiple Architectures
-====================================
-
-   You can compile the package for more than one kind of computer at the
-same time, by placing the object files for each architecture in their
-own directory.  To do this, you must use a version of `make' that
-supports the `VPATH' variable, such as GNU `make'.  `cd' to the
-directory where you want the object files and executables to go and run
-the `configure' script.  `configure' automatically checks for the
-source code in the directory that `configure' is in and in `..'.
-
-   If you have to use a `make' that does not support the `VPATH'
-variable, you have to compile the package for one architecture at a
-time in the source code directory.  After you have installed the
-package for one architecture, use `make distclean' before reconfiguring
-for another architecture.
-
-Installation Names
-==================
-
-   By default, `make install' will install the package's files in
-`/usr/local/bin', `/usr/local/man', etc.  You can specify an
-installation prefix other than `/usr/local' by giving `configure' the
-option `--prefix=PATH'.
-
-   You can specify separate installation prefixes for
-architecture-specific files and architecture-independent files.  If you
-give `configure' the option `--exec-prefix=PATH', the package will use
-PATH as the prefix for installing programs and libraries.
-Documentation and other data files will still use the regular prefix.
-
-   In addition, if you use an unusual directory layout you can give
-options like `--bindir=PATH' to specify different values for particular
-kinds of files.  Run `configure --help' for a list of the directories
-you can set and what kinds of files go in them.
-
-   If the package supports it, you can cause programs to be installed
-with an extra prefix or suffix on their names by giving `configure' the
-option `--program-prefix=PREFIX' or `--program-suffix=SUFFIX'.
-
-Optional Features
-=================
-
-   Some packages pay attention to `--enable-FEATURE' options to
-`configure', where FEATURE indicates an optional part of the package.
-They may also pay attention to `--with-PACKAGE' options, where PACKAGE
-is something like `gnu-as' or `x' (for the X Window System).  The
-`README' should mention any `--enable-' and `--with-' options that the
-package recognizes.
-
-   For packages that use the X Window System, `configure' can usually
-find the X include and library files automatically, but if it doesn't,
-you can use the `configure' options `--x-includes=DIR' and
-`--x-libraries=DIR' to specify their locations.
-
-Specifying the System Type
-==========================
-
-   There may be some features `configure' cannot figure out
-automatically, but needs to determine by the type of machine the package
-will run on.  Usually, assuming the package is built to be run on the
-_same_ architectures, `configure' can figure that out, but if it prints
-a message saying it cannot guess the machine type, give it the
-`--build=TYPE' option.  TYPE can either be a short name for the system
-type, such as `sun4', or a canonical name which has the form:
-
-     CPU-COMPANY-SYSTEM
-
-where SYSTEM can have one of these forms:
-
-     OS KERNEL-OS
-
-   See the file `config.sub' for the possible values of each field.  If
-`config.sub' isn't included in this package, then this package doesn't
-need to know the machine type.
-
-   If you are _building_ compiler tools for cross-compiling, you should
-use the `--target=TYPE' option to select the type of system they will
-produce code for.
-
-   If you want to _use_ a cross compiler, that generates code for a
-platform different from the build platform, you should specify the
-"host" platform (i.e., that on which the generated programs will
-eventually be run) with `--host=TYPE'.
-
-Sharing Defaults
-================
-
-   If you want to set default values for `configure' scripts to share,
-you can create a site shell script called `config.site' that gives
-default values for variables like `CC', `cache_file', and `prefix'.
-`configure' looks for `PREFIX/share/config.site' if it exists, then
-`PREFIX/etc/config.site' if it exists.  Or, you can set the
-`CONFIG_SITE' environment variable to the location of the site script.
-A warning: not all `configure' scripts look for a site script.
-
-Defining Variables
-==================
-
-   Variables not defined in a site shell script can be set in the
-environment passed to `configure'.  However, some packages may run
-configure again during the build, and the customized values of these
-variables may be lost.  In order to avoid this problem, you should set
-them in the `configure' command line, using `VAR=value'.  For example:
-
-     ./configure CC=/usr/local2/bin/gcc
-
-will cause the specified gcc to be used as the C compiler (unless it is
-overridden in the site shell script).
-
-`configure' Invocation
-======================
-
-   `configure' recognizes the following options to control how it
-operates.
-
-`--help'
-`-h'
-     Print a summary of the options to `configure', and exit.
-
-`--version'
-`-V'
-     Print the version of Autoconf used to generate the `configure'
-     script, and exit.
-
-`--cache-file=FILE'
-     Enable the cache: use and save the results of the tests in FILE,
-     traditionally `config.cache'.  FILE defaults to `/dev/null' to
-     disable caching.
-
-`--config-cache'
-`-C'
-     Alias for `--cache-file=config.cache'.
-
-`--quiet'
-`--silent'
-`-q'
-     Do not print messages saying which checks are being made.  To
-     suppress all normal output, redirect it to `/dev/null' (any error
-     messages will still be shown).
-
-`--srcdir=DIR'
-     Look for the package's source code in directory DIR.  Usually
-     `configure' can determine that directory automatically.
-
-`configure' also accepts some other, not widely useful, options.  Run
-`configure --help' for more details.
-

LICENSE (+1 -1)

@@ -5,7 +5,7 @@ This license applies to all parts of Protocol Buffers except the following:
     This file is copyrighted by Red Hat Inc.
 
   - Atomicops support for AIX/POWER, located in
-    src/google/protobuf/stubs/atomicops_internals_aix.h.
+    src/google/protobuf/stubs/atomicops_internals_power.h.
     This file is copyrighted by Bloomberg Finance LP.
 
 Copyright 2014, Google Inc.  All rights reserved.

Makefile.am (+266 -187)

@@ -9,7 +9,7 @@ AUTOMAKE_OPTIONS = foreign
 SUBDIRS = . src
 
 # Always include gmock in distributions.
-DIST_SUBDIRS = $(subdirs) src conformance
+DIST_SUBDIRS = $(subdirs) src conformance benchmarks
 
 # Build gmock before we build protobuf tests.  We don't add gmock to SUBDIRS
 # because then "make check" would also build and run all of gmock's own tests,
@@ -35,6 +35,14 @@ clean-local:
 	if test -e conformance/Makefile; then \
 	  echo "Making clean in conformance"; \
 	  cd conformance && $(MAKE) $(AM_MAKEFLAGS) clean; \
+	fi; \
+	if test -e benchmarks/Makefile; then \
+	  echo "Making clean in benchmarks"; \
+	  cd benchmarks && $(MAKE) $(AM_MAKEFLAGS) clean; \
+	fi; \
+	if test -e objectivec/DevTools; then \
+	  echo "Cleaning any ObjC pyc files"; \
+	  rm -f objectivec/DevTools/*.pyc; \
 	fi
 
 pkgconfigdir = $(libdir)/pkgconfig
@@ -82,10 +90,13 @@ csharp_EXTRA_DIST=                                                           \
   csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj                \
   csharp/src/Google.Protobuf.Test/IssuesTest.cs                              \
   csharp/src/Google.Protobuf.Test/JsonFormatterTest.cs                       \
+  csharp/src/Google.Protobuf.Test/JsonParserTest.cs                          \
+  csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs                       \
   csharp/src/Google.Protobuf.Test/Properties/AppManifest.xml                 \
   csharp/src/Google.Protobuf.Test/Properties/AssemblyInfo.cs                 \
   csharp/src/Google.Protobuf.Test/Reflection/DescriptorsTest.cs              \
   csharp/src/Google.Protobuf.Test/Reflection/FieldAccessTest.cs              \
+  csharp/src/Google.Protobuf.Test/Reflection/TypeRegistryTest.cs             \
   csharp/src/Google.Protobuf.Test/SampleEnum.cs                              \
   csharp/src/Google.Protobuf.Test/SampleMessages.cs                          \
   csharp/src/Google.Protobuf.Test/TestCornerCases.cs                         \
@@ -117,16 +128,20 @@ csharp_EXTRA_DIST=                                                           \
   csharp/src/Google.Protobuf/Google.Protobuf.nuspec                          \
   csharp/src/Google.Protobuf/IDeepCloneable.cs                               \
   csharp/src/Google.Protobuf/IMessage.cs                                     \
+  csharp/src/Google.Protobuf/InvalidJsonException.cs                         \
   csharp/src/Google.Protobuf/InvalidProtocolBufferException.cs               \
   csharp/src/Google.Protobuf/JsonFormatter.cs                                \
+  csharp/src/Google.Protobuf/JsonParser.cs                                   \
+  csharp/src/Google.Protobuf/JsonToken.cs                                    \
+  csharp/src/Google.Protobuf/JsonTokenizer.cs                                \
   csharp/src/Google.Protobuf/LimitedInputStream.cs                           \
   csharp/src/Google.Protobuf/MessageExtensions.cs                            \
   csharp/src/Google.Protobuf/MessageParser.cs                                \
-  csharp/src/Google.Protobuf/Preconditions.cs                                \
+  csharp/src/Google.Protobuf/ProtoPreconditions.cs                           \
   csharp/src/Google.Protobuf/Properties/AssemblyInfo.cs                      \
+  csharp/src/Google.Protobuf/Reflection/Descriptor.cs                        \
   csharp/src/Google.Protobuf/Reflection/DescriptorBase.cs                    \
   csharp/src/Google.Protobuf/Reflection/DescriptorPool.cs                    \
-  csharp/src/Google.Protobuf/Reflection/DescriptorProtoFile.cs               \
   csharp/src/Google.Protobuf/Reflection/DescriptorUtil.cs                    \
   csharp/src/Google.Protobuf/Reflection/DescriptorValidationException.cs     \
   csharp/src/Google.Protobuf/Reflection/EnumDescriptor.cs                    \
@@ -135,7 +150,7 @@ csharp_EXTRA_DIST=                                                           \
   csharp/src/Google.Protobuf/Reflection/FieldDescriptor.cs                   \
   csharp/src/Google.Protobuf/Reflection/FieldType.cs                         \
   csharp/src/Google.Protobuf/Reflection/FileDescriptor.cs                    \
-  csharp/src/Google.Protobuf/Reflection/GeneratedCodeInfo.cs                 \
+  csharp/src/Google.Protobuf/Reflection/GeneratedClrTypeInfo.cs              \
   csharp/src/Google.Protobuf/Reflection/IDescriptor.cs                       \
   csharp/src/Google.Protobuf/Reflection/IFieldAccessor.cs                    \
   csharp/src/Google.Protobuf/Reflection/MapFieldAccessor.cs                  \
@@ -143,12 +158,14 @@ csharp_EXTRA_DIST=                                                           \
   csharp/src/Google.Protobuf/Reflection/MethodDescriptor.cs                  \
   csharp/src/Google.Protobuf/Reflection/OneofAccessor.cs                     \
   csharp/src/Google.Protobuf/Reflection/OneofDescriptor.cs                   \
+  csharp/src/Google.Protobuf/Reflection/OriginalNameAttribute.cs             \
   csharp/src/Google.Protobuf/Reflection/PackageDescriptor.cs                 \
   csharp/src/Google.Protobuf/Reflection/PartialClasses.cs                    \
   csharp/src/Google.Protobuf/Reflection/ReflectionUtil.cs                    \
   csharp/src/Google.Protobuf/Reflection/RepeatedFieldAccessor.cs             \
   csharp/src/Google.Protobuf/Reflection/ServiceDescriptor.cs                 \
   csharp/src/Google.Protobuf/Reflection/SingleFieldAccessor.cs               \
+  csharp/src/Google.Protobuf/Reflection/TypeRegistry.cs                      \
   csharp/src/Google.Protobuf/WellKnownTypes/Any.cs                           \
   csharp/src/Google.Protobuf/WellKnownTypes/AnyPartial.cs                    \
   csharp/src/Google.Protobuf/WellKnownTypes/Api.cs                           \
@@ -162,157 +179,175 @@ csharp_EXTRA_DIST=                                                           \
   csharp/src/Google.Protobuf/WellKnownTypes/Timestamp.cs                     \
   csharp/src/Google.Protobuf/WellKnownTypes/TimestampPartial.cs              \
   csharp/src/Google.Protobuf/WellKnownTypes/Type.cs                          \
+  csharp/src/Google.Protobuf/WellKnownTypes/ValuePartial.cs                  \
   csharp/src/Google.Protobuf/WellKnownTypes/Wrappers.cs                      \
+  csharp/src/Google.Protobuf/WellKnownTypes/WrappersPartial.cs               \
   csharp/src/Google.Protobuf/WireFormat.cs                                   \
+  csharp/src/Google.Protobuf/packages.config                                 \
   csharp/src/packages/repositories.config
 
-java_EXTRA_DIST=                                                             \
-  java/src/main/java/com/google/protobuf/AbstractMessage.java                \
-  java/src/main/java/com/google/protobuf/AbstractMessageLite.java            \
-  java/src/main/java/com/google/protobuf/AbstractParser.java                 \
-  java/src/main/java/com/google/protobuf/AbstractProtobufList.java           \
-  java/src/main/java/com/google/protobuf/BlockingRpcChannel.java             \
-  java/src/main/java/com/google/protobuf/BlockingService.java                \
-  java/src/main/java/com/google/protobuf/BooleanArrayList.java               \
-  java/src/main/java/com/google/protobuf/BoundedByteString.java              \
-  java/src/main/java/com/google/protobuf/ByteString.java                     \
-  java/src/main/java/com/google/protobuf/CodedInputStream.java               \
-  java/src/main/java/com/google/protobuf/CodedOutputStream.java              \
-  java/src/main/java/com/google/protobuf/Descriptors.java                    \
-  java/src/main/java/com/google/protobuf/DoubleArrayList.java                \
-  java/src/main/java/com/google/protobuf/DynamicMessage.java                 \
-  java/src/main/java/com/google/protobuf/Extension.java                      \
-  java/src/main/java/com/google/protobuf/ExtensionLite.java                  \
-  java/src/main/java/com/google/protobuf/ExtensionRegistry.java              \
-  java/src/main/java/com/google/protobuf/ExtensionRegistryLite.java          \
-  java/src/main/java/com/google/protobuf/FieldSet.java                       \
-  java/src/main/java/com/google/protobuf/FloatArrayList.java                 \
-  java/src/main/java/com/google/protobuf/GeneratedMessage.java               \
-  java/src/main/java/com/google/protobuf/GeneratedMessageLite.java           \
-  java/src/main/java/com/google/protobuf/IntArrayList.java                   \
-  java/src/main/java/com/google/protobuf/Internal.java                       \
-  java/src/main/java/com/google/protobuf/InvalidProtocolBufferException.java \
-  java/src/main/java/com/google/protobuf/LazyField.java                      \
-  java/src/main/java/com/google/protobuf/LazyFieldLite.java                  \
-  java/src/main/java/com/google/protobuf/LazyStringArrayList.java            \
-  java/src/main/java/com/google/protobuf/LazyStringList.java                 \
-  java/src/main/java/com/google/protobuf/LiteralByteString.java              \
-  java/src/main/java/com/google/protobuf/LongArrayList.java                  \
-  java/src/main/java/com/google/protobuf/MapEntry.java                       \
-  java/src/main/java/com/google/protobuf/MapEntryLite.java                   \
-  java/src/main/java/com/google/protobuf/MapField.java                       \
-  java/src/main/java/com/google/protobuf/MapFieldLite.java                   \
-  java/src/main/java/com/google/protobuf/Message.java                        \
-  java/src/main/java/com/google/protobuf/MessageLite.java                    \
-  java/src/main/java/com/google/protobuf/MessageLiteOrBuilder.java           \
-  java/src/main/java/com/google/protobuf/MessageOrBuilder.java               \
-  java/src/main/java/com/google/protobuf/MessageReflection.java              \
-  java/src/main/java/com/google/protobuf/MutabilityOracle.java               \
-  java/src/main/java/com/google/protobuf/Parser.java                         \
-  java/src/main/java/com/google/protobuf/ProtobufArrayList.java              \
-  java/src/main/java/com/google/protobuf/ProtocolMessageEnum.java            \
-  java/src/main/java/com/google/protobuf/ProtocolStringList.java             \
-  java/src/main/java/com/google/protobuf/RepeatedFieldBuilder.java           \
-  java/src/main/java/com/google/protobuf/RopeByteString.java                 \
-  java/src/main/java/com/google/protobuf/RpcCallback.java                    \
-  java/src/main/java/com/google/protobuf/RpcChannel.java                     \
-  java/src/main/java/com/google/protobuf/RpcController.java                  \
-  java/src/main/java/com/google/protobuf/RpcUtil.java                        \
-  java/src/main/java/com/google/protobuf/Service.java                        \
-  java/src/main/java/com/google/protobuf/ServiceException.java               \
-  java/src/main/java/com/google/protobuf/SingleFieldBuilder.java             \
-  java/src/main/java/com/google/protobuf/SmallSortedMap.java                 \
-  java/src/main/java/com/google/protobuf/TextFormat.java                     \
-  java/src/main/java/com/google/protobuf/UninitializedMessageException.java  \
-  java/src/main/java/com/google/protobuf/UnknownFieldSet.java                \
-  java/src/main/java/com/google/protobuf/UnknownFieldSetLite.java            \
-  java/src/main/java/com/google/protobuf/UnmodifiableLazyStringList.java     \
-  java/src/main/java/com/google/protobuf/Utf8.java                           \
-  java/src/main/java/com/google/protobuf/WireFormat.java                     \
-  java/src/test/java/com/google/protobuf/AbstractMessageTest.java            \
-  java/src/test/java/com/google/protobuf/AnyTest.java                        \
-  java/src/test/java/com/google/protobuf/BooleanArrayListTest.java           \
-  java/src/test/java/com/google/protobuf/BoundedByteStringTest.java          \
-  java/src/test/java/com/google/protobuf/ByteStringTest.java                 \
-  java/src/test/java/com/google/protobuf/CheckUtf8Test.java                  \
-  java/src/test/java/com/google/protobuf/CodedInputStreamTest.java           \
-  java/src/test/java/com/google/protobuf/CodedOutputStreamTest.java          \
-  java/src/test/java/com/google/protobuf/DeprecatedFieldTest.java            \
-  java/src/test/java/com/google/protobuf/DescriptorsTest.java                \
-  java/src/test/java/com/google/protobuf/DoubleArrayListTest.java            \
-  java/src/test/java/com/google/protobuf/DynamicMessageTest.java             \
-  java/src/test/java/com/google/protobuf/FieldPresenceTest.java              \
-  java/src/test/java/com/google/protobuf/FloatArrayListTest.java             \
-  java/src/test/java/com/google/protobuf/ForceFieldBuildersPreRun.java       \
-  java/src/test/java/com/google/protobuf/GeneratedMessageTest.java           \
-  java/src/test/java/com/google/protobuf/IntArrayListTest.java               \
-  java/src/test/java/com/google/protobuf/IsValidUtf8Test.java                \
-  java/src/test/java/com/google/protobuf/IsValidUtf8TestUtil.java            \
-  java/src/test/java/com/google/protobuf/LazyFieldLiteTest.java              \
-  java/src/test/java/com/google/protobuf/LazyFieldTest.java                  \
-  java/src/test/java/com/google/protobuf/LazyMessageLiteTest.java            \
-  java/src/test/java/com/google/protobuf/LazyStringArrayListTest.java        \
-  java/src/test/java/com/google/protobuf/LazyStringEndToEndTest.java         \
-  java/src/test/java/com/google/protobuf/LiteEqualsAndHashTest.java          \
-  java/src/test/java/com/google/protobuf/LiteTest.java                       \
-  java/src/test/java/com/google/protobuf/LiteralByteStringTest.java          \
-  java/src/test/java/com/google/protobuf/LongArrayListTest.java              \
-  java/src/test/java/com/google/protobuf/MapForProto2LiteTest.java           \
-  java/src/test/java/com/google/protobuf/MapForProto2Test.java               \
-  java/src/test/java/com/google/protobuf/MapTest.java                        \
-  java/src/test/java/com/google/protobuf/MessageTest.java                    \
-  java/src/test/java/com/google/protobuf/NestedBuildersTest.java             \
-  java/src/test/java/com/google/protobuf/ParserTest.java                     \
-  java/src/test/java/com/google/protobuf/ProtobufArrayListTest.java          \
-  java/src/test/java/com/google/protobuf/RepeatedFieldBuilderTest.java       \
-  java/src/test/java/com/google/protobuf/RopeByteStringSubstringTest.java    \
-  java/src/test/java/com/google/protobuf/RopeByteStringTest.java             \
-  java/src/test/java/com/google/protobuf/ServiceTest.java                    \
-  java/src/test/java/com/google/protobuf/SingleFieldBuilderTest.java         \
-  java/src/test/java/com/google/protobuf/SmallSortedMapTest.java             \
-  java/src/test/java/com/google/protobuf/TestBadIdentifiers.java             \
-  java/src/test/java/com/google/protobuf/TestUtil.java                       \
-  java/src/test/java/com/google/protobuf/TextFormatTest.java                 \
-  java/src/test/java/com/google/protobuf/UnknownEnumValueTest.java           \
-  java/src/test/java/com/google/protobuf/UnknownFieldSetLiteTest.java        \
-  java/src/test/java/com/google/protobuf/UnknownFieldSetTest.java            \
-  java/src/test/java/com/google/protobuf/UnmodifiableLazyStringListTest.java \
-  java/src/test/java/com/google/protobuf/WellKnownTypesTest.java             \
-  java/src/test/java/com/google/protobuf/WireFormatTest.java                 \
-  java/src/test/java/com/google/protobuf/any_test.proto                      \
-  java/src/test/java/com/google/protobuf/field_presence_test.proto           \
-  java/src/test/java/com/google/protobuf/lazy_fields_lite.proto              \
-  java/src/test/java/com/google/protobuf/lite_equals_and_hash.proto          \
-  java/src/test/java/com/google/protobuf/map_for_proto2_lite_test.proto      \
-  java/src/test/java/com/google/protobuf/map_for_proto2_test.proto           \
-  java/src/test/java/com/google/protobuf/map_initialization_order_test.proto \
-  java/src/test/java/com/google/protobuf/map_test.proto                      \
-  java/src/test/java/com/google/protobuf/multiple_files_test.proto           \
-  java/src/test/java/com/google/protobuf/nested_builders_test.proto          \
-  java/src/test/java/com/google/protobuf/nested_extension.proto              \
-  java/src/test/java/com/google/protobuf/nested_extension_lite.proto         \
-  java/src/test/java/com/google/protobuf/non_nested_extension.proto          \
-  java/src/test/java/com/google/protobuf/non_nested_extension_lite.proto     \
-  java/src/test/java/com/google/protobuf/outer_class_name_test.proto         \
-  java/src/test/java/com/google/protobuf/outer_class_name_test2.proto        \
-  java/src/test/java/com/google/protobuf/outer_class_name_test3.proto        \
-  java/src/test/java/com/google/protobuf/test_bad_identifiers.proto          \
-  java/src/test/java/com/google/protobuf/test_check_utf8.proto               \
-  java/src/test/java/com/google/protobuf/test_check_utf8_size.proto          \
-  java/src/test/java/com/google/protobuf/test_custom_options.proto           \
-  java/src/test/java/com/google/protobuf/test_extra_interfaces.proto         \
-  java/util/pom.xml                                                          \
-  java/util/src/main/java/com/google/protobuf/util/FieldMaskTree.java        \
-  java/util/src/main/java/com/google/protobuf/util/FieldMaskUtil.java        \
-  java/util/src/main/java/com/google/protobuf/util/JsonFormat.java           \
-  java/util/src/main/java/com/google/protobuf/util/TimeUtil.java             \
-  java/util/src/test/java/com/google/protobuf/util/FieldMaskTreeTest.java    \
-  java/util/src/test/java/com/google/protobuf/util/FieldMaskUtilTest.java    \
-  java/util/src/test/java/com/google/protobuf/util/JsonFormatTest.java       \
-  java/util/src/test/java/com/google/protobuf/util/TimeUtilTest.java         \
-  java/util/src/test/java/com/google/protobuf/util/json_test.proto           \
-  java/pom.xml                                                               \
-  java/README.md
+java_EXTRA_DIST=                                                                   \
+  java/README.md                                                                   \
+  java/core/generate-sources-build.xml                                             \
+  java/core/generate-test-sources-build.xml                                        \
+  java/core/pom.xml                                                                \
+  java/core/src/main/java/com/google/protobuf/AbstractMessage.java                 \
+  java/core/src/main/java/com/google/protobuf/AbstractMessageLite.java             \
+  java/core/src/main/java/com/google/protobuf/AbstractParser.java                  \
+  java/core/src/main/java/com/google/protobuf/AbstractProtobufList.java            \
+  java/core/src/main/java/com/google/protobuf/BlockingRpcChannel.java              \
+  java/core/src/main/java/com/google/protobuf/BlockingService.java                 \
+  java/core/src/main/java/com/google/protobuf/BooleanArrayList.java                \
+  java/core/src/main/java/com/google/protobuf/ByteBufferWriter.java                \
+  java/core/src/main/java/com/google/protobuf/ByteOutput.java                      \
+  java/core/src/main/java/com/google/protobuf/ByteString.java                      \
+  java/core/src/main/java/com/google/protobuf/CodedInputStream.java                \
+  java/core/src/main/java/com/google/protobuf/CodedOutputStream.java               \
+  java/core/src/main/java/com/google/protobuf/Descriptors.java                     \
+  java/core/src/main/java/com/google/protobuf/DoubleArrayList.java                 \
+  java/core/src/main/java/com/google/protobuf/DynamicMessage.java                  \
+  java/core/src/main/java/com/google/protobuf/Extension.java                       \
+  java/core/src/main/java/com/google/protobuf/ExtensionLite.java                   \
+  java/core/src/main/java/com/google/protobuf/ExtensionRegistry.java               \
+  java/core/src/main/java/com/google/protobuf/ExtensionRegistryLite.java           \
+  java/core/src/main/java/com/google/protobuf/FieldSet.java                        \
+  java/core/src/main/java/com/google/protobuf/FloatArrayList.java                  \
+  java/core/src/main/java/com/google/protobuf/GeneratedMessage.java                \
+  java/core/src/main/java/com/google/protobuf/GeneratedMessageLite.java            \
+  java/core/src/main/java/com/google/protobuf/IntArrayList.java                    \
+  java/core/src/main/java/com/google/protobuf/Internal.java                        \
+  java/core/src/main/java/com/google/protobuf/InvalidProtocolBufferException.java  \
+  java/core/src/main/java/com/google/protobuf/LazyField.java                       \
+  java/core/src/main/java/com/google/protobuf/LazyFieldLite.java                   \
+  java/core/src/main/java/com/google/protobuf/LazyStringArrayList.java             \
+  java/core/src/main/java/com/google/protobuf/LazyStringList.java                  \
+  java/core/src/main/java/com/google/protobuf/LongArrayList.java                   \
+  java/core/src/main/java/com/google/protobuf/MapEntry.java                        \
+  java/core/src/main/java/com/google/protobuf/MapEntryLite.java                    \
+  java/core/src/main/java/com/google/protobuf/MapField.java                        \
+  java/core/src/main/java/com/google/protobuf/MapFieldLite.java                    \
+  java/core/src/main/java/com/google/protobuf/Message.java                         \
+  java/core/src/main/java/com/google/protobuf/MessageLite.java                     \
+  java/core/src/main/java/com/google/protobuf/MessageLiteOrBuilder.java            \
+  java/core/src/main/java/com/google/protobuf/MessageLiteToString.java             \
+  java/core/src/main/java/com/google/protobuf/MessageOrBuilder.java                \
+  java/core/src/main/java/com/google/protobuf/MessageReflection.java               \
+  java/core/src/main/java/com/google/protobuf/MutabilityOracle.java                \
+  java/core/src/main/java/com/google/protobuf/NioByteString.java                   \
+  java/core/src/main/java/com/google/protobuf/Parser.java                          \
+  java/core/src/main/java/com/google/protobuf/ProtobufArrayList.java               \
+  java/core/src/main/java/com/google/protobuf/ProtocolMessageEnum.java             \
+  java/core/src/main/java/com/google/protobuf/ProtocolStringList.java              \
+  java/core/src/main/java/com/google/protobuf/RepeatedFieldBuilder.java            \
+  java/core/src/main/java/com/google/protobuf/RopeByteString.java                  \
+  java/core/src/main/java/com/google/protobuf/RpcCallback.java                     \
+  java/core/src/main/java/com/google/protobuf/RpcChannel.java                      \
+  java/core/src/main/java/com/google/protobuf/RpcController.java                   \
+  java/core/src/main/java/com/google/protobuf/RpcUtil.java                         \
+  java/core/src/main/java/com/google/protobuf/Service.java                         \
+  java/core/src/main/java/com/google/protobuf/ServiceException.java                \
+  java/core/src/main/java/com/google/protobuf/SingleFieldBuilder.java              \
+  java/core/src/main/java/com/google/protobuf/SmallSortedMap.java                  \
+  java/core/src/main/java/com/google/protobuf/TextFormat.java                      \
+  java/core/src/main/java/com/google/protobuf/TextFormatEscaper.java               \
+  java/core/src/main/java/com/google/protobuf/TextFormatParseInfoTree.java         \
+  java/core/src/main/java/com/google/protobuf/TextFormatParseLocation.java         \
+  java/core/src/main/java/com/google/protobuf/UninitializedMessageException.java   \
+  java/core/src/main/java/com/google/protobuf/UnknownFieldSet.java                 \
+  java/core/src/main/java/com/google/protobuf/UnknownFieldSetLite.java             \
+  java/core/src/main/java/com/google/protobuf/UnmodifiableLazyStringList.java      \
+  java/core/src/main/java/com/google/protobuf/UnsafeByteOperations.java            \
+  java/core/src/main/java/com/google/protobuf/Utf8.java                            \
+  java/core/src/main/java/com/google/protobuf/WireFormat.java                      \
+  java/core/src/test/java/com/google/protobuf/AbstractMessageTest.java             \
+  java/core/src/test/java/com/google/protobuf/AnyTest.java                         \
+  java/core/src/test/java/com/google/protobuf/BooleanArrayListTest.java            \
+  java/core/src/test/java/com/google/protobuf/BoundedByteStringTest.java           \
+  java/core/src/test/java/com/google/protobuf/ByteBufferWriterTest.java            \
+  java/core/src/test/java/com/google/protobuf/ByteStringTest.java                  \
+  java/core/src/test/java/com/google/protobuf/CheckUtf8Test.java                   \
+  java/core/src/test/java/com/google/protobuf/CodedInputStreamTest.java            \
+  java/core/src/test/java/com/google/protobuf/CodedOutputStreamTest.java           \
+  java/core/src/test/java/com/google/protobuf/DeprecatedFieldTest.java             \
+  java/core/src/test/java/com/google/protobuf/DescriptorsTest.java                 \
+  java/core/src/test/java/com/google/protobuf/DoubleArrayListTest.java             \
+  java/core/src/test/java/com/google/protobuf/DynamicMessageTest.java              \
+  java/core/src/test/java/com/google/protobuf/EnumTest.java                        \
+  java/core/src/test/java/com/google/protobuf/FieldPresenceTest.java               \
+  java/core/src/test/java/com/google/protobuf/FloatArrayListTest.java              \
+  java/core/src/test/java/com/google/protobuf/ForceFieldBuildersPreRun.java        \
+  java/core/src/test/java/com/google/protobuf/GeneratedMessageTest.java            \
+  java/core/src/test/java/com/google/protobuf/IntArrayListTest.java                \
+  java/core/src/test/java/com/google/protobuf/IsValidUtf8Test.java                 \
+  java/core/src/test/java/com/google/protobuf/IsValidUtf8TestUtil.java             \
+  java/core/src/test/java/com/google/protobuf/LazyFieldLiteTest.java               \
+  java/core/src/test/java/com/google/protobuf/LazyFieldTest.java                   \
+  java/core/src/test/java/com/google/protobuf/LazyMessageLiteTest.java             \
+  java/core/src/test/java/com/google/protobuf/LazyStringArrayListTest.java         \
+  java/core/src/test/java/com/google/protobuf/LazyStringEndToEndTest.java          \
+  java/core/src/test/java/com/google/protobuf/LiteEqualsAndHashTest.java           \
+  java/core/src/test/java/com/google/protobuf/LiteTest.java                        \
+  java/core/src/test/java/com/google/protobuf/LiteralByteStringTest.java           \
+  java/core/src/test/java/com/google/protobuf/LongArrayListTest.java               \
+  java/core/src/test/java/com/google/protobuf/MapForProto2LiteTest.java            \
+  java/core/src/test/java/com/google/protobuf/MapForProto2Test.java                \
+  java/core/src/test/java/com/google/protobuf/MapTest.java                         \
+  java/core/src/test/java/com/google/protobuf/MessageTest.java                     \
+  java/core/src/test/java/com/google/protobuf/NestedBuildersTest.java              \
+  java/core/src/test/java/com/google/protobuf/NioByteStringTest.java               \
+  java/core/src/test/java/com/google/protobuf/ParserTest.java                      \
+  java/core/src/test/java/com/google/protobuf/ProtobufArrayListTest.java           \
+  java/core/src/test/java/com/google/protobuf/RepeatedFieldBuilderTest.java        \
+  java/core/src/test/java/com/google/protobuf/RopeByteStringSubstringTest.java     \
+  java/core/src/test/java/com/google/protobuf/RopeByteStringTest.java              \
+  java/core/src/test/java/com/google/protobuf/ServiceTest.java                     \
+  java/core/src/test/java/com/google/protobuf/SingleFieldBuilderTest.java          \
+  java/core/src/test/java/com/google/protobuf/SmallSortedMapTest.java              \
+  java/core/src/test/java/com/google/protobuf/TestBadIdentifiers.java              \
+  java/core/src/test/java/com/google/protobuf/TestUtil.java                        \
+  java/core/src/test/java/com/google/protobuf/TextFormatParseInfoTreeTest.java     \
+  java/core/src/test/java/com/google/protobuf/TextFormatParseLocationTest.java     \
+  java/core/src/test/java/com/google/protobuf/TextFormatTest.java                  \
+  java/core/src/test/java/com/google/protobuf/UnknownEnumValueTest.java            \
+  java/core/src/test/java/com/google/protobuf/UnknownFieldSetLiteTest.java         \
+  java/core/src/test/java/com/google/protobuf/UnknownFieldSetTest.java             \
+  java/core/src/test/java/com/google/protobuf/UnmodifiableLazyStringListTest.java  \
+  java/core/src/test/java/com/google/protobuf/WellKnownTypesTest.java              \
+  java/core/src/test/java/com/google/protobuf/WireFormatTest.java                  \
+  java/core/src/test/proto/com/google/protobuf/any_test.proto                      \
+  java/core/src/test/proto/com/google/protobuf/field_presence_test.proto           \
+  java/core/src/test/proto/com/google/protobuf/lazy_fields_lite.proto              \
+  java/core/src/test/proto/com/google/protobuf/lite_equals_and_hash.proto          \
+  java/core/src/test/proto/com/google/protobuf/map_for_proto2_lite_test.proto      \
+  java/core/src/test/proto/com/google/protobuf/map_for_proto2_test.proto           \
+  java/core/src/test/proto/com/google/protobuf/map_initialization_order_test.proto \
+  java/core/src/test/proto/com/google/protobuf/map_test.proto                      \
+  java/core/src/test/proto/com/google/protobuf/multiple_files_test.proto           \
+  java/core/src/test/proto/com/google/protobuf/nested_builders_test.proto          \
+  java/core/src/test/proto/com/google/protobuf/nested_extension.proto              \
+  java/core/src/test/proto/com/google/protobuf/nested_extension_lite.proto         \
+  java/core/src/test/proto/com/google/protobuf/non_nested_extension.proto          \
+  java/core/src/test/proto/com/google/protobuf/non_nested_extension_lite.proto     \
+  java/core/src/test/proto/com/google/protobuf/outer_class_name_test.proto         \
+  java/core/src/test/proto/com/google/protobuf/outer_class_name_test2.proto        \
+  java/core/src/test/proto/com/google/protobuf/outer_class_name_test3.proto        \
+  java/core/src/test/proto/com/google/protobuf/test_bad_identifiers.proto          \
+  java/core/src/test/proto/com/google/protobuf/test_check_utf8.proto               \
+  java/core/src/test/proto/com/google/protobuf/test_check_utf8_size.proto          \
+  java/core/src/test/proto/com/google/protobuf/test_custom_options.proto           \
+  java/core/src/test/proto/com/google/protobuf/test_extra_interfaces.proto         \
+  java/lite/pom.xml                                                                \
+  java/pom.xml                                                                     \
+  java/util/pom.xml                                                                \
+  java/util/src/main/java/com/google/protobuf/util/FieldMaskTree.java              \
+  java/util/src/main/java/com/google/protobuf/util/FieldMaskUtil.java              \
+  java/util/src/main/java/com/google/protobuf/util/JsonFormat.java                 \
+  java/util/src/main/java/com/google/protobuf/util/TimeUtil.java                   \
+  java/util/src/test/java/com/google/protobuf/util/FieldMaskTreeTest.java          \
+  java/util/src/test/java/com/google/protobuf/util/FieldMaskUtilTest.java          \
+  java/util/src/test/java/com/google/protobuf/util/JsonFormatTest.java             \
+  java/util/src/test/java/com/google/protobuf/util/TimeUtilTest.java               \
+  java/util/src/test/proto/com/google/protobuf/util/json_test.proto
 
 javanano_EXTRA_DIST=                                                                      \
   javanano/src/main/java/com/google/protobuf/nano/CodedOutputByteBufferNano.java          \
@@ -359,13 +394,11 @@ objectivec_EXTRA_DIST=                                                       \
   objectivec/DevTools/full_mac_build.sh                                      \
   objectivec/DevTools/pddm.py                                                \
   objectivec/DevTools/pddm_tests.py                                          \
-  objectivec/generate_descriptors_proto.sh                                   \
+  objectivec/generate_well_known_types.sh                                    \
   objectivec/google/protobuf/Any.pbobjc.h                                    \
   objectivec/google/protobuf/Any.pbobjc.m                                    \
   objectivec/google/protobuf/Api.pbobjc.h                                    \
   objectivec/google/protobuf/Api.pbobjc.m                                    \
-  objectivec/google/protobuf/Descriptor.pbobjc.h                             \
-  objectivec/google/protobuf/Descriptor.pbobjc.m                             \
   objectivec/google/protobuf/Duration.pbobjc.h                               \
   objectivec/google/protobuf/Duration.pbobjc.m                               \
   objectivec/google/protobuf/Empty.pbobjc.h                                  \
@@ -391,6 +424,7 @@ objectivec_EXTRA_DIST=                                                       \
   objectivec/GPBCodedInputStream_PackagePrivate.h                            \
   objectivec/GPBCodedOutputStream.h                                          \
   objectivec/GPBCodedOutputStream.m                                          \
+  objectivec/GPBCodedOutputStream_PackagePrivate.h                           \
   objectivec/GPBDescriptor.h                                                 \
   objectivec/GPBDescriptor.m                                                 \
   objectivec/GPBDescriptor_PackagePrivate.h                                  \
@@ -458,7 +492,6 @@ objectivec_EXTRA_DIST=                                                       \
   objectivec/Tests/GPBMessageTests.m                                         \
   objectivec/Tests/GPBObjectiveCPlusPlusTest.mm                              \
   objectivec/Tests/GPBPerfTests.m                                            \
-  objectivec/Tests/GPBStringTests.m                                          \
   objectivec/Tests/GPBSwiftTests.swift                                       \
   objectivec/Tests/GPBTestUtilities.h                                        \
   objectivec/Tests/GPBTestUtilities.m                                        \
@@ -494,6 +527,15 @@ objectivec_EXTRA_DIST=                                                       \
 
 python_EXTRA_DIST=                                                           \
   python/MANIFEST.in                                                         \
+  python/google/__init__.py                                                  \
+  python/google/protobuf/__init__.py                                         \
+  python/google/protobuf/descriptor.py                                       \
+  python/google/protobuf/descriptor_database.py                              \
+  python/google/protobuf/descriptor_pool.py                                  \
+  python/google/protobuf/internal/__init__.py                                \
+  python/google/protobuf/internal/_parameterized.py                          \
+  python/google/protobuf/internal/any_test.proto                             \
   python/google/protobuf/internal/api_implementation.cc                      \
   python/google/protobuf/internal/api_implementation.py                      \
   python/google/protobuf/internal/containers.py                              \
@@ -508,17 +550,19 @@ python_EXTRA_DIST=                                                           \
   python/google/protobuf/internal/factory_test1.proto                        \
   python/google/protobuf/internal/factory_test2.proto                        \
   python/google/protobuf/internal/generator_test.py                          \
+  python/google/protobuf/internal/import_test_package/__init__.py            \
+  python/google/protobuf/internal/import_test_package/inner.proto            \
+  python/google/protobuf/internal/import_test_package/outer.proto            \
   python/google/protobuf/internal/json_format_test.py                        \
   python/google/protobuf/internal/message_factory_test.py                    \
   python/google/protobuf/internal/message_listener.py                        \
   python/google/protobuf/internal/message_set_extensions.proto               \
   python/google/protobuf/internal/message_test.py                            \
   python/google/protobuf/internal/missing_enum_values.proto                  \
-  python/google/protobuf/internal/more_extensions_dynamic.proto              \
   python/google/protobuf/internal/more_extensions.proto                      \
+  python/google/protobuf/internal/more_extensions_dynamic.proto              \
   python/google/protobuf/internal/more_messages.proto                        \
   python/google/protobuf/internal/packed_field_test.proto                    \
-  python/google/protobuf/internal/_parameterized.py                          \
   python/google/protobuf/internal/proto_builder_test.py                      \
   python/google/protobuf/internal/python_message.py                          \
   python/google/protobuf/internal/reflection_test.py                         \
@@ -530,52 +574,47 @@ python_EXTRA_DIST=                                                           \
   python/google/protobuf/internal/text_format_test.py                        \
   python/google/protobuf/internal/type_checkers.py                           \
   python/google/protobuf/internal/unknown_fields_test.py                     \
+  python/google/protobuf/internal/well_known_types.py                        \
+  python/google/protobuf/internal/well_known_types_test.py                   \
   python/google/protobuf/internal/wire_format.py                             \
   python/google/protobuf/internal/wire_format_test.py                        \
-  python/google/protobuf/internal/__init__.py                                \
-  python/google/protobuf/internal/import_test_package/__init__.py            \
-  python/google/protobuf/internal/import_test_package/inner.proto            \
-  python/google/protobuf/internal/import_test_package/outer.proto            \
+  python/google/protobuf/json_format.py                                      \
+  python/google/protobuf/message.py                                          \
+  python/google/protobuf/message_factory.py                                  \
+  python/google/protobuf/proto_builder.py                                    \
   python/google/protobuf/pyext/README                                        \
+  python/google/protobuf/pyext/__init__.py                                   \
   python/google/protobuf/pyext/cpp_message.py                                \
-  python/google/protobuf/pyext/descriptor.h                                  \
   python/google/protobuf/pyext/descriptor.cc                                 \
-  python/google/protobuf/pyext/descriptor_pool.h                             \
-  python/google/protobuf/pyext/descriptor_pool.cc                            \
-  python/google/protobuf/pyext/descriptor_containers.h                       \
+  python/google/protobuf/pyext/descriptor.h                                  \
   python/google/protobuf/pyext/descriptor_containers.cc                      \
-  python/google/protobuf/pyext/extension_dict.h                              \
+  python/google/protobuf/pyext/descriptor_containers.h                       \
+  python/google/protobuf/pyext/descriptor_database.cc                        \
+  python/google/protobuf/pyext/descriptor_database.h                         \
+  python/google/protobuf/pyext/descriptor_pool.cc                            \
+  python/google/protobuf/pyext/descriptor_pool.h                             \
   python/google/protobuf/pyext/extension_dict.cc                             \
-  python/google/protobuf/pyext/message.h                                     \
+  python/google/protobuf/pyext/extension_dict.h                              \
+  python/google/protobuf/pyext/map_container.cc                              \
+  python/google/protobuf/pyext/map_container.h                               \
   python/google/protobuf/pyext/message.cc                                    \
-  python/google/protobuf/pyext/message_map_container.cc                      \
-  python/google/protobuf/pyext/message_map_container.h                       \
+  python/google/protobuf/pyext/message.h                                     \
   python/google/protobuf/pyext/proto2_api_test.proto                         \
   python/google/protobuf/pyext/python.proto                                  \
   python/google/protobuf/pyext/python_protobuf.h                             \
-  python/google/protobuf/pyext/repeated_composite_container.h                \
   python/google/protobuf/pyext/repeated_composite_container.cc               \
-  python/google/protobuf/pyext/repeated_scalar_container.h                   \
+  python/google/protobuf/pyext/repeated_composite_container.h                \
   python/google/protobuf/pyext/repeated_scalar_container.cc                  \
-  python/google/protobuf/pyext/scalar_map_container.cc                       \
-  python/google/protobuf/pyext/scalar_map_container.h                        \
+  python/google/protobuf/pyext/repeated_scalar_container.h                   \
   python/google/protobuf/pyext/scoped_pyobject_ptr.h                         \
-  python/google/protobuf/pyext/__init__.py                                   \
-  python/google/protobuf/descriptor.py                                       \
-  python/google/protobuf/descriptor_database.py                              \
-  python/google/protobuf/descriptor_pool.py                                  \
-  python/google/protobuf/json_format.py                                      \
-  python/google/protobuf/message.py                                          \
-  python/google/protobuf/message_factory.py                                  \
-  python/google/protobuf/proto_builder.py                                    \
   python/google/protobuf/reflection.py                                       \
   python/google/protobuf/service.py                                          \
   python/google/protobuf/service_reflection.py                               \
   python/google/protobuf/symbol_database.py                                  \
   python/google/protobuf/text_encoding.py                                    \
   python/google/protobuf/text_format.py                                      \
-  python/google/protobuf/__init__.py                                         \
-  python/google/__init__.py                                                  \
   python/mox.py                                                              \
   python/setup.py                                                            \
   python/stubout.py                                                          \
@@ -626,17 +665,49 @@ ruby_EXTRA_DIST=                                                             \
   ruby/tests/repeated_field_test.rb                                          \
   ruby/tests/stress.rb                                                       \
   ruby/tests/generated_code.proto                                            \
-  ruby/tests/generated_code.rb                                               \
   ruby/tests/generated_code_test.rb                                          \
   ruby/travis-test.sh
 
-all_EXTRA_DIST=$(csharp_EXTRA_DIST) $(java_EXTRA_DIST) $(javanano_EXTRA_DIST) $(objectivec_EXTRA_DIST) $(python_EXTRA_DIST) $(ruby_EXTRA_DIST)
+js_EXTRA_DIST=              \
+  js/README.md              \
+  js/binary/arith.js        \
+  js/binary/arith_test.js   \
+  js/binary/constants.js    \
+  js/binary/decoder.js      \
+  js/binary/decoder_test.js \
+  js/binary/proto_test.js   \
+  js/binary/reader.js       \
+  js/binary/reader_test.js  \
+  js/binary/utils.js        \
+  js/binary/utils_test.js   \
+  js/binary/writer.js       \
+  js/binary/writer_test.js  \
+  js/data.proto             \
+  js/debug.js               \
+  js/debug_test.js          \
+  js/gulpfile.js            \
+  js/jasmine.json           \
+  js/message.js             \
+  js/message_test.js        \
+  js/node_loader.js         \
+  js/package.json           \
+  js/proto3_test.js         \
+  js/proto3_test.proto      \
+  js/test.proto             \
+  js/test2.proto            \
+  js/test3.proto            \
+  js/test4.proto            \
+  js/test5.proto            \
+  js/test_bootstrap.js      \
+  js/testbinary.proto       \
+  js/testempty.proto
+
+all_EXTRA_DIST=$(csharp_EXTRA_DIST) $(java_EXTRA_DIST) $(javanano_EXTRA_DIST) $(objectivec_EXTRA_DIST) $(python_EXTRA_DIST) $(ruby_EXTRA_DIST) $(js_EXTRA_DIST)
 
 EXTRA_DIST = $(@DIST_LANG@_EXTRA_DIST)   \
   autogen.sh                             \
   generate_descriptor_proto.sh           \
   README.md                              \
-  INSTALL.txt                            \
   LICENSE                                \
   CONTRIBUTORS.txt                       \
   CHANGES.txt                            \
@@ -663,15 +734,23 @@ EXTRA_DIST = $(@DIST_LANG@_EXTRA_DIST)   \
   examples/Makefile                      \
   examples/addressbook.proto             \
   examples/add_person.cc                 \
+  examples/add_person.go                 \
+  examples/add_person_test.go            \
   examples/list_people.cc                \
+  examples/list_people.go                \
   examples/AddPerson.java                \
   examples/ListPeople.java               \
   examples/add_person.py                 \
-  examples/list_people.py
+  examples/list_people.py                \
+  examples/list_people_test.go           \
+  protobuf.bzl                           \
+  six.BUILD                              \
+  util/python/BUILD
 
 # Deletes all the files generated by autogen.sh.
 MAINTAINERCLEANFILES =   \
   aclocal.m4             \
+  ar-lib                 \
   config.guess           \
   config.sub             \
   configure              \

+ 4 - 2
Protobuf.podspec

@@ -5,16 +5,18 @@
 # dependent projects use the :git notation to refer to the library.
 Pod::Spec.new do |s|
   s.name     = 'Protobuf'
-  s.version  = '3.0.0-alpha-4.1'
+  s.version  = '3.0.0-beta-2'
   s.summary  = 'Protocol Buffers v.3 runtime library for Objective-C.'
   s.homepage = 'https://github.com/google/protobuf'
   s.license  = 'New BSD'
   s.authors  = { 'The Protocol Buffers contributors' => 'protobuf@googlegroups.com' }
 
+  s.source = { :git => 'https://github.com/google/protobuf.git',
+               :tag => "v#{s.version}" }
+
   s.source_files = 'objectivec/*.{h,m}',
                    'objectivec/google/protobuf/Any.pbobjc.{h,m}',
                    'objectivec/google/protobuf/Api.pbobjc.{h,m}',
-                   'objectivec/google/protobuf/Descriptor.pbobjc.{h,m}',
                    'objectivec/google/protobuf/Duration.pbobjc.h',
                    'objectivec/google/protobuf/Empty.pbobjc.{h,m}',
                    'objectivec/google/protobuf/FieldMask.pbobjc.{h,m}',

+ 45 - 161
README.md

@@ -7,182 +7,66 @@ Copyright 2008 Google Inc.
 
 https://developers.google.com/protocol-buffers/
 
-C++ Installation - Unix
------------------------
+Overview
+--------
 
-If you get the source from github, you need to generate the configure script
-first:
+Protocol Buffers (a.k.a. protobuf) are Google's language-neutral,
+platform-neutral, extensible mechanism for serializing structured data. You
+can find [protobuf's documentation on the Google Developers site](https://developers.google.com/protocol-buffers/).
 
-    $ ./autogen.sh
+This README file contains protobuf installation instructions. To install
+protobuf, you need to install the protocol compiler (used to compile .proto
+files) and the protobuf runtime for your chosen programming language.
 
-This will download gmock source (which is used for C++ Protocol Buffer
-unit-tests) to the current directory and run automake, autoconf, etc.
-to generate the configure script and various template makefiles.
+Protocol Compiler Installation
+------------------------------
 
-You can skip this step if you are using a release package (which already
-contains gmock and the configure script).
+The protocol compiler is written in C++. If you are using C++, please follow
+the [C++ Installation Instructions](src/README.md) to install protoc along
+with the C++ runtime.
 
-To build and install the C++ Protocol Buffer runtime and the Protocol
-Buffer compiler (protoc) execute the following:
+For non-C++ users, the simplest way to install the protocol compiler is to
+download a pre-built binary from our release page:
 
-    $ ./configure
-    $ make
-    $ make check
-    $ make install
+  [https://github.com/google/protobuf/releases](https://github.com/google/protobuf/releases)
 
-If "make check" fails, you can still install, but it is likely that
-some features of this library will not work correctly on your system.
-Proceed at your own risk.
+In the downloads section of each release, you can find pre-built binaries in
+zip packages: protoc-$VERSION-$PLATFORM.zip. Each package contains the protoc
+binary as well as a set of standard .proto files distributed along with
+protobuf.
 
-"make install" may require superuser privileges.
+If you are looking for an old version that is not available on the release
+page, check the Maven repository here:
 
-For advanced usage information on configure and make, see INSTALL.txt.
+  [http://repo1.maven.org/maven2/com/google/protobuf/protoc/](http://repo1.maven.org/maven2/com/google/protobuf/protoc/)
 
-**Hint on install location**
+These pre-built binaries are only provided for released versions. If you want
+to use the GitHub master version at HEAD, need to modify the protobuf code, or
+are using C++, it is recommended to build your own protoc binary from source.
 
-  By default, the package will be installed to /usr/local.  However,
-  on many platforms, /usr/local/lib is not part of LD_LIBRARY_PATH.
-  You can add it, but it may be easier to just install to /usr
-  instead.  To do this, invoke configure as follows:
+If you would like to build protoc binary from source, see the [C++ Installation
+Instructions](src/README.md).
 
-    ./configure --prefix=/usr
+Protobuf Runtime Installation
+-----------------------------
 
-  If you already built the package with a different prefix, make sure
-  to run "make clean" before building again.
+Protobuf supports several different programming languages. For each programming
+language, you can find instructions in the corresponding source directory about
+how to install the protobuf runtime for that specific language:
 
-**Compiling dependent packages**
+| Language                             | Source                                                |
+|--------------------------------------|-------------------------------------------------------|
+| C++ (C++ runtime and protoc)         | [src](src)                                            |
+| Java                                 | [java](java)                                          |
+| Python                               | [python](python)                                      |
+| Objective-C                          | [objectivec](objectivec)                              |
+| C#                                   | [csharp](csharp)                                      |
+| JavaNano                             | [javanano](javanano)                                  |
+| JavaScript                           | [js](js)                                              |
+| Ruby                                 | [ruby](ruby)                                          |
+| Go                                   | [golang/protobuf](https://github.com/golang/protobuf) |
+| PHP                                  | TBD                                                   |
 
-  To compile a package that uses Protocol Buffers, you need to pass
-  various flags to your compiler and linker.  As of version 2.2.0,
-  Protocol Buffers integrates with pkg-config to manage this.  If you
-  have pkg-config installed, then you can invoke it to get a list of
-  flags like so:
-
-    pkg-config --cflags protobuf         # print compiler flags
-    pkg-config --libs protobuf           # print linker flags
-    pkg-config --cflags --libs protobuf  # print both
-
-  For example:
-
-    c++ my_program.cc my_proto.pb.cc `pkg-config --cflags --libs protobuf`
-
-  Note that packages written prior to the 2.2.0 release of Protocol
-  Buffers may not yet integrate with pkg-config to get flags, and may
-  not pass the correct set of flags to correctly link against
-  libprotobuf.  If the package in question uses autoconf, you can
-  often fix the problem by invoking its configure script like:
-
-    configure CXXFLAGS="$(pkg-config --cflags protobuf)" \
-              LIBS="$(pkg-config --libs protobuf)"
-
-  This will force it to use the correct flags.
-
-  If you are writing an autoconf-based package that uses Protocol
-  Buffers, you should probably use the PKG_CHECK_MODULES macro in your
-  configure script like:
-
-    PKG_CHECK_MODULES([protobuf], [protobuf])
-
-  See the pkg-config man page for more info.
-
-  If you only want protobuf-lite, substitute "protobuf-lite" in place
-  of "protobuf" in these examples.
-
-**Note for Mac users**
-
-  For a Mac system, Unix tools are not available by default. You will first need
-  to install Xcode from the Mac AppStore and then run the following command from
-  a terminal:
-
-    $ sudo xcode-select --install
-
-  To install Unix tools, you can install "port" following the instructions at
-  https://www.macports.org . This will reside in /opt/local/bin/port for most
-  Mac installations.
-
-    $ sudo /opt/local/bin/port install autoconf automake libtool
-
-  Then follow the Unix instructions above.
-
-**Note for cross-compiling**
-
-  The makefiles normally invoke the protoc executable that they just
-  built in order to build tests.  When cross-compiling, the protoc
-  executable may not be executable on the host machine.  In this case,
-  you must build a copy of protoc for the host machine first, then use
-  the --with-protoc option to tell configure to use it instead.  For
-  example:
-
-    ./configure --with-protoc=protoc
-
-  This will use the installed protoc (found in your $PATH) instead of
-  trying to execute the one built during the build process.  You can
-  also use an executable that hasn't been installed.  For example, if
-  you built the protobuf package for your host machine in ../host,
-  you might do:
-
-    ./configure --with-protoc=../host/src/protoc
-
-  Either way, you must make sure that the protoc executable you use
-  has the same version as the protobuf source code you are trying to
-  use it with.
-
-**Note for Solaris users**
-
-  Solaris 10 x86 has a bug that will make linking fail, complaining
-  about libstdc++.la being invalid.  We have included a work-around
-  in this package.  To use the work-around, run configure as follows:
-
-    ./configure LDFLAGS=-L$PWD/src/solaris
-
-  See src/solaris/libstdc++.la for more info on this bug.
-
-**Note for HP C++ Tru64 users**
-
-  To compile invoke configure as follows:
-
-    ./configure CXXFLAGS="-O -std ansi -ieee -D__USE_STD_IOSTREAM"
-
-  Also, you will need to use gmake instead of make.
-
-**Note for AIX users**
-
-  Compile using the IBM xlC C++ compiler as follows:
-
-    ./configure CXX=xlC
-
-  Also, you will need to use GNU `make` (`gmake`) instead of AIX `make`.
-
-C++ Installation - Windows
---------------------------
-
-If you are using Microsoft Visual C++, see cmake/README.md.
-
-If you are using Cygwin or MinGW, follow the Unix installation
-instructions, above.
-
-Binary Compatibility Warning
-----------------------------
-
-Due to the nature of C++, it is unlikely that any two versions of the
-Protocol Buffers C++ runtime libraries will have compatible ABIs.
-That is, if you linked an executable against an older version of
-libprotobuf, it is unlikely to work with a newer version without
-re-compiling.  This problem, when it occurs, will normally be detected
-immediately on startup of your app.  Still, you may want to consider
-using static linkage.  You can configure this package to install
-static libraries only using:
-
-    ./configure --disable-shared
-
-Java and Python Installation
-----------------------------
-
-The Java and Python runtime libraries for Protocol Buffers are located
-in the java and python directories.  See the README file in each
-directory for more information on how to compile and install them.
-Note that both of them require you to first install the Protocol
-Buffer compiler (protoc), which is part of the C++ package.
 
 Usage
 -----

+ 45 - 8
WORKSPACE

@@ -1,16 +1,53 @@
 new_http_archive(
-  name = "gmock_archive",
-  url = "https://googlemock.googlecode.com/files/gmock-1.7.0.zip",
-  sha256 = "26fcbb5925b74ad5fc8c26b0495dfc96353f4d553492eb97e85a8a6d2f43095b",
-  build_file = "gmock.BUILD",
+    name = "gmock_archive",
+    url = "https://googlemock.googlecode.com/files/gmock-1.7.0.zip",
+    sha256 = "26fcbb5925b74ad5fc8c26b0495dfc96353f4d553492eb97e85a8a6d2f43095b",
+    build_file = "gmock.BUILD",
+)
+
+new_http_archive(
+    name = "six_archive",
+    url = "https://pypi.python.org/packages/source/s/six/six-1.10.0.tar.gz#md5=34eed507548117b2ab523ab14b2f8b55",
+    sha256 = "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a",
+    build_file = "six.BUILD",
+)
+
+bind(
+    name = "python_headers",
+    actual = "//util/python:python_headers",
 )
 
 bind(
-  name = "gtest",
-  actual = "@gmock_archive//:gtest",
+    name = "gtest",
+    actual = "@gmock_archive//:gtest",
+)
+
+bind(
+    name = "gtest_main",
+    actual = "@gmock_archive//:gtest_main",
+)
+
+bind(
+    name = "six",
+    actual = "@six_archive//:six",
+)
+
+maven_jar(
+    name = "guava_maven",
+    artifact = "com.google.guava:guava:18.0",
+)
+
+bind(
+    name = "guava",
+    actual = "@guava_maven//jar",
+)
+
+maven_jar(
+    name = "gson_maven",
+    artifact = "com.google.code.gson:gson:2.3",
 )
 
 bind(
-  name = "gtest_main",
-  actual = "@gmock_archive//:gtest_main",
+    name = "gson",
+    actual = "@gson_maven//jar",
 )

+ 2 - 2
appveyor.bat

@@ -10,7 +10,7 @@ goto :error
 echo Building C++
 mkdir build_msvc
 cd build_msvc
-cmake -G "%generator%" -DBUILD_SHARED_LIBS=%BUILD_DLL% ../cmake
+cmake -G "%generator%" -Dprotobuf_BUILD_SHARED_LIBS=%BUILD_DLL% ../cmake
 msbuild protobuf.sln /p:Platform=%vcplatform% /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" || goto error
 cd %configuration%
 tests.exe || goto error
@@ -26,4 +26,4 @@ goto :EOF
 
 :error
 echo Failed!
-EXIT /b %ERRORLEVEL%
+EXIT /b %ERRORLEVEL%

+ 32 - 32
appveyor.yml

@@ -1,32 +1,32 @@
-# Only test one combination: "Visual Studio 12 + Win64 + Debug + DLL". We can
-# test more combinations but AppVeyor just takes too long to finish (each
-# combination takes ~15mins).
-platform:
-  - Win64
-
-configuration:
-  - Debug
-
-environment:
-  matrix:
-    - language: cpp
-      BUILD_DLL: ON
-
-    - language: csharp
-
-install:
-  - ps: Start-FileDownload https://googlemock.googlecode.com/files/gmock-1.7.0.zip
-  - 7z x gmock-1.7.0.zip
-  - rename gmock-1.7.0 gmock
-
-before_build:
-  - if %platform%==Win32 set generator=Visual Studio 12
-  - if %platform%==Win64 set generator=Visual Studio 12 Win64
-  - if %platform%==Win32 set vcplatform=Win32
-  - if %platform%==Win64 set vcplatform=x64
-
-build_script:
-  - CALL appveyor.bat
-
-skip_commits:
-  message: /.*\[skip appveyor\].*/
+# Only test one combination: "Visual Studio 12 + Win64 + Debug + DLL". We can
+# test more combinations but AppVeyor just takes too long to finish (each
+# combination takes ~15mins).
+platform:
+  - Win64
+
+configuration:
+  - Debug
+
+environment:
+  matrix:
+    - language: cpp
+      BUILD_DLL: ON
+
+    - language: csharp
+
+install:
+  - ps: Start-FileDownload https://googlemock.googlecode.com/files/gmock-1.7.0.zip
+  - 7z x gmock-1.7.0.zip
+  - rename gmock-1.7.0 gmock
+
+before_build:
+  - if %platform%==Win32 set generator=Visual Studio 12
+  - if %platform%==Win64 set generator=Visual Studio 12 Win64
+  - if %platform%==Win32 set vcplatform=Win32
+  - if %platform%==Win64 set vcplatform=x64
+
+build_script:
+  - CALL appveyor.bat
+
+skip_commits:
+  message: /.*\[skip appveyor\].*/

+ 13 - 1
autogen.sh

@@ -6,6 +6,18 @@
 
 set -e
 
+if [ $# -gt 0 ]; then
+  for argument in "$@"; do
+    case $argument in
+      # make curl silent
+      "-s")
+        curlopts="-s"
+        ;;
+    esac
+  done
+fi
+
+
 # Check that we're being run from the right directory.
 if test ! -f src/google/protobuf/stubs/common.h; then
   cat >&2 << __EOF__
@@ -19,7 +31,7 @@ fi
 # directory is set up as an SVN external.
 if test ! -e gmock; then
   echo "Google Mock not present.  Fetching gmock-1.7.0 from the web..."
-  curl -O https://googlemock.googlecode.com/files/gmock-1.7.0.zip
+  curl $curlopts -O https://googlemock.googlecode.com/files/gmock-1.7.0.zip
   unzip -q gmock-1.7.0.zip
   rm gmock-1.7.0.zip
   mv gmock-1.7.0 gmock

+ 69 - 0
benchmarks/Makefile.am

@@ -0,0 +1,69 @@
+
+benchmarks_protoc_inputs =                                     \
+  benchmarks.proto                                             \
+  benchmark_messages_proto3.proto
+
+benchmarks_protoc_inputs_proto2 =                              \
+  benchmark_messages_proto2.proto
+
+benchmarks_protoc_outputs =                                    \
+  benchmarks.pb.cc                                             \
+  benchmarks.pb.h                                              \
+  benchmark_messages_proto3.pb.cc                              \
+  benchmark_messages_proto3.pb.h
+
+benchmarks_protoc_outputs_proto2 =                             \
+  benchmark_messages_proto2.pb.cc                              \
+  benchmark_messages_proto2.pb.h
+
+bin_PROGRAMS = generate-datasets
+
+generate_datasets_LDADD = $(top_srcdir)/src/libprotobuf.la
+generate_datasets_SOURCES = generate_datasets.cc
+generate_datasets_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)
+nodist_generate_datasets_SOURCES =                             \
+  $(benchmarks_protoc_outputs)                                 \
+  $(benchmarks_protoc_outputs_proto2)
+
+# Explicit deps because BUILT_SOURCES are only done before a "make all/check"
+# so a direct "make generate-datasets" could fail if parallel enough.
+# See: https://www.gnu.org/software/automake/manual/html_node/Built-Sources-Example.html#Recording-Dependencies-manually
+generate_datasets-generate_datasets.$(OBJEXT): benchmarks.pb.h
+
+$(benchmarks_protoc_outputs): protoc_middleman
+$(benchmarks_protoc_outputs_proto2): protoc_middleman2
+
+CLEANFILES =                                                   \
+  $(benchmarks_protoc_outputs)                                 \
+  $(benchmarks_protoc_outputs_proto2)                          \
+  protoc_middleman                                             \
+  protoc_middleman2                                            \
+  dataset.*
+
+MAINTAINERCLEANFILES =   \
+  Makefile.in
+
+if USE_EXTERNAL_PROTOC
+
+protoc_middleman: $(benchmarks_protoc_inputs)
+	$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. $(benchmarks_protoc_inputs)
+	touch protoc_middleman
+
+protoc_middleman2: $(benchmarks_protoc_inputs_proto2)
+	$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. $(benchmarks_protoc_inputs_proto2)
+	touch protoc_middleman2
+
+else
+
+# We have to cd to $(srcdir) before executing protoc because the .proto inputs are
+# relative to srcdir, which may not be the same as the current directory when
+# building out-of-tree.
+protoc_middleman: $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs) $(well_known_type_protoc_inputs)
+	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd $(benchmarks_protoc_inputs) )
+	touch protoc_middleman
+
+protoc_middleman2: $(top_srcdir)/src/protoc$(EXEEXT) $(benchmarks_protoc_inputs_proto2) $(well_known_type_protoc_inputs)
+	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd $(benchmarks_protoc_inputs_proto2) )
+	touch protoc_middleman2
+
+endif

+ 28 - 0
benchmarks/README.md

@@ -0,0 +1,28 @@
+
+# Protocol Buffers Benchmarks
+
+This directory contains benchmarking schemas and data sets that you
+can use to test a variety of performance scenarios against your
+protobuf language runtime.
+
+The schema for the datasets is described in `benchmarks.proto`.
+
+Generate the data sets like so:
+
+```
+$ make
+$ ./generate-datasets
+Wrote dataset: dataset.google_message1_proto3.pb
+Wrote dataset: dataset.google_message1_proto2.pb
+Wrote dataset: dataset.google_message2.pb
+$
+```
+
+Each data set will be written to its own file.  A benchmark implementation
+will likely want to run several tests against each data set (parse,
+serialize, possibly JSON, possibly using different APIs, etc.).
+
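+As a rough sketch of how a benchmark harness might consume these files
+(assuming the dataset message defined in `benchmarks.proto` is
+`benchmarks.BenchmarkDataset` with `name`, `message_name` and repeated
+`payload` fields, and that `benchmarks.pb.h` was generated by the `make` step
+above), a small C++ reader could look like:
+
+```
+// read_dataset.cc -- hypothetical example; compile against libprotobuf and
+// the generated benchmarks.pb.cc.
+#include <fstream>
+#include <iostream>
+#include <sstream>
+#include <string>
+
+#include "benchmarks.pb.h"
+
+int main(int argc, char* argv[]) {
+  if (argc < 2) {
+    std::cerr << "Usage: " << argv[0] << " <dataset file>" << std::endl;
+    return 1;
+  }
+
+  // Slurp the whole dataset file into memory.
+  std::ifstream in(argv[1], std::ios::binary);
+  if (!in) {
+    std::cerr << "Could not open " << argv[1] << std::endl;
+    return 1;
+  }
+  std::stringstream buffer;
+  buffer << in.rdbuf();
+
+  // Message and field names here are assumptions; see benchmarks.proto for
+  // the actual schema.
+  benchmarks::BenchmarkDataset dataset;
+  if (!dataset.ParseFromString(buffer.str())) {
+    std::cerr << "Failed to parse " << argv[1] << std::endl;
+    return 1;
+  }
+
+  std::cout << dataset.name() << ": " << dataset.payload_size()
+            << " payloads of type " << dataset.message_name() << std::endl;
+
+  // A real benchmark would repeatedly parse each payload into the message
+  // type named by message_name() and/or re-serialize it, timing each pass.
+  return 0;
+}
+```
+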
+We would like to add more data sets.  In general we will favor data sets
+that make the overall suite diverse without being too large or having
+too many similar tests.  Ideally everyone can run through the entire
+suite without the test run getting too long.

+ 11 - 8
benchmarks/google_speed.proto → benchmarks/benchmark_messages_proto2.proto

@@ -1,11 +1,14 @@
+// Benchmark messages for proto2.
+
 syntax = "proto2";
 
-package benchmarks;
+package benchmarks.proto2;
+option java_package = "com.google.protobuf.benchmarks";
 
-option java_outer_classname = "GoogleSpeed";
+// This is the default, but we specify it here explicitly.
 option optimize_for = SPEED;
 
-message SpeedMessage1 {
+message GoogleMessage1 {
   required string field1 = 1;
   optional string field9 = 9;
   optional string field18 = 18;
@@ -40,7 +43,7 @@ message SpeedMessage1 {
   optional int32 field23 = 23 [default=0];
   optional bool field24 = 24 [default=false];
   optional int32 field25 = 25 [default=0];
-  optional SpeedMessage1SubMessage field15 = 15;
+  optional GoogleMessage1SubMessage field15 = 15;
   optional bool field78 = 78;
   optional int32 field67 = 67 [default=0];
   optional int32 field68 = 68;
@@ -49,7 +52,7 @@ message SpeedMessage1 {
   optional int32 field131 = 131 [default=0];
 }
 
-message SpeedMessage1SubMessage {
+message GoogleMessage1SubMessage {
   optional int32 field1 = 1 [default=0];
   optional int32 field2 = 2 [default=0];
   optional int32 field3 = 3 [default=0];
@@ -72,7 +75,7 @@ message SpeedMessage1SubMessage {
   optional uint64 field300 = 300;
 }
 
-message SpeedMessage2 {
+message GoogleMessage2 {
   optional string field1 = 1;
   optional int64 field3 = 3;
   optional int64 field4 = 4;
@@ -112,7 +115,7 @@ message SpeedMessage2 {
     repeated int32 field73 = 73;
     optional int32 field20 = 20 [default=0];
     optional string field24 = 24;
-    optional SpeedMessage2GroupedMessage field31 = 31;
+    optional GoogleMessage2GroupedMessage field31 = 31;
   }
   repeated string field128 = 128;
   optional int64 field131 = 131;
@@ -123,7 +126,7 @@ message SpeedMessage2 {
   optional bool field206 = 206 [default=false];
 }
 
-message SpeedMessage2GroupedMessage {
+message GoogleMessage2GroupedMessage {
   optional float field1 = 1;
   optional float field2 = 2;
   optional float field3 = 3 [default=0.0];

+ 76 - 0
benchmarks/benchmark_messages_proto3.proto

@@ -0,0 +1,76 @@
+// Benchmark messages for proto3.
+
+syntax = "proto3";
+
+package benchmarks.proto3;
+option java_package = "com.google.protobuf.benchmarks";
+
+// This is the default, but we specify it here explicitly.
+option optimize_for = SPEED;
+
+message GoogleMessage1 {
+  string field1 = 1;
+  string field9 = 9;
+  string field18 = 18;
+  bool field80 = 80;
+  bool field81 = 81;
+  int32 field2 = 2;
+  int32 field3 = 3;
+  int32 field280 = 280;
+  int32 field6 = 6;
+  int64 field22 = 22;
+  string field4 = 4;
+  repeated fixed64 field5 = 5;
+  bool field59 = 59;
+  string field7 = 7;
+  int32 field16 = 16;
+  int32 field130 = 130;
+  bool field12 = 12;
+  bool field17 = 17;
+  bool field13 = 13;
+  bool field14 = 14;
+  int32 field104 = 104;
+  int32 field100 = 100;
+  int32 field101 = 101;
+  string field102 = 102;
+  string field103 = 103;
+  int32 field29 = 29;
+  bool field30 = 30;
+  int32 field60 = 60;
+  int32 field271 = 271;
+  int32 field272 = 272;
+  int32 field150 = 150;
+  int32 field23 = 23;
+  bool field24 = 24;
+  int32 field25 = 25;
+  GoogleMessage1SubMessage field15 = 15;
+  bool field78 = 78;
+  int32 field67 = 67;
+  int32 field68 = 68;
+  int32 field128 = 128;
+  string field129 = 129;
+  int32 field131 = 131;
+}
+
+message GoogleMessage1SubMessage {
+  int32 field1 = 1;
+  int32 field2 = 2;
+  int32 field3 = 3;
+  string field15 = 15;
+  bool field12 = 12;
+  int64 field13 = 13;
+  int64 field14 = 14;
+  int32 field16 = 16;
+  int32 field19 = 19;
+  bool field20  = 20;
+  bool field28 = 28;
+  fixed64 field21 = 21;
+  int32 field22 = 22;
+  bool field23 = 23;
+  bool field206 = 206;
+  fixed32 field203 = 203;
+  int32 field204 = 204;
+  string field205 = 205;
+  uint64 field207 = 207;
+  uint64 field300 = 300;
+}

+ 63 - 0
benchmarks/benchmarks.proto

@@ -0,0 +1,63 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+syntax = "proto3";
+package benchmarks;
+option java_package = "com.google.protobuf.benchmarks";
+
+message BenchmarkDataset {
+  // Name of the benchmark dataset.  This should be unique across all datasets.
+  // Should only contain word characters: [a-zA-Z0-9_]
+  string name = 1;
+
+  // Fully-qualified name of the protobuf message for this dataset.
+  // It will be one of the messages defined in benchmark_messages_proto2.proto
+  // or benchmark_messages_proto3.proto.
+  //
+  // Implementations that do not support reflection can implement this with
+  // an explicit "if/else" chain that lists every known message defined
+  // in those files.
+  string message_name = 2;
+
+  // The payload(s) for this dataset.  They should be parsed or serialized
+  // in sequence, in a loop, i.e.:
+  //
+  //  while (!benchmarkDone) {  // Benchmark runner decides when to exit.
+  //    for (i = 0; i < benchmark.payload.length; i++) {
+  //      parse(benchmark.payload[i])
+  //    }
+  //  }
+  //
+  // This is intended to let datasets include a variety of data to provide
+  // potentially more realistic results than just parsing the same message
+  // over and over.  A single message parsed repeatedly could yield unusually
+  // good branch prediction performance.
+  repeated bytes payload = 3;
+}
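
For runtimes with reflection, `message_name` can be resolved dynamically instead of through an explicit if/else chain. Below is a sketch of that lookup in C++, mirroring the validation that `generate_datasets.cc` (next file) performs; the generated header names are assumptions for the example.

```
// Sketch: resolve message_name through the generated descriptor pool and run
// the parse loop described in the payload comment above.
#include <iostream>
#include <memory>

#include "benchmarks.pb.h"
// Including (and linking) the generated message code registers the benchmark
// message types in the generated descriptor pool.
#include "benchmark_messages_proto2.pb.h"
#include "benchmark_messages_proto3.pb.h"

bool RunParseLoop(const benchmarks::BenchmarkDataset& dataset) {
  using google::protobuf::DescriptorPool;
  using google::protobuf::Message;
  using google::protobuf::MessageFactory;

  const google::protobuf::Descriptor* descriptor =
      DescriptorPool::generated_pool()->FindMessageTypeByName(
          dataset.message_name());
  if (descriptor == nullptr) {
    std::cerr << "Unknown message: " << dataset.message_name() << "\n";
    return false;
  }
  std::unique_ptr<Message> message(
      MessageFactory::generated_factory()->GetPrototype(descriptor)->New());

  // One pass over all payloads; a benchmark runner would repeat this until
  // its timing loop decides to stop.
  for (int i = 0; i < dataset.payload_size(); i++) {
    if (!message->ParseFromString(dataset.payload(i))) return false;
  }
  return true;
}
```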

+ 117 - 0
benchmarks/generate_datasets.cc

@@ -0,0 +1,117 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#include <cstdlib>
+#include <fstream>
+#include <iostream>
+#include <iterator>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "benchmarks.pb.h"
+
+using benchmarks::BenchmarkDataset;
+using google::protobuf::Descriptor;
+using google::protobuf::DescriptorPool;
+using google::protobuf::Message;
+using google::protobuf::MessageFactory;
+
+std::set<std::string> names;
+
+const char *file_prefix = "dataset.";
+const char *file_suffix = ".pb";
+
+void WriteFileWithPayloads(const std::string& name,
+                           const std::string& message_name,
+                           const std::vector<std::string>& payload) {
+  if (!names.insert(name).second) {
+    std::cerr << "Duplicate test name: " << name << "\n";
+    abort();
+  }
+
+  // First verify that this message name exists in our set of benchmark messages
+  // and that these payloads are valid for the given message.
+  const Descriptor* d =
+      DescriptorPool::generated_pool()->FindMessageTypeByName(message_name);
+
+  if (!d) {
+    std::cerr << "For dataset " << name << ", no such message: "
+              << message_name << "\n";
+    abort();
+  }
+
+  Message* m = MessageFactory::generated_factory()->GetPrototype(d)->New();
+
+  for (size_t i = 0; i < payload.size(); i++) {
+    if (!m->ParseFromString(payload[i])) {
+      std::cerr << "For dataset " << name << ", payload[" << i << "] fails "
+                << "to parse\n";
+      abort();
+    }
+  }
+
+  BenchmarkDataset dataset;
+  dataset.set_name(name);
+  dataset.set_message_name(message_name);
+  for (size_t i = 0; i < payload.size(); i++) {
+    dataset.add_payload()->assign(payload[i]);
+  }
+
+  std::ofstream writer;
+  std::string fname = file_prefix + name + file_suffix;
+  writer.open(fname.c_str());
+  dataset.SerializeToOstream(&writer);
+  writer.close();
+
+  std::cerr << "Wrote dataset: " << fname << "\n";
+}
+
+void WriteFile(const std::string& name, const std::string& message_name,
+               const std::string& payload) {
+  std::vector<std::string> payloads;
+  payloads.push_back(payload);
+  WriteFileWithPayloads(name, message_name, payloads);
+}
+
+std::string ReadFile(const std::string& name) {
+  std::ifstream file(name.c_str());
+  GOOGLE_CHECK(file.is_open()) << "Couldn't find file '" << name <<
+                                  "', please make sure you are running "
+                                  "this command from the benchmarks/ "
+                                  "directory.\n";
+  return std::string((std::istreambuf_iterator<char>(file)),
+                     std::istreambuf_iterator<char>());
+}
+
+int main() {
+  WriteFile("google_message1_proto3", "benchmarks.proto3.GoogleMessage1",
+            ReadFile("google_message1.dat"));
+  WriteFile("google_message1_proto2", "benchmarks.proto2.GoogleMessage1",
+            ReadFile("google_message1.dat"));
+
+  // Not in proto3 because it has a group, which is not supported.
+  WriteFile("google_message2", "benchmarks.proto2.GoogleMessage2",
+            ReadFile("google_message2.dat"));
+}

+ 72 - 42
cmake/CMakeLists.txt

@@ -4,39 +4,65 @@ cmake_minimum_required(VERSION 2.8)
 # Project
 project(protobuf C CXX)
 
+# CMake policies
+cmake_policy(SET CMP0022 NEW)
+
 # Options
-option(BUILD_TESTING "Build tests" ON)
-option(BUILD_SHARED_LIBS "Build Shared Libraries" OFF)
+option(protobuf_VERBOSE "Enable for verbose output" OFF)
+option(protobuf_BUILD_TESTS "Build tests" ON)
+if (BUILD_SHARED_LIBS)
+  set(protobuf_BUILD_SHARED_LIBS_DEFAULT ON)
+else (BUILD_SHARED_LIBS)
+  set(protobuf_BUILD_SHARED_LIBS_DEFAULT OFF)
+endif (BUILD_SHARED_LIBS)
+option(protobuf_BUILD_SHARED_LIBS "Build Shared Libraries" ${protobuf_BUILD_SHARED_LIBS_DEFAULT})
+option(protobuf_MSVC_STATIC_RUNTIME "Link static runtime libraries" ON)
 if (MSVC)
-  option(ZLIB "Build with zlib support" OFF)
+  set(protobuf_WITH_ZLIB_DEFAULT OFF)
+else (MSVC)
+  set(protobuf_WITH_ZLIB_DEFAULT ON)
 endif (MSVC)
+option(protobuf_WITH_ZLIB "Build with zlib support" ${protobuf_WITH_ZLIB_DEFAULT})
+set(protobuf_DEBUG_POSTFIX "d"
+  CACHE STRING "Default debug postfix")
 
 # Path to main configure script
 set(protobuf_CONFIGURE_SCRIPT "../configure.ac")
 
-# Parse version from configure script
-file(STRINGS "${protobuf_CONFIGURE_SCRIPT}" protobuf_VERSION_LINE
-  LIMIT_COUNT 1
-  REGEX "^AC_INIT")
-# Replace special characters
-string(REPLACE "(" "_" protobuf_VERSION_LINE ${protobuf_VERSION_LINE})
-string(REPLACE ")" "_" protobuf_VERSION_LINE ${protobuf_VERSION_LINE})
-string(REPLACE "[" "_" protobuf_VERSION_LINE ${protobuf_VERSION_LINE})
-string(REPLACE "]" "_" protobuf_VERSION_LINE ${protobuf_VERSION_LINE})
-# Parse version string
-string(REGEX REPLACE "^AC_INIT__Protocol Buffers_,_([^_]+).*$" "\\1"
-    protobuf_VERSION_STRING "${protobuf_VERSION_LINE}")
+# Parse configure script
+set(protobuf_AC_INIT_REGEX
+  "^AC_INIT\\(\\[([^]]+)\\],\\[([^]]+)\\],\\[([^]]+)\\],\\[([^]]+)\\]\\)$")
+file(STRINGS "${protobuf_CONFIGURE_SCRIPT}" protobuf_AC_INIT_LINE
+  LIMIT_COUNT 1 REGEX "^AC_INIT")
+# Description
+string(REGEX REPLACE        "${protobuf_AC_INIT_REGEX}" "\\1"
+    protobuf_DESCRIPTION    "${protobuf_AC_INIT_LINE}")
+# Version
+string(REGEX REPLACE        "${protobuf_AC_INIT_REGEX}" "\\2"
+    protobuf_VERSION_STRING "${protobuf_AC_INIT_LINE}")
+# Contact
+string(REGEX REPLACE        "${protobuf_AC_INIT_REGEX}" "\\3"
+    protobuf_CONTACT        "${protobuf_AC_INIT_LINE}")
 # Parse version tweaks
-string(REGEX REPLACE "^([0-9]+)\\.([0-9]+)\\.([0-9]+).*$" "\\1"
+set(protobuf_VERSION_REGEX "^([0-9]+)\\.([0-9]+)\\.([0-9]+).*$")
+string(REGEX REPLACE     "${protobuf_VERSION_REGEX}" "\\1"
   protobuf_VERSION_MAJOR "${protobuf_VERSION_STRING}")
-string(REGEX REPLACE "^([0-9]+)\\.([0-9]+)\\.([0-9]+).*$" "\\2"
+string(REGEX REPLACE     "${protobuf_VERSION_REGEX}" "\\2"
   protobuf_VERSION_MINOR "${protobuf_VERSION_STRING}")
-string(REGEX REPLACE "^([0-9]+)\\.([0-9]+)\\.([0-9]+).*$" "\\3"
+string(REGEX REPLACE     "${protobuf_VERSION_REGEX}" "\\3"
   protobuf_VERSION_PATCH "${protobuf_VERSION_STRING}")
 # Package version
 set(protobuf_VERSION
   "${protobuf_VERSION_MAJOR}.${protobuf_VERSION_MINOR}.${protobuf_VERSION_PATCH}")
 
+if(protobuf_VERBOSE)
+  message(STATUS "Configuration script parsing status [")
+  message(STATUS "  Description : ${protobuf_DESCRIPTION}")
+  message(STATUS "  Version     : ${protobuf_VERSION} (${protobuf_VERSION_STRING})")
+  message(STATUS "  Contact     : ${protobuf_CONTACT}")
+  message(STATUS "]")
+endif()
+
 add_definitions(-DGOOGLE_PROTOBUF_CMAKE_BUILD)
 
 find_package(Threads REQUIRED)
@@ -44,18 +70,17 @@ if (CMAKE_USE_PTHREADS_INIT)
   add_definitions(-DHAVE_PTHREAD)
 endif (CMAKE_USE_PTHREADS_INIT)
 
-if (MSVC)
-  if (ZLIB)
-    set(HAVE_ZLIB 1)
-    find_path(ZLIB_INCLUDE_DIRECTORIES zlib.h ${protobuf_SOURCE_DIR})
-    find_library(ZLIB_LIBRARIES zdll ${protobuf_SOURCE_DIR})
-  else (ZLIB)
-    set(HAVE_ZLIB 0)
-  endif (ZLIB)
-else (MSVC)
+if (protobuf_WITH_ZLIB)
   find_package(ZLIB)
   if (ZLIB_FOUND)
     set(HAVE_ZLIB 1)
+    # The FindZLIB module defines the ZLIB_INCLUDE_DIRS variable;
+    # set ZLIB_INCLUDE_DIRECTORIES for compatibility.
+    set(ZLIB_INCLUDE_DIRECTORIES ${ZLIB_INCLUDE_DIRECTORIES} ${ZLIB_INCLUDE_DIRS})
+    # Use the imported target if it exists.
+    if (TARGET ZLIB::ZLIB)
+      set(ZLIB_LIBRARIES ZLIB::ZLIB)
+    endif (TARGET ZLIB::ZLIB)
   else (ZLIB_FOUND)
     set(HAVE_ZLIB 0)
     # Explicitly set these to empty (override NOT_FOUND) so cmake doesn't
@@ -63,21 +88,22 @@ else (MSVC)
     set(ZLIB_INCLUDE_DIRECTORIES)
     set(ZLIB_LIBRARIES)
   endif (ZLIB_FOUND)
-endif (MSVC)
+endif (protobuf_WITH_ZLIB)
 
 if (HAVE_ZLIB)
   add_definitions(-DHAVE_ZLIB)
 endif (HAVE_ZLIB)
 
-if (MSVC)
-  if (BUILD_SHARED_LIBS)
-    add_definitions(-DPROTOBUF_USE_DLLS)
-  else (BUILD_SHARED_LIBS)
-    # In case we are building static libraries, link also the runtime library statically
-	# so that MSVCR*.DLL is not required at runtime.
-    # https://msdn.microsoft.com/en-us/library/2kzt1wy3.aspx
-    # This is achieved by replacing msvc option /MD with /MT and /MDd with /MTd
-    # http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F
+if (protobuf_BUILD_SHARED_LIBS)
+  set(protobuf_SHARED_OR_STATIC "SHARED")
+else (protobuf_BUILD_SHARED_LIBS)
+  set(protobuf_SHARED_OR_STATIC "STATIC")
+  # In case we are building static libraries, link also the runtime library statically
+  # so that MSVCR*.DLL is not required at runtime.
+  # https://msdn.microsoft.com/en-us/library/2kzt1wy3.aspx
+  # This is achieved by replacing msvc option /MD with /MT and /MDd with /MTd
+  # http://www.cmake.org/Wiki/CMake_FAQ#How_can_I_build_my_MSVC_application_with_a_static_runtime.3F
+  if (MSVC AND protobuf_MSVC_STATIC_RUNTIME)
     foreach(flag_var
         CMAKE_CXX_FLAGS CMAKE_CXX_FLAGS_DEBUG CMAKE_CXX_FLAGS_RELEASE
         CMAKE_CXX_FLAGS_MINSIZEREL CMAKE_CXX_FLAGS_RELWITHDEBINFO)
@@ -85,11 +111,15 @@ if (MSVC)
         string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
       endif(${flag_var} MATCHES "/MD")
     endforeach(flag_var)
-  endif (BUILD_SHARED_LIBS)
-  add_definitions(/wd4244 /wd4267 /wd4018 /wd4355 /wd4800 /wd4251 /wd4996 /wd4146 /wd4305)
-endif (MSVC)
+  endif (MSVC AND protobuf_MSVC_STATIC_RUNTIME)
+endif (protobuf_BUILD_SHARED_LIBS)
 
 if (MSVC)
+  # Build with multiple processes
+  add_definitions(/MP)
+  add_definitions(/wd4244 /wd4267 /wd4018 /wd4355 /wd4800 /wd4251 /wd4996 /wd4146 /wd4305)
+  # Allow big object
+  add_definitions(/bigobj)
   string(REPLACE "/" "\\" PROTOBUF_SOURCE_WIN32_PATH ${protobuf_SOURCE_DIR})
   string(REPLACE "/" "\\" PROTOBUF_BINARY_WIN32_PATH ${protobuf_BINARY_DIR})
   configure_file(extract_includes.bat.in extract_includes.bat)
@@ -116,8 +146,8 @@ include(libprotobuf.cmake)
 include(libprotoc.cmake)
 include(protoc.cmake)
 
-if (BUILD_TESTING)
+if (protobuf_BUILD_TESTS)
   include(tests.cmake)
-endif (BUILD_TESTING)
+endif (protobuf_BUILD_TESTS)
 
 include(install.cmake)

+ 13 - 12
cmake/README.md

@@ -28,11 +28,11 @@ Create a folder where protobuf headers/libraries/binaries will be installed afte
 
     C:\Path\to>mkdir install
 
-If *cmake* coomand is not avaliable from *Command Promt*, add it to system *PATH* variable:
+If *cmake* command is not available from *Command Prompt*, add it to system *PATH* variable:
 
     C:\Path\to>set PATH=%PATH%;C:\Program Files (x86)\CMake\bin
 
-If *git* coomand is not avaliable from *Command Promt*, add it to system *PATH* variable:
+If *git* command is not available from *Command Prompt*, add it to system *PATH* variable:
 
     C:\Path\to>set PATH=%PATH%;C:\Program Files\Git\cmd
 
@@ -64,7 +64,7 @@ You can download gmock as follows:
 
      C:\Path\to\protobuf>git clone -b release-1.7.0 https://github.com/google/googlemock.git gmock
 
-Then go to *gmock* folder and downdload gtest:
+Then go to *gmock* folder and download gtest:
 
      C:\Path\to\protobuf>cd gmock
      C:\Path\to\protobuf\gmock>git clone -b release-1.7.0 https://github.com/google/googletest.git gtest
@@ -123,12 +123,12 @@ It will generate *nmake* *Makefile* in current directory.
 
 To create *Visual Studio* solution file:
 
-     C:\Path\to\protobuf\cmake>mkdir solution & cd solution
-     C:\Path\to\protobuf\cmake\solution>cmake -G "Visual Studio 12 2013 Win64" ^
+     C:\Path\to\protobuf\cmake\build>mkdir solution & cd solution
+     C:\Path\to\protobuf\cmake\build\solution>cmake -G "Visual Studio 12 2013 Win64" ^
      -DCMAKE_INSTALL_PREFIX=../../../../install ^
      ../..
 
-It will generate *Visual Studion* solution file *protobuf.sln* in current directory.
+It will generate *Visual Studio* solution file *protobuf.sln* in current directory.
 
 If the *gmock* directory does not exist, and you do not want to build protobuf unit tests,
 you need to add *cmake* command argument `-Dprotobuf_BUILD_TESTS=OFF` to disable testing.
@@ -146,24 +146,25 @@ or
 
 And wait for the compilation to finish.
 
-You prefer to use the IDE:
+If you prefer to use the IDE:
 
   * Open the generated protobuf.sln file in Microsoft Visual Studio.
   * Choose "Debug" or "Release" configuration as desired.
   * From the Build menu, choose "Build Solution".
 
-wait for the compilation to finish.
+And wait for the compilation to finish.
 
 Testing
 =======
 
-To run unit-tests:
+To run unit-tests, first you must compile protobuf as described above.
+Then run:
 
      C:\Path\to\protobuf\cmake\build\release>nmake check
 
 or
 
-     C:\Path\to\protobuf\cmake\build\debug>nmake  check
+     C:\Path\to\protobuf\cmake\build\debug>nmake check
 
 You can also build project *check* from Visual Studio solution.
 Yes, it may sound strange, but it works.
@@ -211,14 +212,14 @@ To install protobuf to the specified *install* folder:
 
 or
 
-     C:\Path\to\protobuf\cmake\build\debug>nmake  install
+     C:\Path\to\protobuf\cmake\build\debug>nmake install
 
 You can also build project *INSTALL* from Visual Studio solution.
 It sounds not so strange and it works.
 
 This will create the following folders under the *install* location:
   * bin - that contains protobuf *protoc.exe* compiler;
-  * inclue - that contains C++ headers and protobuf *.proto files;
+  * include - that contains C++ headers and protobuf *.proto files;
   * lib - that contains linking libraries and *CMake* configuration files for *protobuf* package.
 
 Now you can if needed:

+ 3 - 1
cmake/extract_includes.bat.in

@@ -6,6 +6,7 @@ mkdir include\google\protobuf\compiler\cpp
 mkdir include\google\protobuf\compiler\csharp
 mkdir include\google\protobuf\compiler\java
 mkdir include\google\protobuf\compiler\javanano
+mkdir include\google\protobuf\compiler\js
 mkdir include\google\protobuf\compiler\objectivec
 mkdir include\google\protobuf\compiler\python
 mkdir include\google\protobuf\compiler\ruby
@@ -26,6 +27,7 @@ copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\importer.h in
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\java\java_generator.h include\google\protobuf\compiler\java\java_generator.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\java\java_names.h include\google\protobuf\compiler\java\java_names.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\javanano\javanano_generator.h include\google\protobuf\compiler\javanano\javanano_generator.h
+copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\js\js_generator.h include\google\protobuf\compiler\js\js_generator.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\objectivec\objectivec_generator.h include\google\protobuf\compiler\objectivec\objectivec_generator.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\objectivec\objectivec_helpers.h include\google\protobuf\compiler\objectivec\objectivec_helpers.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\compiler\parser.h include\google\protobuf\compiler\parser.h
@@ -72,7 +74,6 @@ copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\source_context.pb.h in
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\struct.pb.h include\google\protobuf\struct.pb.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomic_sequence_num.h include\google\protobuf\stubs\atomic_sequence_num.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops.h include\google\protobuf\stubs\atomicops.h
-copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_aix.h include\google\protobuf\stubs\atomicops_internals_aix.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_arm64_gcc.h include\google\protobuf\stubs\atomicops_internals_arm64_gcc.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_arm_gcc.h include\google\protobuf\stubs\atomicops_internals_arm_gcc.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_arm_qnx.h include\google\protobuf\stubs\atomicops_internals_arm_qnx.h
@@ -81,6 +82,7 @@ copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_intern
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_macosx.h include\google\protobuf\stubs\atomicops_internals_macosx.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_mips_gcc.h include\google\protobuf\stubs\atomicops_internals_mips_gcc.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_pnacl.h include\google\protobuf\stubs\atomicops_internals_pnacl.h
+copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_power.h include\google\protobuf\stubs\atomicops_internals_power.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_solaris.h include\google\protobuf\stubs\atomicops_internals_solaris.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_tsan.h include\google\protobuf\stubs\atomicops_internals_tsan.h
 copy ${PROTOBUF_SOURCE_WIN32_PATH}\..\src\google\protobuf\stubs\atomicops_internals_x86_gcc.h include\google\protobuf\stubs\atomicops_internals_x86_gcc.h

+ 9 - 3
cmake/libprotobuf-lite.cmake

@@ -24,9 +24,15 @@ set(libprotobuf_lite_files
   ${protobuf_source_dir}/src/google/protobuf/wire_format_lite.cc
 )
 
-add_library(libprotobuf-lite ${libprotobuf_lite_files})
+add_library(libprotobuf-lite ${protobuf_SHARED_OR_STATIC}
+  ${libprotobuf_lite_files})
 target_link_libraries(libprotobuf-lite ${CMAKE_THREAD_LIBS_INIT})
 target_include_directories(libprotobuf-lite PUBLIC ${protobuf_source_dir}/src)
+if(MSVC AND protobuf_BUILD_SHARED_LIBS)
+  target_compile_definitions(libprotobuf-lite
+    PUBLIC  PROTOBUF_USE_DLLS
+    PRIVATE LIBPROTOBUF_EXPORTS)
+endif()
 set_target_properties(libprotobuf-lite PROPERTIES
-    COMPILE_DEFINITIONS LIBPROTOBUF_EXPORTS
-    OUTPUT_NAME ${LIB_PREFIX}protobuf-lite)
+    OUTPUT_NAME ${LIB_PREFIX}protobuf-lite
+    DEBUG_POSTFIX "${protobuf_DEBUG_POSTFIX}")

+ 10 - 3
cmake/libprotobuf.cmake

@@ -40,6 +40,7 @@ set(libprotobuf_files
   ${protobuf_source_dir}/src/google/protobuf/util/internal/json_objectwriter.cc
   ${protobuf_source_dir}/src/google/protobuf/util/internal/json_stream_parser.cc
   ${protobuf_source_dir}/src/google/protobuf/util/internal/object_writer.cc
+  ${protobuf_source_dir}/src/google/protobuf/util/internal/proto_writer.cc
   ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectsource.cc
   ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter.cc
   ${protobuf_source_dir}/src/google/protobuf/util/internal/type_info.cc
@@ -53,9 +54,15 @@ set(libprotobuf_files
   ${protobuf_source_dir}/src/google/protobuf/wrappers.pb.cc
 )
 
-add_library(libprotobuf ${libprotobuf_lite_files} ${libprotobuf_files})
+add_library(libprotobuf ${protobuf_SHARED_OR_STATIC}
+  ${libprotobuf_lite_files} ${libprotobuf_files})
 target_link_libraries(libprotobuf ${CMAKE_THREAD_LIBS_INIT} ${ZLIB_LIBRARIES})
 target_include_directories(libprotobuf PUBLIC ${protobuf_source_dir}/src)
+if(MSVC AND protobuf_BUILD_SHARED_LIBS)
+  target_compile_definitions(libprotobuf
+    PUBLIC  PROTOBUF_USE_DLLS
+    PRIVATE LIBPROTOBUF_EXPORTS)
+endif()
 set_target_properties(libprotobuf PROPERTIES
-    COMPILE_DEFINITIONS LIBPROTOBUF_EXPORTS
-    OUTPUT_NAME ${LIB_PREFIX}protobuf)
+    OUTPUT_NAME ${LIB_PREFIX}protobuf
+    DEBUG_POSTFIX "${protobuf_DEBUG_POSTFIX}")

+ 12 - 3
cmake/libprotoc.cmake

@@ -24,11 +24,11 @@ set(libprotoc_files
   ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_message_field.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_primitive_field.cc
+  ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_reflection_class.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_message_field.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_source_generator_base.cc
-  ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_umbrella_class.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_wrapper_field.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_context.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_doc_comment.cc
@@ -37,6 +37,7 @@ set(libprotoc_files
   ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_field_lite.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_enum_lite.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension.cc
+  ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_extension_lite.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_field.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_file.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_generator.cc
@@ -70,6 +71,7 @@ set(libprotoc_files
   ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_message.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_message_field.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/javanano/javanano_primitive_field.cc
+  ${protobuf_source_dir}/src/google/protobuf/compiler/js/js_generator.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_enum_field.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/objectivec/objectivec_extension.cc
@@ -90,8 +92,15 @@ set(libprotoc_files
   ${protobuf_source_dir}/src/google/protobuf/compiler/zip_writer.cc
 )
 
-add_library(libprotoc ${libprotoc_files})
+add_library(libprotoc ${protobuf_SHARED_OR_STATIC}
+  ${libprotoc_files})
 target_link_libraries(libprotoc libprotobuf)
+if(MSVC AND protobuf_BUILD_SHARED_LIBS)
+  target_compile_definitions(libprotoc
+    PUBLIC  PROTOBUF_USE_DLLS
+    PRIVATE LIBPROTOC_EXPORTS)
+endif()
 set_target_properties(libprotoc PROPERTIES
     COMPILE_DEFINITIONS LIBPROTOC_EXPORTS
-    OUTPUT_NAME ${LIB_PREFIX}protoc)
+    OUTPUT_NAME ${LIB_PREFIX}protoc
+    DEBUG_POSTFIX "${protobuf_DEBUG_POSTFIX}")

+ 18 - 0
cmake/tests.cmake

@@ -2,6 +2,9 @@ if (NOT EXISTS "${PROJECT_SOURCE_DIR}/../gmock/CMakeLists.txt")
   message(FATAL_ERROR "Cannot find gmock directory.")
 endif()
 
+option(protobuf_ABSOLUTE_TEST_PLUGIN_PATH
+  "Use an absolute test_plugin path in tests" ON)
+
 include_directories(
   ${protobuf_source_dir}/gmock
   ${protobuf_source_dir}/gmock/gtest
@@ -49,6 +52,8 @@ set(tests_protos
   google/protobuf/unittest_preserve_unknown_enum.proto
   google/protobuf/unittest_preserve_unknown_enum2.proto
   google/protobuf/unittest_proto3_arena.proto
+  google/protobuf/unittest_proto3_arena_lite.proto
+  google/protobuf/unittest_proto3_lite.proto
   google/protobuf/unittest_well_known_types.proto
   google/protobuf/util/internal/testdata/anys.proto
   google/protobuf/util/internal/testdata/books.proto
@@ -60,6 +65,7 @@ set(tests_protos
   google/protobuf/util/internal/testdata/struct.proto
   google/protobuf/util/internal/testdata/timestamp_duration.proto
   google/protobuf/util/json_format_proto3.proto
+  google/protobuf/util/message_differencer_unittest.proto
 )
 
 macro(compile_proto_file filename)
@@ -112,6 +118,7 @@ set(tests_files
   ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_bootstrap_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_plugin_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/cpp_unittest.cc
+  ${protobuf_source_dir}/src/google/protobuf/compiler/cpp/metadata_test.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/csharp/csharp_generator_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/importer_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/compiler/java/java_doc_comment_unittest.cc
@@ -136,7 +143,9 @@ set(tests_files
   ${protobuf_source_dir}/src/google/protobuf/message_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/no_field_presence_test.cc
   ${protobuf_source_dir}/src/google/protobuf/preserve_unknown_enum_test.cc
+  ${protobuf_source_dir}/src/google/protobuf/proto3_arena_lite_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/proto3_arena_unittest.cc
+  ${protobuf_source_dir}/src/google/protobuf/proto3_lite_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/reflection_ops_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/repeated_field_reflection_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/repeated_field_unittest.cc
@@ -164,12 +173,17 @@ set(tests_files
   ${protobuf_source_dir}/src/google/protobuf/util/internal/protostream_objectwriter_test.cc
   ${protobuf_source_dir}/src/google/protobuf/util/internal/type_info_test_helper.cc
   ${protobuf_source_dir}/src/google/protobuf/util/json_util_test.cc
+  ${protobuf_source_dir}/src/google/protobuf/util/message_differencer_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/util/time_util_test.cc
   ${protobuf_source_dir}/src/google/protobuf/util/type_resolver_util_test.cc
   ${protobuf_source_dir}/src/google/protobuf/well_known_types_unittest.cc
   ${protobuf_source_dir}/src/google/protobuf/wire_format_unittest.cc
 )
 
+if(protobuf_ABSOLUTE_TEST_PLUGIN_PATH)
+  add_compile_options(-DGOOGLE_PROTOBUF_TEST_PLUGIN_PATH="$<TARGET_FILE:test_plugin>")
+endif()
+
 add_executable(tests ${tests_files} ${common_test_files} ${tests_proto_files} ${lite_test_proto_files})
 target_link_libraries(tests libprotoc libprotobuf gmock_main)
 
@@ -194,3 +208,7 @@ set(lite_arena_test_files
 )
 add_executable(lite-arena-test ${lite_arena_test_files} ${common_lite_test_files} ${lite_test_proto_files})
 target_link_libraries(lite-arena-test libprotobuf-lite gmock_main)
+
+add_custom_target(check
+  COMMAND tests
+  WORKING_DIRECTORY ${protobuf_source_dir})

+ 13 - 3
configure.ac

@@ -12,7 +12,7 @@ AC_PREREQ(2.59)
 # In the SVN trunk, the version should always be the next anticipated release
 # version with the "-pre" suffix.  (We used to use "-SNAPSHOT" but this pushed
 # the size of one file name in the dist tarfile over the 99-char limit.)
-AC_INIT([Protocol Buffers],[3.0.0-beta-1],[protobuf@googlegroups.com],[protobuf])
+AC_INIT([Protocol Buffers],[3.0.0-beta-3],[protobuf@googlegroups.com],[protobuf])
 
 AM_MAINTAINER_MODE([enable])
 
@@ -26,7 +26,7 @@ AC_CONFIG_MACRO_DIR([m4])
 AC_ARG_VAR(DIST_LANG, [language to include in the distribution package (i.e., make dist)])
 case "$DIST_LANG" in
   "") DIST_LANG=all ;;
-  all | cpp | csharp | java | python | javanano | objectivec | ruby) ;;
+  all | cpp | csharp | java | python | javanano | objectivec | ruby | js) ;;
   *) AC_MSG_FAILURE([unknown language: $DIST_LANG]) ;;
 esac
 AC_SUBST(DIST_LANG)
@@ -59,6 +59,7 @@ AC_LANG([C++])
 ACX_USE_SYSTEM_EXTENSIONS
 m4_ifdef([AM_PROG_AR], [AM_PROG_AR])
 AM_CONDITIONAL(GCC, test "$GCC" = yes)   # let the Makefile know if we're gcc
+AC_PROG_OBJC
 
 # test_util.cc takes forever to compile with GCC and optimization turned on.
 AC_MSG_CHECKING([C++ compiler flags...])
@@ -163,6 +164,15 @@ case "$target_os" in
     ;;
 esac
 
+# Enable ObjC support for conformance directory on OS X.
+OBJC_CONFORMANCE_TEST=0
+case "$target_os" in
+  darwin*)
+    OBJC_CONFORMANCE_TEST=1
+    ;;
+esac
+AM_CONDITIONAL([OBJC_CONFORMANCE_TEST], [test $OBJC_CONFORMANCE_TEST = 1])
+
 # HACK:  Make gmock's configure script pick up our copy of CFLAGS and CXXFLAGS,
 #   since the flags added by ACX_CHECK_SUNCC must be used when compiling gmock
 #   too.
@@ -170,5 +180,5 @@ export CFLAGS
 export CXXFLAGS
 AC_CONFIG_SUBDIRS([gmock])
 
-AC_CONFIG_FILES([Makefile src/Makefile conformance/Makefile protobuf.pc protobuf-lite.pc])
+AC_CONFIG_FILES([Makefile src/Makefile benchmarks/Makefile conformance/Makefile protobuf.pc protobuf-lite.pc])
 AC_OUTPUT

+ 26 - 4
conformance/ConformanceJava.java

@@ -1,9 +1,12 @@
 
 import com.google.protobuf.conformance.Conformance;
+import com.google.protobuf.util.JsonFormat;
+import com.google.protobuf.util.JsonFormat.TypeRegistry;
 import com.google.protobuf.InvalidProtocolBufferException;
 
 class ConformanceJava {
   private int testCount = 0;
+  private TypeRegistry typeRegistry;
 
   private boolean readFromStdin(byte[] buf, int len) throws Exception {
     int ofs = 0;
@@ -29,7 +32,10 @@ class ConformanceJava {
     if (!readFromStdin(buf, 4)) {
       return -1;
     }
-    return buf[0] | (buf[1] << 1) | (buf[2] << 2) | (buf[3] << 3);
+    return (buf[0] & 0xff)
+        | ((buf[1] & 0xff) << 8)
+        | ((buf[2] & 0xff) << 16)
+        | ((buf[3] & 0xff) << 24);
   }
 
   private void writeLittleEndianIntToStdout(int val) throws Exception {
@@ -54,7 +60,15 @@ class ConformanceJava {
         break;
       }
       case JSON_PAYLOAD: {
-        return Conformance.ConformanceResponse.newBuilder().setSkipped("JSON not yet supported.").build();
+        try {
+          Conformance.TestAllTypes.Builder builder = Conformance.TestAllTypes.newBuilder();
+          JsonFormat.parser().usingTypeRegistry(typeRegistry)
+              .merge(request.getJsonPayload(), builder);
+          testMessage = builder.build();
+        } catch (InvalidProtocolBufferException e) {
+          return Conformance.ConformanceResponse.newBuilder().setParseError(e.getMessage()).build();
+        }
+        break;
       }
       case PAYLOAD_NOT_SET: {
         throw new RuntimeException("Request didn't have payload.");
@@ -73,7 +87,13 @@ class ConformanceJava {
         return Conformance.ConformanceResponse.newBuilder().setProtobufPayload(testMessage.toByteString()).build();
 
       case JSON:
-        return Conformance.ConformanceResponse.newBuilder().setSkipped("JSON not yet supported.").build();
+        try {
+          return Conformance.ConformanceResponse.newBuilder().setJsonPayload(
+              JsonFormat.printer().usingTypeRegistry(typeRegistry).print(testMessage)).build();
+        } catch (InvalidProtocolBufferException | IllegalArgumentException e) {
+          return Conformance.ConformanceResponse.newBuilder().setSerializeError(
+              e.getMessage()).build();
+        }
 
       default: {
         throw new RuntimeException("Unexpected request output.");
@@ -106,8 +126,10 @@ class ConformanceJava {
   }
 
   public void run() throws Exception {
+    typeRegistry = TypeRegistry.newBuilder().add(
+        Conformance.TestAllTypes.getDescriptor()).build();
     while (doTestIo()) {
-      // Empty.
+      this.testCount++;
     }
 
     System.err.println("ConformanceJava: received EOF from test runner after " +

+ 125 - 0
conformance/ConformanceJavaLite.java

@@ -0,0 +1,125 @@
+
+import com.google.protobuf.conformance.Conformance;
+import com.google.protobuf.InvalidProtocolBufferException;
+
+class ConformanceJavaLite {
+  private int testCount = 0;
+
+  private boolean readFromStdin(byte[] buf, int len) throws Exception {
+    int ofs = 0;
+    while (len > 0) {
+      int read = System.in.read(buf, ofs, len);
+      if (read == -1) {
+        return false;  // EOF
+      }
+      ofs += read;
+      len -= read;
+    }
+
+    return true;
+  }
+
+  private void writeToStdout(byte[] buf) throws Exception {
+    System.out.write(buf);
+  }
+
+  // Returns -1 on EOF (the actual values will always be positive).
+  private int readLittleEndianIntFromStdin() throws Exception {
+    byte[] buf = new byte[4];
+    if (!readFromStdin(buf, 4)) {
+      return -1;
+    }
+    return (buf[0] & 0xff)
+        | ((buf[1] & 0xff) << 8)
+        | ((buf[2] & 0xff) << 16)
+        | ((buf[3] & 0xff) << 24);
+  }
+
+  private void writeLittleEndianIntToStdout(int val) throws Exception {
+    byte[] buf = new byte[4];
+    buf[0] = (byte)val;
+    buf[1] = (byte)(val >> 8);
+    buf[2] = (byte)(val >> 16);
+    buf[3] = (byte)(val >> 24);
+    writeToStdout(buf);
+  }
+
+  private Conformance.ConformanceResponse doTest(Conformance.ConformanceRequest request) {
+    Conformance.TestAllTypes testMessage;
+
+    switch (request.getPayloadCase()) {
+      case PROTOBUF_PAYLOAD: {
+        try {
+          testMessage = Conformance.TestAllTypes.parseFrom(request.getProtobufPayload());
+        } catch (InvalidProtocolBufferException e) {
+          return Conformance.ConformanceResponse.newBuilder().setParseError(e.getMessage()).build();
+        }
+        break;
+      }
+      case JSON_PAYLOAD: {
+        return Conformance.ConformanceResponse.newBuilder().setSkipped(
+            "Lite runtime does not support JSON format.").build();
+      }
+      case PAYLOAD_NOT_SET: {
+        throw new RuntimeException("Request didn't have payload.");
+      }
+
+      default: {
+        throw new RuntimeException("Unexpected payload case.");
+      }
+    }
+
+    switch (request.getRequestedOutputFormat()) {
+      case UNSPECIFIED:
+        throw new RuntimeException("Unspecified output format.");
+
+      case PROTOBUF:
+        return Conformance.ConformanceResponse.newBuilder().setProtobufPayload(testMessage.toByteString()).build();
+
+      case JSON:
+        return Conformance.ConformanceResponse.newBuilder().setSkipped(
+            "Lite runtime does not support JSON format.").build();
+
+      default: {
+        throw new RuntimeException("Unexpected request output.");
+      }
+    }
+  }
+
+  private boolean doTestIo() throws Exception {
+    int bytes = readLittleEndianIntFromStdin();
+
+    if (bytes == -1) {
+      return false;  // EOF
+    }
+
+    byte[] serializedInput = new byte[bytes];
+
+    if (!readFromStdin(serializedInput, bytes)) {
+      throw new RuntimeException("Unexpected EOF from test program.");
+    }
+
+    Conformance.ConformanceRequest request =
+        Conformance.ConformanceRequest.parseFrom(serializedInput);
+    Conformance.ConformanceResponse response = doTest(request);
+    byte[] serializedOutput = response.toByteArray();
+
+    writeLittleEndianIntToStdout(serializedOutput.length);
+    writeToStdout(serializedOutput);
+
+    return true;
+  }
+
+  public void run() throws Exception {
+    while (doTestIo()) {
+      this.testCount++;
+    }
+
+    System.err.println("ConformanceJavaLite: received EOF from test runner after " +
+        this.testCount + " tests");
+  }
+
+  public static void main(String[] args) throws Exception {
+    new ConformanceJavaLite().run();
+  }
+}
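
Both Java programs above speak the same framing protocol to the conformance test runner: a 4-byte little-endian length prefix followed by a serialized `ConformanceRequest` on stdin, and the same framing for the `ConformanceResponse` on stdout. The following is a minimal C++ sketch of just that framing, assuming the generated `conformance.pb.h`; the real C++ testee lives in `conformance_cpp.cc`, and the placeholder response here stands in for the per-language test logic.

```
// Framing sketch only: read length-prefixed requests from stdin and write
// length-prefixed responses to stdout until EOF.
#include <cstdio>
#include <string>

#include "conformance.pb.h"

static bool ReadExactly(void* buf, size_t len) {
  return fread(buf, 1, len, stdin) == len;
}

// Reads a 4-byte little-endian length, then that many payload bytes.
static bool ReadLengthPrefixed(std::string* out) {
  unsigned char prefix[4];
  if (!ReadExactly(prefix, 4)) return false;  // EOF: runner is done.
  size_t len = prefix[0] | (prefix[1] << 8) | (prefix[2] << 16) |
               (static_cast<size_t>(prefix[3]) << 24);
  out->resize(len);
  return len == 0 || ReadExactly(&(*out)[0], len);
}

static void WriteLengthPrefixed(const std::string& data) {
  unsigned char prefix[4] = {
      static_cast<unsigned char>(data.size()),
      static_cast<unsigned char>(data.size() >> 8),
      static_cast<unsigned char>(data.size() >> 16),
      static_cast<unsigned char>(data.size() >> 24)};
  fwrite(prefix, 1, 4, stdout);
  fwrite(data.data(), 1, data.size(), stdout);
  fflush(stdout);
}

int main() {
  std::string serialized;
  while (ReadLengthPrefixed(&serialized)) {
    conformance::ConformanceRequest request;
    request.ParseFromString(serialized);
    conformance::ConformanceResponse response;
    response.set_skipped("framing sketch only");  // real testees run the test here
    WriteLengthPrefixed(response.SerializeAsString());
  }
  return 0;
}
```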

+ 218 - 15
conformance/Makefile.am

@@ -1,28 +1,198 @@
 ## Process this file with automake to produce Makefile.in
 
-protoc_inputs =                                                \
+conformance_protoc_inputs =                                    \
   conformance.proto
 
+well_known_type_protoc_inputs =                                \
+  $(top_srcdir)/src/google/protobuf/any.proto                  \
+  $(top_srcdir)/src/google/protobuf/duration.proto             \
+  $(top_srcdir)/src/google/protobuf/field_mask.proto           \
+  $(top_srcdir)/src/google/protobuf/struct.proto               \
+  $(top_srcdir)/src/google/protobuf/timestamp.proto            \
+  $(top_srcdir)/src/google/protobuf/wrappers.proto
+
+
 protoc_outputs =                                               \
   conformance.pb.cc                                            \
   conformance.pb.h
 
+other_language_protoc_outputs =                                \
+  conformance_pb2.py                                           \
+  Conformance.pbobjc.h                                         \
+  Conformance.pbobjc.m                                         \
+  conformance.rb                                               \
+  com/google/protobuf/Any.java                                 \
+  com/google/protobuf/AnyOrBuilder.java                        \
+  com/google/protobuf/AnyProto.java                            \
+  com/google/protobuf/BoolValue.java                           \
+  com/google/protobuf/BoolValueOrBuilder.java                  \
+  com/google/protobuf/BytesValue.java                          \
+  com/google/protobuf/BytesValueOrBuilder.java                 \
+  com/google/protobuf/conformance/Conformance.java             \
+  com/google/protobuf/DoubleValue.java                         \
+  com/google/protobuf/DoubleValueOrBuilder.java                \
+  com/google/protobuf/Duration.java                            \
+  com/google/protobuf/DurationOrBuilder.java                   \
+  com/google/protobuf/DurationProto.java                       \
+  com/google/protobuf/FieldMask.java                           \
+  com/google/protobuf/FieldMaskOrBuilder.java                  \
+  com/google/protobuf/FieldMaskProto.java                      \
+  com/google/protobuf/FloatValue.java                          \
+  com/google/protobuf/FloatValueOrBuilder.java                 \
+  com/google/protobuf/Int32Value.java                          \
+  com/google/protobuf/Int32ValueOrBuilder.java                 \
+  com/google/protobuf/Int64Value.java                          \
+  com/google/protobuf/Int64ValueOrBuilder.java                 \
+  com/google/protobuf/ListValue.java                           \
+  com/google/protobuf/ListValueOrBuilder.java                  \
+  com/google/protobuf/NullValue.java                           \
+  com/google/protobuf/StringValue.java                         \
+  com/google/protobuf/StringValueOrBuilder.java                \
+  com/google/protobuf/Struct.java                              \
+  com/google/protobuf/StructOrBuilder.java                     \
+  com/google/protobuf/StructProto.java                         \
+  com/google/protobuf/Timestamp.java                           \
+  com/google/protobuf/TimestampOrBuilder.java                  \
+  com/google/protobuf/TimestampProto.java                      \
+  com/google/protobuf/UInt32Value.java                         \
+  com/google/protobuf/UInt32ValueOrBuilder.java                \
+  com/google/protobuf/UInt64Value.java                         \
+  com/google/protobuf/UInt64ValueOrBuilder.java                \
+  com/google/protobuf/Value.java                               \
+  com/google/protobuf/ValueOrBuilder.java                      \
+  com/google/protobuf/WrappersProto.java                       \
+  google/protobuf/any.pb.cc                                    \
+  google/protobuf/any.pb.h                                     \
+  google/protobuf/any.rb                                       \
+  google/protobuf/any_pb2.py                                   \
+  google/protobuf/duration.pb.cc                               \
+  google/protobuf/duration.pb.h                                \
+  google/protobuf/duration.rb                                  \
+  google/protobuf/duration_pb2.py                              \
+  google/protobuf/field_mask.pb.cc                             \
+  google/protobuf/field_mask.pb.h                              \
+  google/protobuf/field_mask.rb                                \
+  google/protobuf/field_mask_pb2.py                            \
+  google/protobuf/struct.pb.cc                                 \
+  google/protobuf/struct.pb.h                                  \
+  google/protobuf/struct.rb                                    \
+  google/protobuf/struct_pb2.py                                \
+  google/protobuf/timestamp.pb.cc                              \
+  google/protobuf/timestamp.pb.h                               \
+  google/protobuf/timestamp.rb                                 \
+  google/protobuf/timestamp_pb2.py                             \
+  google/protobuf/wrappers.pb.cc                               \
+  google/protobuf/wrappers.pb.h                                \
+  google/protobuf/wrappers.rb                                  \
+  google/protobuf/wrappers_pb2.py                              \
+  lite/com/google/protobuf/Any.java                            \
+  lite/com/google/protobuf/AnyOrBuilder.java                   \
+  lite/com/google/protobuf/AnyProto.java                       \
+  lite/com/google/protobuf/BoolValue.java                      \
+  lite/com/google/protobuf/BoolValueOrBuilder.java             \
+  lite/com/google/protobuf/BytesValue.java                     \
+  lite/com/google/protobuf/BytesValueOrBuilder.java            \
+  lite/com/google/protobuf/conformance/Conformance.java        \
+  lite/com/google/protobuf/DoubleValue.java                    \
+  lite/com/google/protobuf/DoubleValueOrBuilder.java           \
+  lite/com/google/protobuf/Duration.java                       \
+  lite/com/google/protobuf/DurationOrBuilder.java              \
+  lite/com/google/protobuf/DurationProto.java                  \
+  lite/com/google/protobuf/FieldMask.java                      \
+  lite/com/google/protobuf/FieldMaskOrBuilder.java             \
+  lite/com/google/protobuf/FieldMaskProto.java                 \
+  lite/com/google/protobuf/FloatValue.java                     \
+  lite/com/google/protobuf/FloatValueOrBuilder.java            \
+  lite/com/google/protobuf/Int32Value.java                     \
+  lite/com/google/protobuf/Int32ValueOrBuilder.java            \
+  lite/com/google/protobuf/Int64Value.java                     \
+  lite/com/google/protobuf/Int64ValueOrBuilder.java            \
+  lite/com/google/protobuf/ListValue.java                      \
+  lite/com/google/protobuf/ListValueOrBuilder.java             \
+  lite/com/google/protobuf/NullValue.java                      \
+  lite/com/google/protobuf/StringValue.java                    \
+  lite/com/google/protobuf/StringValueOrBuilder.java           \
+  lite/com/google/protobuf/Struct.java                         \
+  lite/com/google/protobuf/StructOrBuilder.java                \
+  lite/com/google/protobuf/StructProto.java                    \
+  lite/com/google/protobuf/Timestamp.java                      \
+  lite/com/google/protobuf/TimestampOrBuilder.java             \
+  lite/com/google/protobuf/TimestampProto.java                 \
+  lite/com/google/protobuf/UInt32Value.java                    \
+  lite/com/google/protobuf/UInt32ValueOrBuilder.java           \
+  lite/com/google/protobuf/UInt64Value.java                    \
+  lite/com/google/protobuf/UInt64ValueOrBuilder.java           \
+  lite/com/google/protobuf/Value.java                          \
+  lite/com/google/protobuf/ValueOrBuilder.java                 \
+  lite/com/google/protobuf/WrappersProto.java
+
 bin_PROGRAMS = conformance-test-runner conformance-cpp
 
+# All source files except C++/Objective-C ones should be explicitly listed
+# here because the autoconf tools don't include files of other languages
+# automatically.
+EXTRA_DIST =                  \
+  ConformanceJava.java        \
+  ConformanceJavaLite.java    \
+  README.md                   \
+  conformance.proto           \
+  conformance_python.py       \
+  conformance_ruby.rb         \
+  failure_list_cpp.txt        \
+  failure_list_csharp.txt     \
+  failure_list_java.txt       \
+  failure_list_objc.txt       \
+  failure_list_python.txt     \
+  failure_list_python_cpp.txt \
+  failure_list_python-post26.txt \
+  failure_list_ruby.txt
+
 conformance_test_runner_LDADD = $(top_srcdir)/src/libprotobuf.la
-conformance_test_runner_SOURCES = conformance_test.cc conformance_test_runner.cc
+conformance_test_runner_SOURCES = conformance_test.h conformance_test.cc \
+                                  conformance_test_runner.cc             \
+                                  third_party/jsoncpp/json.h             \
+                                  third_party/jsoncpp/jsoncpp.cpp
 nodist_conformance_test_runner_SOURCES = conformance.pb.cc
-conformance_test_runner_CPPFLAGS = -I$(top_srcdir)/src
+conformance_test_runner_CPPFLAGS = -I$(top_srcdir)/src -I$(srcdir)
+conformance_test_runner_CXXFLAGS = -std=c++11
+# Explicit deps because BUILT_SOURCES are only done before a "make all/check"
+# so a direct "make test_cpp" could fail if parallel enough.
+conformance_test_runner-conformance_test.$(OBJEXT): conformance.pb.h
+conformance_test_runner-conformance_test_runner.$(OBJEXT): conformance.pb.h
 
 conformance_cpp_LDADD = $(top_srcdir)/src/libprotobuf.la
 conformance_cpp_SOURCES = conformance_cpp.cc
 nodist_conformance_cpp_SOURCES = conformance.pb.cc
 conformance_cpp_CPPFLAGS = -I$(top_srcdir)/src
+# Explicit dep because BUILT_SOURCES are only done before a "make all/check"
+# so a direct "make test_cpp" could fail if parallel enough.
+conformance_cpp-conformance_cpp.$(OBJEXT): conformance.pb.h
+
+if OBJC_CONFORMANCE_TEST
+
+bin_PROGRAMS += conformance-objc
+
+conformance_objc_SOURCES = conformance_objc.m ../objectivec/GPBProtocolBuffers.m
+nodist_conformance_objc_SOURCES = Conformance.pbobjc.m
+# On Travis, the build fails without the isysroot because the system headers
+# being found don't include generics support for NSArray/NSDictionary; the
+# best guess is that their image at one time had an odd Xcode setup and old
+# frameworks are still being found.
+conformance_objc_CPPFLAGS = -I$(top_srcdir)/objectivec -isysroot `xcrun --sdk macosx --show-sdk-path`
+conformance_objc_LDFLAGS = -framework Foundation
+# Explicit dep because BUILT_SOURCES are only done before a "make all/check"
+# so a direct "make test_objc" could fail if parallel enough.
+conformance_objc-conformance_objc.$(OBJEXT): Conformance.pbobjc.h
+
+endif
 
 if USE_EXTERNAL_PROTOC
 
-protoc_middleman: $(protoc_inputs)
-	$(PROTOC) -I$(srcdir) --cpp_out=. --java_out=. --ruby_out=. $^
+# Some implementations include pre-generated versions of well-known types.
+protoc_middleman: $(conformance_protoc_inputs) $(well_known_type_protoc_inputs)
+	$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --objc_out=. --python_out=. $(conformance_protoc_inputs)
+	$(PROTOC) -I$(srcdir) -I$(top_srcdir) --cpp_out=. --java_out=. --ruby_out=. --python_out=. $(well_known_type_protoc_inputs)
+	$(PROTOC) -I$(srcdir) -I$(top_srcdir) --java_out=lite:lite $(conformance_protoc_inputs) $(well_known_type_protoc_inputs)
 	touch protoc_middleman
 
 else
@@ -30,35 +200,50 @@ else
 # We have to cd to $(srcdir) before executing protoc because $(protoc_inputs) is
 # relative to srcdir, which may not be the same as the current directory when
 # building out-of-tree.
-protoc_middleman: $(top_srcdir)/src/protoc$(EXEEXT) $(protoc_inputs)
-	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd $(protoc_inputs) )
+protoc_middleman: $(top_srcdir)/src/protoc$(EXEEXT) $(conformance_protoc_inputs) $(well_known_type_protoc_inputs)
+	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --objc_out=$$oldpwd --python_out=$$oldpwd $(conformance_protoc_inputs) )
+	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --cpp_out=$$oldpwd --java_out=$$oldpwd --ruby_out=$$oldpwd --python_out=$$oldpwd $(well_known_type_protoc_inputs) )
+	@mkdir -p lite
+	oldpwd=`pwd` && ( cd $(srcdir) && $$oldpwd/../src/protoc$(EXEEXT) -I. -I$(top_srcdir)/src --java_out=lite:$$oldpwd/lite $(conformance_protoc_inputs) $(well_known_type_protoc_inputs) )
 	touch protoc_middleman
 
 endif
 
 $(protoc_outputs): protoc_middleman
 
-BUILT_SOURCES = $(protoc_outputs)
+$(other_language_protoc_outputs): protoc_middleman
+
+BUILT_SOURCES = $(protoc_outputs) $(other_language_protoc_outputs)
 
-CLEANFILES = $(protoc_outputs) protoc_middleman javac_middleman conformance-java conformance-csharp
+CLEANFILES = $(protoc_outputs) protoc_middleman javac_middleman conformance-java javac_middleman_lite conformance-java-lite conformance-csharp $(other_language_protoc_outputs)
 
 MAINTAINERCLEANFILES =   \
   Makefile.in
 
-javac_middleman: ConformanceJava.java protoc_middleman
-	javac -classpath ../java/target/classes ConformanceJava.java com/google/protobuf/conformance/Conformance.java
+javac_middleman: ConformanceJava.java protoc_middleman $(other_language_protoc_outputs)
+	jar=`ls ../java/util/target/*jar-with-dependencies.jar` && javac -classpath ../java/target/classes:$$jar ConformanceJava.java com/google/protobuf/conformance/Conformance.java
 	@touch javac_middleman
 
 conformance-java: javac_middleman
 	@echo "Writing shortcut script conformance-java..."
 	@echo '#! /bin/sh' > conformance-java
-	@echo 'java -classpath .:../java/target/classes ConformanceJava "$$@"' >> conformance-java
+	@jar=`ls ../java/util/target/*jar-with-dependencies.jar` && echo java -classpath .:../java/target/classes:$$jar ConformanceJava '$$@' >> conformance-java
 	@chmod +x conformance-java
 
+javac_middleman_lite: ConformanceJavaLite.java protoc_middleman $(other_language_protoc_outputs)
+	javac -classpath ../java/lite/target/classes:lite ConformanceJavaLite.java lite/com/google/protobuf/conformance/Conformance.java
+	@touch javac_middleman_lite
+
+conformance-java-lite: javac_middleman_lite
+	@echo "Writing shortcut script conformance-java-lite..."
+	@echo '#! /bin/sh' > conformance-java-lite
+	@echo java -classpath .:../java/lite/target/classes:lite ConformanceJavaLite '$$@' >> conformance-java-lite
+	@chmod +x conformance-java-lite
+
 # Currently the conformance code is alongside the rest of the C#
 # source, as it's easier to maintain there. We assume we've already
 # built that, so we just need a script to run it.
-conformance-csharp:
+conformance-csharp: $(other_language_protoc_outputs)
 	@echo "Writing shortcut script conformance-csharp..."
 	@echo '#! /bin/sh' > conformance-csharp
 	@echo 'mono ../csharp/src/Google.Protobuf.Conformance/bin/Release/Google.Protobuf.Conformance.exe "$$@"' >> conformance-csharp
@@ -69,10 +254,28 @@ test_cpp: protoc_middleman conformance-test-runner conformance-cpp
 	./conformance-test-runner --failure_list failure_list_cpp.txt ./conformance-cpp
 
 test_java: protoc_middleman conformance-test-runner conformance-java
-	./conformance-test-runner ./conformance-java
+	./conformance-test-runner --failure_list failure_list_java.txt ./conformance-java
+
+test_java_lite: protoc_middleman conformance-test-runner conformance-java-lite
+	./conformance-test-runner ./conformance-java-lite
 
 test_csharp: protoc_middleman conformance-test-runner conformance-csharp
 	./conformance-test-runner --failure_list failure_list_csharp.txt ./conformance-csharp
 
-test_ruby: protoc_middleman conformance-test-runner
+test_ruby: protoc_middleman conformance-test-runner $(other_language_protoc_outputs)
 	RUBYLIB=../ruby/lib:. ./conformance-test-runner --failure_list failure_list_ruby.txt ./conformance_ruby.rb
+
+# These depend on library paths being properly set up.  The easiest way to
+# run them is to just use "tox" from the python dir.
+test_python: protoc_middleman conformance-test-runner
+	./conformance-test-runner --failure_list failure_list_python.txt $(CONFORMANCE_PYTHON_EXTRA_FAILURES) ./conformance_python.py
+
+test_python_cpp: protoc_middleman conformance-test-runner
+	./conformance-test-runner --failure_list failure_list_python_cpp.txt $(CONFORMANCE_PYTHON_EXTRA_FAILURES) ./conformance_python.py
+
+if OBJC_CONFORMANCE_TEST
+
+test_objc: protoc_middleman conformance-test-runner conformance-objc
+	./conformance-test-runner --failure_list failure_list_objc.txt ./conformance-objc
+
+endif
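Taken together, the test_* targets above follow one pattern: build or locate the per-language testee, then hand it to ./conformance-test-runner together with that language's failure list. A minimal sketch of the same invocation from Python, assuming the runner and testee have already been built in this directory (the run_conformance helper is illustrative, not part of the Makefile):

    # Minimal sketch: drive one testee through the conformance runner.
    import subprocess

    def run_conformance(testee, failure_list=None):
        cmd = ["./conformance-test-runner"]
        if failure_list:
            # Failures listed in the file are tolerated; anything else fails the run.
            cmd += ["--failure_list", failure_list]
        cmd.append(testee)
        return subprocess.call(cmd)  # non-zero means unexpected failures

    run_conformance("./conformance_python.py", "failure_list_python.txt")

The failure lists checked in alongside the Makefile (failure_list_cpp.txt, failure_list_java.txt, and so on) are what let the overall suite pass while individual implementations still have known gaps.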

+ 61 - 0
conformance/conformance.proto

@@ -32,6 +32,13 @@ syntax = "proto3";
 package conformance;
 option java_package = "com.google.protobuf.conformance";
 
+import "google/protobuf/any.proto";
+import "google/protobuf/duration.proto";
+import "google/protobuf/field_mask.proto";
+import "google/protobuf/struct.proto";
+import "google/protobuf/timestamp.proto";
+import "google/protobuf/wrappers.proto";
+
 // This defines the conformance testing protocol.  This protocol exists between
 // the conformance test suite itself and the code being tested.  For each test,
 // the suite will send a ConformanceRequest message and expect a
@@ -84,6 +91,11 @@ message ConformanceResponse {
     // test.  Some of the test cases are intentionally invalid input.
     string parse_error = 1;
 
+    // If the input was successfully parsed but errors occurred when
+    // serializing it to the requested output format, set the error message in
+    // this field.
+    string serialize_error = 6;
+
     // This should be set if some other error occurred.  This will always
     // indicate that the test failed.  The string can provide more information
     // about the failure.
@@ -199,6 +211,55 @@ message TestAllTypes {
     string oneof_string = 113;
     bytes oneof_bytes = 114;
   }
+
+  // Well-known types
+  google.protobuf.BoolValue optional_bool_wrapper = 201;
+  google.protobuf.Int32Value optional_int32_wrapper = 202;
+  google.protobuf.Int64Value optional_int64_wrapper = 203;
+  google.protobuf.UInt32Value optional_uint32_wrapper = 204;
+  google.protobuf.UInt64Value optional_uint64_wrapper = 205;
+  google.protobuf.FloatValue optional_float_wrapper = 206;
+  google.protobuf.DoubleValue optional_double_wrapper = 207;
+  google.protobuf.StringValue optional_string_wrapper = 208;
+  google.protobuf.BytesValue optional_bytes_wrapper = 209;
+
+  repeated google.protobuf.BoolValue repeated_bool_wrapper = 211;
+  repeated google.protobuf.Int32Value repeated_int32_wrapper = 212;
+  repeated google.protobuf.Int64Value repeated_int64_wrapper = 213;
+  repeated google.protobuf.UInt32Value repeated_uint32_wrapper = 214;
+  repeated google.protobuf.UInt64Value repeated_uint64_wrapper = 215;
+  repeated google.protobuf.FloatValue repeated_float_wrapper = 216;
+  repeated google.protobuf.DoubleValue repeated_double_wrapper = 217;
+  repeated google.protobuf.StringValue repeated_string_wrapper = 218;
+  repeated google.protobuf.BytesValue repeated_bytes_wrapper = 219;
+
+  google.protobuf.Duration optional_duration = 301;
+  google.protobuf.Timestamp optional_timestamp = 302;
+  google.protobuf.FieldMask optional_field_mask = 303;
+  google.protobuf.Struct optional_struct = 304;
+  google.protobuf.Any optional_any = 305;
+  google.protobuf.Value optional_value = 306;
+
+  repeated google.protobuf.Duration repeated_duration = 311;
+  repeated google.protobuf.Timestamp repeated_timestamp = 312;
+  repeated google.protobuf.FieldMask repeated_fieldmask = 313;
+  repeated google.protobuf.Struct repeated_struct = 324;
+  repeated google.protobuf.Any repeated_any = 315;
+  repeated google.protobuf.Value repeated_value = 316;
+
+  // Test field-name-to-JSON-name convention.
+  int32 fieldname1 = 401;
+  int32 field_name2 = 402;
+  int32 _field_name3 = 403;
+  int32 field__name4_ = 404;
+  int32 field0name5 = 405;
+  int32 field_0_name6 = 406;
+  int32 fieldName7 = 407;
+  int32 FieldName8 = 408;
+  int32 field_Name9 = 409;
+  int32 Field_Name10 = 410;
+  int32 FIELD_NAME11 = 411;
+  int32 FIELD_name12 = 412;
 }
 
 message ForeignMessage {
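As the comments above describe, each request carries exactly one payload and each response is expected to carry exactly one result, with the new serialize_error field covering the case where parsing succeeded but producing the requested output did not. A minimal sketch, in Python against the generated conformance_pb2 module, of how the outcomes map onto the result oneof (the respond helper is illustrative; successful runs would instead set protobuf_payload or json_payload):

    import conformance_pb2

    def respond(outcome, detail=""):
        # Exactly one field of the "result" oneof should end up set.
        response = conformance_pb2.ConformanceResponse()
        if outcome == "parse_error":
            response.parse_error = detail          # input could not be parsed
        elif outcome == "serialize_error":
            response.serialize_error = detail      # parsed, but output failed
        elif outcome == "skipped":
            response.skipped = detail              # feature not supported
        else:
            response.runtime_error = detail        # any other error is a failure
        return response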

+ 15 - 2
conformance/conformance_cpp.cc

@@ -108,7 +108,11 @@ void DoTest(const ConformanceRequest& request, ConformanceResponse* response) {
         return;
       }
 
-      GOOGLE_CHECK(test_message.ParseFromString(proto_binary));
+      if (!test_message.ParseFromString(proto_binary)) {
+        response->set_runtime_error(
+            "Parsing JSON generates invalid proto output.");
+        return;
+      }
       break;
     }
 
@@ -132,9 +136,18 @@ void DoTest(const ConformanceRequest& request, ConformanceResponse* response) {
       GOOGLE_CHECK(test_message.SerializeToString(&proto_binary));
       Status status = BinaryToJsonString(type_resolver, *type_url, proto_binary,
                                          response->mutable_json_payload());
-      GOOGLE_CHECK(status.ok());
+      if (!status.ok()) {
+        response->set_serialize_error(
+            string("Failed to serialize JSON output: ") +
+            status.error_message().as_string());
+        return;
+      }
       break;
     }
+
+    default:
+      GOOGLE_LOG(FATAL) << "Unknown output format: "
+                        << request.requested_output_format();
   }
 }
 

+ 179 - 0
conformance/conformance_objc.m

@@ -0,0 +1,179 @@
+// Protocol Buffers - Google's data interchange format
+// Copyright 2015 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#import <Foundation/Foundation.h>
+
+#import "Conformance.pbobjc.h"
+
+static void Die(NSString *format, ...) __dead2;
+
+static BOOL verbose = NO;
+static int32_t testCount = 0;
+
+static void Die(NSString *format, ...) {
+  va_list args;
+  va_start(args, format);
+  NSString *msg = [[NSString alloc] initWithFormat:format arguments:args];
+  NSLog(@"%@", msg);
+  va_end(args);
+  [msg release];
+  exit(66);
+}
+
+static NSData *CheckedReadDataOfLength(NSFileHandle *handle, NSUInteger numBytes) {
+  NSData *data = [handle readDataOfLength:numBytes];
+  NSUInteger dataLen = data.length;
+  if (dataLen == 0) {
+    return nil;  // EOF.
+  }
+  if (dataLen != numBytes) {
+    Die(@"Failed to read the request length (%d), only got: %@",
+        numBytes, data);
+  }
+  return data;
+}
+
+static ConformanceResponse *DoTest(ConformanceRequest *request) {
+  ConformanceResponse *response = [ConformanceResponse message];
+  TestAllTypes *testMessage = nil;
+
+  switch (request.payloadOneOfCase) {
+    case ConformanceRequest_Payload_OneOfCase_GPBUnsetOneOfCase:
+      Die(@"Request didn't have a payload: %@", request);
+      break;
+
+    case ConformanceRequest_Payload_OneOfCase_ProtobufPayload: {
+      NSError *error = nil;
+      testMessage = [TestAllTypes parseFromData:request.protobufPayload
+                                          error:&error];
+      if (!testMessage) {
+        response.parseError =
+            [NSString stringWithFormat:@"Parse error: %@", error];
+      }
+      break;
+    }
+
+    case ConformanceRequest_Payload_OneOfCase_JsonPayload:
+      response.skipped = @"ObjC doesn't support parsing JSON";
+      break;
+  }
+
+  if (testMessage) {
+    switch (request.requestedOutputFormat) {
+      case WireFormat_GPBUnrecognizedEnumeratorValue:
+      case WireFormat_Unspecified:
+        Die(@"Unrecognized/unspecified output format: %@", request);
+        break;
+
+      case WireFormat_Protobuf:
+        response.protobufPayload = testMessage.data;
+        if (!response.protobufPayload) {
+          response.serializeError =
+            [NSString stringWithFormat:@"Failed to make data from: %@", testMessage];
+        }
+        break;
+
+      case WireFormat_Json:
+        response.skipped = @"ObjC doesn't support generating JSON";
+        break;
+    }
+  }
+
+  return response;
+}
+
+static uint32_t UInt32FromLittleEndianData(NSData *data) {
+  if (data.length != sizeof(uint32_t)) {
+    Die(@"Data not the right size for uint32_t: %@", data);
+  }
+  uint32_t value;
+  memcpy(&value, data.bytes, sizeof(uint32_t));
+  return CFSwapInt32LittleToHost(value);
+}
+
+static NSData *UInt32ToLittleEndianData(uint32_t num) {
+  uint32_t value = CFSwapInt32HostToLittle(num);
+  return [NSData dataWithBytes:&value length:sizeof(uint32_t)];
+}
+
+static BOOL DoTestIo(NSFileHandle *input, NSFileHandle *output) {
+  // See conformance_test_runner.cc for the wire format.
+  NSData *data = CheckedReadDataOfLength(input, sizeof(uint32_t));
+  if (!data) {
+    // EOF.
+    return NO;
+  }
+  uint32_t numBytes = UInt32FromLittleEndianData(data);
+  data = CheckedReadDataOfLength(input, numBytes);
+  if (!data) {
+    Die(@"Failed to read request");
+  }
+
+  NSError *error = nil;
+  ConformanceRequest *request = [ConformanceRequest parseFromData:data
+                                                            error:&error];
+  if (!request) {
+    Die(@"Failed to parse the message data: %@", error);
+  }
+
+  ConformanceResponse *response = DoTest(request);
+  if (!response) {
+    Die(@"Failed to make a reply from %@", request);
+  }
+
+  data = response.data;
+  [output writeData:UInt32ToLittleEndianData((int32_t)data.length)];
+  [output writeData:data];
+
+  if (verbose) {
+    NSLog(@"Request: %@", request);
+    NSLog(@"Response: %@", response);
+  }
+
+  ++testCount;
+  return YES;
+}
+
+int main(int argc, const char *argv[]) {
+  @autoreleasepool {
+    NSFileHandle *input = [[NSFileHandle fileHandleWithStandardInput] retain];
+    NSFileHandle *output = [[NSFileHandle fileHandleWithStandardOutput] retain];
+
+    BOOL notDone = YES;
+    while (notDone) {
+      @autoreleasepool {
+        notDone = DoTestIo(input, output);
+      }
+    }
+
+    NSLog(@"Received EOF from test runner after %d tests, exiting.", testCount);
+  }
+  return 0;
+}
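The framing that conformance_objc.m reads here, and that conformance_python.py below implements the same way, is a 4-byte little-endian length followed by a serialized message in each direction. A sketch of driving a testee by hand over that framing, assuming the generated conformance_pb2 module is importable and ./conformance_python.py is the testee (any of the shortcut scripts would do):

    import struct
    import subprocess

    import conformance_pb2

    request = conformance_pb2.ConformanceRequest()
    request.json_payload = '{"optionalString": "Hello, World!"}'
    request.requested_output_format = conformance_pb2.PROTOBUF

    payload = request.SerializeToString()
    proc = subprocess.Popen(["./conformance_python.py"],
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    # <length><request> in, <length><response> out.
    out, _ = proc.communicate(struct.pack("<I", len(payload)) + payload)

    length = struct.unpack("<I", out[:4])[0]
    response = conformance_pb2.ConformanceResponse()
    response.ParseFromString(out[4:4 + length])
    print(response)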

+ 130 - 0
conformance/conformance_python.py

@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""A conformance test implementation for the Python protobuf library.
+
+See conformance.proto for more information.
+"""
+
+import struct
+import sys
+import os
+from google.protobuf import message
+from google.protobuf import json_format
+import conformance_pb2
+
+sys.stdout = os.fdopen(sys.stdout.fileno(), 'wb', 0)
+sys.stdin = os.fdopen(sys.stdin.fileno(), 'rb', 0)
+
+test_count = 0
+verbose = False
+
+class ProtocolError(Exception):
+  pass
+
+def do_test(request):
+  test_message = conformance_pb2.TestAllTypes()
+  response = conformance_pb2.ConformanceResponse()
+  test_message = conformance_pb2.TestAllTypes()
+
+  try:
+    if request.WhichOneof('payload') == 'protobuf_payload':
+      try:
+        test_message.ParseFromString(request.protobuf_payload)
+      except message.DecodeError as e:
+        response.parse_error = str(e)
+        return response
+
+    elif request.WhichOneof('payload') == 'json_payload':
+      try:
+        json_format.Parse(request.json_payload, test_message)
+      except json_format.ParseError as e:
+        response.parse_error = str(e)
+        return response
+
+    else:
+      raise ProtocolError("Request didn't have payload.")
+
+    if request.requested_output_format == conformance_pb2.UNSPECIFIED:
+      raise ProtocolError("Unspecified output format")
+
+    elif request.requested_output_format == conformance_pb2.PROTOBUF:
+      response.protobuf_payload = test_message.SerializeToString()
+
+    elif request.requested_output_format == conformance_pb2.JSON:
+      response.json_payload = json_format.MessageToJson(test_message)
+
+  except Exception as e:
+    response.runtime_error = str(e)
+
+  return response
+
+def do_test_io():
+  length_bytes = sys.stdin.read(4)
+  if len(length_bytes) == 0:
+    return False   # EOF
+  elif len(length_bytes) != 4:
+    raise IOError("I/O error")
+
+  # "I" is "unsigned int", so this depends on running on a platform with
+  # 32-bit "unsigned int" type.  The Python struct module unfortunately
+  # has no format specifier for uint32_t.
+  length = struct.unpack("<I", length_bytes)[0]
+  serialized_request = sys.stdin.read(length)
+  if len(serialized_request) != length:
+    raise IOError("I/O error")
+
+  request = conformance_pb2.ConformanceRequest()
+  request.ParseFromString(serialized_request)
+
+  response = do_test(request)
+
+  serialized_response = response.SerializeToString()
+  sys.stdout.write(struct.pack("<I", len(serialized_response)))
+  sys.stdout.write(serialized_response)
+  sys.stdout.flush()
+
+  if verbose:
+    sys.stderr.write("conformance_python: request=%s, response=%s\n" % (
+                       request,
+                       response))
+
+  global test_count
+  test_count += 1
+
+  return True
+
+while True:
+  if not do_test_io():
+    sys.stderr.write("conformance_python: received EOF from test runner " +
+                     "after %s tests, exiting\n" % (test_count))
+    sys.exit(0)
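The JSON branches above delegate entirely to google.protobuf.json_format. A standalone sketch of the same round trip outside the runner, assuming conformance_pb2 has been generated and is importable:

    from google.protobuf import json_format

    import conformance_pb2

    msg = conformance_pb2.TestAllTypes()
    # JSON -> message: field names may be lowerCamelCase or the original snake_case.
    json_format.Parse('{"optionalString": "Hello, World!"}', msg)
    # message -> JSON: serializers emit lowerCamelCase names by default.
    print(json_format.MessageToJson(msg))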

+ 12 - 4
conformance/conformance_ruby.rb

@@ -51,7 +51,12 @@ def do_test(request)
       end
 
     when :json_payload
-      test_message = Conformance::TestAllTypes.decode_json(request.json_payload)
+      begin
+        test_message = Conformance::TestAllTypes.decode_json(request.json_payload)
+      rescue Google::Protobuf::ParseError => err
+        response.parse_error = err.message.encode('utf-8')
+        return response
+      end
 
     when nil
       fail "Request didn't have payload"
@@ -66,6 +71,9 @@ def do_test(request)
 
     when :JSON
       response.json_payload = test_message.to_json
+
+    when nil
+      fail "Request didn't have requested output format"
     end
   rescue StandardError => err
     response.runtime_error = err.message.encode('utf-8')
@@ -96,8 +104,8 @@ def do_test_io
   STDOUT.flush
 
   if $verbose
-    STDERR.puts("conformance-cpp: request={request.to_json}, " \
-                                 "response={response.to_json}\n")
+    STDERR.puts("conformance_ruby: request=#{request.to_json}, " \
+                                 "response=#{response.to_json}\n")
   end
 
   $test_count += 1
@@ -107,7 +115,7 @@ end
 
 loop do
   unless do_test_io
-    STDERR.puts('conformance-cpp: received EOF from test runner ' \
+    STDERR.puts('conformance_ruby: received EOF from test runner ' \
                 "after #{$test_count} tests, exiting")
     break
   end

+ 1495 - 23
conformance/conformance_test.cc

@@ -37,10 +37,13 @@
 #include <google/protobuf/stubs/stringprintf.h>
 #include <google/protobuf/text_format.h>
 #include <google/protobuf/util/json_util.h>
+#include <google/protobuf/util/field_comparator.h>
 #include <google/protobuf/util/message_differencer.h>
 #include <google/protobuf/util/type_resolver_util.h>
 #include <google/protobuf/wire_format_lite.h>
 
+#include "third_party/jsoncpp/json.h"
+
 using conformance::ConformanceRequest;
 using conformance::ConformanceResponse;
 using conformance::TestAllTypes;
@@ -49,6 +52,7 @@ using google::protobuf::Descriptor;
 using google::protobuf::FieldDescriptor;
 using google::protobuf::internal::WireFormatLite;
 using google::protobuf::TextFormat;
+using google::protobuf::util::DefaultFieldComparator;
 using google::protobuf::util::JsonToBinaryString;
 using google::protobuf::util::MessageDifferencer;
 using google::protobuf::util::NewTypeResolverForDescriptorPool;
@@ -220,7 +224,7 @@ void ConformanceTestSuite::RunTest(const string& test_name,
   string serialized_response;
   request.SerializeToString(&serialized_request);
 
-  runner_->RunTest(serialized_request, &serialized_response);
+  runner_->RunTest(test_name, serialized_request, &serialized_response);
 
   if (!response->ParseFromString(serialized_response)) {
     response->Clear();
@@ -240,7 +244,9 @@ void ConformanceTestSuite::RunValidInputTest(
     const string& equivalent_text_format, WireFormat requested_output) {
   TestAllTypes reference_message;
   GOOGLE_CHECK(
-      TextFormat::ParseFromString(equivalent_text_format, &reference_message));
+      TextFormat::ParseFromString(equivalent_text_format, &reference_message))
+          << "Failed to parse data for test case: " << test_name
+          << ", data: " << equivalent_text_format;
 
   ConformanceRequest request;
   ConformanceResponse response;
@@ -254,9 +260,8 @@ void ConformanceTestSuite::RunValidInputTest(
       request.set_json_payload(input);
       break;
 
-    case conformance::UNSPECIFIED:
+    default:
       GOOGLE_LOG(FATAL) << "Unspecified input format";
-
   }
 
   request.set_requested_output_format(requested_output);
@@ -268,8 +273,9 @@ void ConformanceTestSuite::RunValidInputTest(
   switch (response.result_case()) {
     case ConformanceResponse::kParseError:
     case ConformanceResponse::kRuntimeError:
+    case ConformanceResponse::kSerializeError:
       ReportFailure(test_name, request, response,
-                    "Failed to parse valid JSON input.");
+                    "Failed to parse JSON input or produce JSON output.");
       return;
 
     case ConformanceResponse::kSkipped:
@@ -293,7 +299,13 @@ void ConformanceTestSuite::RunValidInputTest(
         return;
       }
 
-      GOOGLE_CHECK(test_message.ParseFromString(binary_protobuf));
+      if (!test_message.ParseFromString(binary_protobuf)) {
+        ReportFailure(test_name, request, response,
+                      "INTERNAL ERROR: internal JSON->protobuf transcode "
+                      "yielded unparseable proto.");
+        return;
+      }
+
       break;
     }
 
@@ -313,13 +325,20 @@ void ConformanceTestSuite::RunValidInputTest(
 
       break;
     }
+
+    default:
+      GOOGLE_LOG(FATAL) << test_name << ": unknown payload type: "
+                        << response.result_case();
   }
 
   MessageDifferencer differencer;
+  DefaultFieldComparator field_comparator;
+  field_comparator.set_treat_nan_as_equal(true);
+  differencer.set_field_comparator(&field_comparator);
   string differences;
   differencer.ReportDifferencesToString(&differences);
 
-  if (differencer.Equals(reference_message, test_message)) {
+  if (differencer.Compare(reference_message, test_message)) {
     ReportSuccess(test_name);
   } else {
     ReportFailure(test_name, request, response,
@@ -343,6 +362,8 @@ void ConformanceTestSuite::ExpectParseFailureForProto(
   RunTest(effective_test_name, request, &response);
   if (response.result_case() == ConformanceResponse::kParseError) {
     ReportSuccess(effective_test_name);
+  } else if (response.result_case() == ConformanceResponse::kSkipped) {
+    ReportSkip(effective_test_name, request, response);
   } else {
     ReportFailure(effective_test_name, request, response,
                   "Should have failed to parse, but didn't.");
@@ -362,13 +383,112 @@ void ConformanceTestSuite::ExpectHardParseFailureForProto(
 void ConformanceTestSuite::RunValidJsonTest(
     const string& test_name, const string& input_json,
     const string& equivalent_text_format) {
-  RunValidInputTest("JsonInput." + test_name + ".JsonOutput", input_json,
+  RunValidInputTest("JsonInput." + test_name + ".ProtobufOutput", input_json,
                     conformance::JSON, equivalent_text_format,
                     conformance::PROTOBUF);
-  RunValidInputTest("JsonInput." + test_name + ".ProtobufOutput", input_json, conformance::JSON,
+  RunValidInputTest("JsonInput." + test_name + ".JsonOutput", input_json,
+                    conformance::JSON, equivalent_text_format,
+                    conformance::JSON);
+}
+
+void ConformanceTestSuite::RunValidJsonTestWithProtobufInput(
+    const string& test_name, const TestAllTypes& input,
+    const string& equivalent_text_format) {
+  RunValidInputTest("ProtobufInput." + test_name + ".JsonOutput",
+                    input.SerializeAsString(), conformance::PROTOBUF,
                     equivalent_text_format, conformance::JSON);
 }
 
+// According to the proto3 JSON specification, JSON serializers follow stricter
+// rules than parsers (e.g., a serializer must serialize int32 values as JSON
+// numbers while the parser is allowed to accept them as JSON strings). This
+// method allows strict checking of a proto3 JSON serializer by inspecting
+// the JSON output directly.
+void ConformanceTestSuite::RunValidJsonTestWithValidator(
+    const string& test_name, const string& input_json,
+    const Validator& validator) {
+  ConformanceRequest request;
+  ConformanceResponse response;
+  request.set_json_payload(input_json);
+  request.set_requested_output_format(conformance::JSON);
+
+  string effective_test_name = "JsonInput." + test_name + ".Validator";
+
+  RunTest(effective_test_name, request, &response);
+
+  if (response.result_case() == ConformanceResponse::kSkipped) {
+    ReportSkip(effective_test_name, request, response);
+    return;
+  }
+
+  if (response.result_case() != ConformanceResponse::kJsonPayload) {
+    ReportFailure(effective_test_name, request, response,
+                  "Expected JSON payload but got type %d.",
+                  response.result_case());
+    return;
+  }
+  Json::Reader reader;
+  Json::Value value;
+  if (!reader.parse(response.json_payload(), value)) {
+    ReportFailure(effective_test_name, request, response,
+                  "JSON payload cannot be parsed as valid JSON: %s",
+                  reader.getFormattedErrorMessages().c_str());
+    return;
+  }
+  if (!validator(value)) {
+    ReportFailure(effective_test_name, request, response,
+                  "JSON payload validation failed.");
+    return;
+  }
+  ReportSuccess(effective_test_name);
+}
+
+void ConformanceTestSuite::ExpectParseFailureForJson(
+    const string& test_name, const string& input_json) {
+  ConformanceRequest request;
+  ConformanceResponse response;
+  request.set_json_payload(input_json);
+  string effective_test_name = "JsonInput." + test_name;
+
+  // We don't expect output, but if the program erroneously accepts the JSON
+  // we let it send its response in this format.  We must not leave it unspecified.
+  request.set_requested_output_format(conformance::JSON);
+
+  RunTest(effective_test_name, request, &response);
+  if (response.result_case() == ConformanceResponse::kParseError) {
+    ReportSuccess(effective_test_name);
+  } else if (response.result_case() == ConformanceResponse::kSkipped) {
+    ReportSkip(effective_test_name, request, response);
+  } else {
+    ReportFailure(effective_test_name, request, response,
+                  "Should have failed to parse, but didn't.");
+  }
+}
+
+void ConformanceTestSuite::ExpectSerializeFailureForJson(
+    const string& test_name, const string& text_format) {
+  TestAllTypes payload_message;
+  GOOGLE_CHECK(
+      TextFormat::ParseFromString(text_format, &payload_message))
+          << "Failed to parse: " << text_format;
+
+  ConformanceRequest request;
+  ConformanceResponse response;
+  request.set_protobuf_payload(payload_message.SerializeAsString());
+  string effective_test_name = test_name + ".JsonOutput";
+  request.set_requested_output_format(conformance::JSON);
+
+  RunTest(effective_test_name, request, &response);
+  if (response.result_case() == ConformanceResponse::kSerializeError) {
+    ReportSuccess(effective_test_name);
+  } else if (response.result_case() == ConformanceResponse::kSkipped) {
+    ReportSkip(effective_test_name, request, response);
+  } else {
+    ReportFailure(effective_test_name, request, response,
+                  "Should have failed to serialize, but didn't.");
+  }
+}
+
 void ConformanceTestSuite::TestPrematureEOFForType(FieldDescriptor::Type type) {
   // Incomplete values for each wire type.
   static const string incompletes[6] = {
@@ -500,23 +620,1375 @@ bool ConformanceTestSuite::RunSuite(ConformanceTestRunner* runner,
   RunValidJsonTest("HelloWorld", "{\"optionalString\":\"Hello, World!\"}",
                    "optional_string: 'Hello, World!'");
 
-  bool ok =
-      CheckSetEmpty(expected_to_fail_,
-                    "These tests were listed in the failure list, but they "
-                    "don't exist.  Remove them from the failure list") &&
+  // Test field name conventions.
+  RunValidJsonTest(
+      "FieldNameInSnakeCase",
+      R"({
+        "fieldname1": 1,
+        "fieldName2": 2,
+        "FieldName3": 3
+      })",
+      R"(
+        fieldname1: 1
+        field_name2: 2
+        _field_name3: 3
+      )");
+  RunValidJsonTest(
+      "FieldNameWithNumbers",
+      R"({
+        "field0name5": 5,
+        "field0Name6": 6
+      })",
+      R"(
+        field0name5: 5
+        field_0_name6: 6
+      )");
+  RunValidJsonTest(
+      "FieldNameWithMixedCases",
+      R"({
+        "fieldName7": 7,
+        "fieldName8": 8,
+        "fieldName9": 9,
+        "fieldName10": 10,
+        "fIELDNAME11": 11,
+        "fIELDName12": 12
+      })",
+      R"(
+        fieldName7: 7
+        FieldName8: 8
+        field_Name9: 9
+        Field_Name10: 10
+        FIELD_NAME11: 11
+        FIELD_name12: 12
+      )");
+  // Using the original proto field name in JSON is also allowed.
+  RunValidJsonTest(
+      "OriginalProtoFieldName",
+      R"({
+        "fieldname1": 1,
+        "field_name2": 2,
+        "_field_name3": 3,
+        "field0name5": 5,
+        "field_0_name6": 6,
+        "fieldName7": 7,
+        "FieldName8": 8,
+        "field_Name9": 9,
+        "Field_Name10": 10,
+        "FIELD_NAME11": 11,
+        "FIELD_name12": 12
+      })",
+      R"(
+        fieldname1: 1
+        field_name2: 2
+        _field_name3: 3
+        field0name5: 5
+        field_0_name6: 6
+        fieldName7: 7
+        FieldName8: 8
+        field_Name9: 9
+        Field_Name10: 10
+        FIELD_NAME11: 11
+        FIELD_name12: 12
+      )");
+  // Field names can be escaped.
+  RunValidJsonTest(
+      "FieldNameEscaped",
+      R"({"fieldn\u0061me1": 1})",
+      "fieldname1: 1");
+  // Field names must be quoted (or it's not valid JSON).
+  ExpectParseFailureForJson(
+      "FieldNameNotQuoted",
+      "{fieldname1: 1}");
+  // Trailing comma is not allowed (not valid JSON).
+  ExpectParseFailureForJson(
+      "TrailingCommaInAnObject",
+      R"({"fieldname1":1,})");
+  // JSON doesn't support comments.
+  ExpectParseFailureForJson(
+      "JsonWithComments",
+      R"({
+        // This is a comment.
+        "fieldname1": 1
+      })");
+  // Duplicated field names are not allowed.
+  ExpectParseFailureForJson(
+      "FieldNameDuplicate",
+      R"({
+        "optionalNestedMessage": {a: 1},
+        "optionalNestedMessage": {}
+      })");
+  ExpectParseFailureForJson(
+      "FieldNameDuplicateDifferentCasing1",
+      R"({
+        "optional_nested_message": {a: 1},
+        "optionalNestedMessage": {}
+      })");
+  ExpectParseFailureForJson(
+      "FieldNameDuplicateDifferentCasing2",
+      R"({
+        "optionalNestedMessage": {a: 1},
+        "optional_nested_message": {}
+      })");
+  // Serializers should use lowerCamelCase by default.
+  RunValidJsonTestWithValidator(
+      "FieldNameInLowerCamelCase",
+      R"({
+        "fieldname1": 1,
+        "fieldName2": 2,
+        "FieldName3": 3
+      })",
+      [](const Json::Value& value) {
+        return value.isMember("fieldname1") &&
+            value.isMember("fieldName2") &&
+            value.isMember("FieldName3");
+      });
+  RunValidJsonTestWithValidator(
+      "FieldNameWithNumbers",
+      R"({
+        "field0name5": 5,
+        "field0Name6": 6
+      })",
+      [](const Json::Value& value) {
+        return value.isMember("field0name5") &&
+            value.isMember("field0Name6");
+      });
+  RunValidJsonTestWithValidator(
+      "FieldNameWithMixedCases",
+      R"({
+        "fieldName7": 7,
+        "fieldName8": 8,
+        "fieldName9": 9,
+        "fieldName10": 10,
+        "fIELDNAME11": 11,
+        "fIELDName12": 12
+      })",
+      [](const Json::Value& value) {
+        return value.isMember("fieldName7") &&
+            value.isMember("fieldName8") &&
+            value.isMember("fieldName9") &&
+            value.isMember("fieldName10") &&
+            value.isMember("fIELDNAME11") &&
+            value.isMember("fIELDName12");
+      });
+
+  // Integer fields.
+  RunValidJsonTest(
+      "Int32FieldMaxValue",
+      R"({"optionalInt32": 2147483647})",
+      "optional_int32: 2147483647");
+  RunValidJsonTest(
+      "Int32FieldMinValue",
+      R"({"optionalInt32": -2147483648})",
+      "optional_int32: -2147483648");
+  RunValidJsonTest(
+      "Uint32FieldMaxValue",
+      R"({"optionalUint32": 4294967295})",
+      "optional_uint32: 4294967295");
+  RunValidJsonTest(
+      "Int64FieldMaxValue",
+      R"({"optionalInt64": "9223372036854775807"})",
+      "optional_int64: 9223372036854775807");
+  RunValidJsonTest(
+      "Int64FieldMinValue",
+      R"({"optionalInt64": "-9223372036854775808"})",
+      "optional_int64: -9223372036854775808");
+  RunValidJsonTest(
+      "Uint64FieldMaxValue",
+      R"({"optionalUint64": "18446744073709551615"})",
+      "optional_uint64: 18446744073709551615");
+  RunValidJsonTest(
+      "Int64FieldMaxValueNotQuoted",
+      R"({"optionalInt64": 9223372036854775807})",
+      "optional_int64: 9223372036854775807");
+  RunValidJsonTest(
+      "Int64FieldMinValueNotQuoted",
+      R"({"optionalInt64": -9223372036854775808})",
+      "optional_int64: -9223372036854775808");
+  RunValidJsonTest(
+      "Uint64FieldMaxValueNotQuoted",
+      R"({"optionalUint64": 18446744073709551615})",
+      "optional_uint64: 18446744073709551615");
+  // Values can be represented as JSON strings.
+  RunValidJsonTest(
+      "Int32FieldStringValue",
+      R"({"optionalInt32": "2147483647"})",
+      "optional_int32: 2147483647");
+  RunValidJsonTest(
+      "Int32FieldStringValueEscaped",
+      R"({"optionalInt32": "2\u003147483647"})",
+      "optional_int32: 2147483647");
+
+  // Parsers reject out-of-bound integer values.
+  ExpectParseFailureForJson(
+      "Int32FieldTooLarge",
+      R"({"optionalInt32": 2147483648})");
+  ExpectParseFailureForJson(
+      "Int32FieldTooSmall",
+      R"({"optionalInt32": -2147483649})");
+  ExpectParseFailureForJson(
+      "Uint32FieldTooLarge",
+      R"({"optionalUint32": 4294967296})");
+  ExpectParseFailureForJson(
+      "Int64FieldTooLarge",
+      R"({"optionalInt64": "9223372036854775808"})");
+  ExpectParseFailureForJson(
+      "Int64FieldTooSmall",
+      R"({"optionalInt64": "-9223372036854775809"})");
+  ExpectParseFailureForJson(
+      "Uint64FieldTooLarge",
+      R"({"optionalUint64": "18446744073709551616"})");
+  // Parsers reject non-integer numeric values as well.
+  ExpectParseFailureForJson(
+      "Int32FieldNotInteger",
+      R"({"optionalInt32": 0.5})");
+  ExpectParseFailureForJson(
+      "Uint32FieldNotInteger",
+      R"({"optionalUint32": 0.5})");
+  ExpectParseFailureForJson(
+      "Int64FieldNotInteger",
+      R"({"optionalInt64": "0.5"})");
+  ExpectParseFailureForJson(
+      "Uint64FieldNotInteger",
+      R"({"optionalUint64": "0.5"})");
+
+  // Integers but represented as float values are accepted.
+  RunValidJsonTest(
+      "Int32FieldFloatTrailingZero",
+      R"({"optionalInt32": 100000.000})",
+      "optional_int32: 100000");
+  RunValidJsonTest(
+      "Int32FieldExponentialFormat",
+      R"({"optionalInt32": 1e5})",
+      "optional_int32: 100000");
+  RunValidJsonTest(
+      "Int32FieldMaxFloatValue",
+      R"({"optionalInt32": 2.147483647e9})",
+      "optional_int32: 2147483647");
+  RunValidJsonTest(
+      "Int32FieldMinFloatValue",
+      R"({"optionalInt32": -2.147483648e9})",
+      "optional_int32: -2147483648");
+  RunValidJsonTest(
+      "Uint32FieldMaxFloatValue",
+      R"({"optionalUint32": 4.294967295e9})",
+      "optional_uint32: 4294967295");
+
+  // Parsers reject non-numeric values.
+  ExpectParseFailureForJson(
+      "Int32FieldNotNumber",
+      R"({"optionalInt32": "3x3"})");
+  ExpectParseFailureForJson(
+      "Uint32FieldNotNumber",
+      R"({"optionalUint32": "3x3"})");
+  ExpectParseFailureForJson(
+      "Int64FieldNotNumber",
+      R"({"optionalInt64": "3x3"})");
+  ExpectParseFailureForJson(
+      "Uint64FieldNotNumber",
+      R"({"optionalUint64": "3x3"})");
+  // JSON does not allow "+" on numeric values.
+  ExpectParseFailureForJson(
+      "Int32FieldPlusSign",
+      R"({"optionalInt32": +1})");
+  // JSON doesn't allow leading 0s.
+  ExpectParseFailureForJson(
+      "Int32FieldLeadingZero",
+      R"({"optionalInt32": 01})");
+  ExpectParseFailureForJson(
+      "Int32FieldNegativeWithLeadingZero",
+      R"({"optionalInt32": -01})");
+  // String values must follow the same syntax rules. Specifically, leading
+  // or trailing spaces are not allowed.
+  ExpectParseFailureForJson(
+      "Int32FieldLeadingSpace",
+      R"({"optionalInt32": " 1"})");
+  ExpectParseFailureForJson(
+      "Int32FieldTrailingSpace",
+      R"({"optionalInt32": "1 "})");
+
+  // 64-bit values are serialized as strings.
+  RunValidJsonTestWithValidator(
+      "Int64FieldBeString",
+      R"({"optionalInt64": 1})",
+      [](const Json::Value& value) {
+        return value["optionalInt64"].type() == Json::stringValue &&
+            value["optionalInt64"].asString() == "1";
+      });
+  RunValidJsonTestWithValidator(
+      "Uint64FieldBeString",
+      R"({"optionalUint64": 1})",
+      [](const Json::Value& value) {
+        return value["optionalUint64"].type() == Json::stringValue &&
+            value["optionalUint64"].asString() == "1";
+      });
+
+  // Bool fields.
+  RunValidJsonTest(
+      "BoolFieldTrue",
+      R"({"optionalBool":true})",
+      "optional_bool: true");
+  RunValidJsonTest(
+      "BoolFieldFalse",
+      R"({"optionalBool":false})",
+      "optional_bool: false");
+
+  // Other forms are not allowed.
+  ExpectParseFailureForJson(
+      "BoolFieldIntegerZero",
+      R"({"optionalBool":0})");
+  ExpectParseFailureForJson(
+      "BoolFieldIntegerOne",
+      R"({"optionalBool":1})");
+  ExpectParseFailureForJson(
+      "BoolFieldCamelCaseTrue",
+      R"({"optionalBool":True})");
+  ExpectParseFailureForJson(
+      "BoolFieldCamelCaseFalse",
+      R"({"optionalBool":False})");
+  ExpectParseFailureForJson(
+      "BoolFieldAllCapitalTrue",
+      R"({"optionalBool":TRUE})");
+  ExpectParseFailureForJson(
+      "BoolFieldAllCapitalFalse",
+      R"({"optionalBool":FALSE})");
+  ExpectParseFailureForJson(
+      "BoolFieldDoubleQuotedTrue",
+      R"({"optionalBool":"true"})");
+  ExpectParseFailureForJson(
+      "BoolFieldDoubleQuotedFalse",
+      R"({"optionalBool":"false"})");
+
+  // Float fields.
+  RunValidJsonTest(
+      "FloatFieldMinPositiveValue",
+      R"({"optionalFloat": 1.175494e-38})",
+      "optional_float: 1.175494e-38");
+  RunValidJsonTest(
+      "FloatFieldMaxNegativeValue",
+      R"({"optionalFloat": -1.175494e-38})",
+      "optional_float: -1.175494e-38");
+  RunValidJsonTest(
+      "FloatFieldMaxPositiveValue",
+      R"({"optionalFloat": 3.402823e+38})",
+      "optional_float: 3.402823e+38");
+  RunValidJsonTest(
+      "FloatFieldMinNegativeValue",
+      R"({"optionalFloat": -3.402823e+38})",
+      "optional_float: -3.402823e+38");
+  // Values can be quoted.
+  RunValidJsonTest(
+      "FloatFieldQuotedValue",
+      R"({"optionalFloat": "1"})",
+      "optional_float: 1");
+  // Special values.
+  RunValidJsonTest(
+      "FloatFieldNan",
+      R"({"optionalFloat": "NaN"})",
+      "optional_float: nan");
+  RunValidJsonTest(
+      "FloatFieldInfinity",
+      R"({"optionalFloat": "Infinity"})",
+      "optional_float: inf");
+  RunValidJsonTest(
+      "FloatFieldNegativeInfinity",
+      R"({"optionalFloat": "-Infinity"})",
+      "optional_float: -inf");
+  // Non-canonical NaN values will be correctly normalized.
+  {
+    TestAllTypes message;
+    // IEEE floating-point standard 32-bit quiet NaN:
+    //   0111 1111 1xxx xxxx xxxx xxxx xxxx xxxx
+    message.set_optional_float(
+        WireFormatLite::DecodeFloat(0x7FA12345));
+    RunValidJsonTestWithProtobufInput(
+        "FloatFieldNormalizeQuietNan", message,
+        "optional_float: nan");
+    // IEEE floating-point standard 32-bit signaling NaN:
+    //   1111 1111 1xxx xxxx xxxx xxxx xxxx xxxx
+    message.set_optional_float(
+        WireFormatLite::DecodeFloat(0xFFB54321));
+    RunValidJsonTestWithProtobufInput(
+        "FloatFieldNormalizeSignalingNan", message,
+        "optional_float: nan");
+  }
+
+  // Special values must be quoted.
+  ExpectParseFailureForJson(
+      "FloatFieldNanNotQuoted",
+      R"({"optionalFloat": NaN})");
+  ExpectParseFailureForJson(
+      "FloatFieldInfinityNotQuoted",
+      R"({"optionalFloat": Infinity})");
+  ExpectParseFailureForJson(
+      "FloatFieldNegativeInfinityNotQuoted",
+      R"({"optionalFloat": -Infinity})");
+  // Parsers should reject out-of-bound values.
+  ExpectParseFailureForJson(
+      "FloatFieldTooSmall",
+      R"({"optionalFloat": -3.502823e+38})");
+  ExpectParseFailureForJson(
+      "FloatFieldTooLarge",
+      R"({"optionalFloat": 3.502823e+38})");
+
+  // Double fields.
+  RunValidJsonTest(
+      "DoubleFieldMinPositiveValue",
+      R"({"optionalDouble": 2.22507e-308})",
+      "optional_double: 2.22507e-308");
+  RunValidJsonTest(
+      "DoubleFieldMaxNegativeValue",
+      R"({"optionalDouble": -2.22507e-308})",
+      "optional_double: -2.22507e-308");
+  RunValidJsonTest(
+      "DoubleFieldMaxPositiveValue",
+      R"({"optionalDouble": 1.79769e+308})",
+      "optional_double: 1.79769e+308");
+  RunValidJsonTest(
+      "DoubleFieldMinNegativeValue",
+      R"({"optionalDouble": -1.79769e+308})",
+      "optional_double: -1.79769e+308");
+  // Values can be quoted.
+  RunValidJsonTest(
+      "DoubleFieldQuotedValue",
+      R"({"optionalDouble": "1"})",
+      "optional_double: 1");
+  // Special values.
+  RunValidJsonTest(
+      "DoubleFieldNan",
+      R"({"optionalDouble": "NaN"})",
+      "optional_double: nan");
+  RunValidJsonTest(
+      "DoubleFieldInfinity",
+      R"({"optionalDouble": "Infinity"})",
+      "optional_double: inf");
+  RunValidJsonTest(
+      "DoubleFieldNegativeInfinity",
+      R"({"optionalDouble": "-Infinity"})",
+      "optional_double: -inf");
+  // Non-canonical NaN values will be correctly normalized.
+  {
+    TestAllTypes message;
+    message.set_optional_double(
+        WireFormatLite::DecodeDouble(0x7FFA123456789ABCLL));
+    RunValidJsonTestWithProtobufInput(
+        "DoubleFieldNormalizeQuietNan", message,
+        "optional_double: nan");
+    message.set_optional_double(
+        WireFormatLite::DecodeDouble(0xFFFBCBA987654321LL));
+    RunValidJsonTestWithProtobufInput(
+        "DoubleFieldNormalizeSignalingNan", message,
+        "optional_double: nan");
+  }
 
-      CheckSetEmpty(unexpected_failing_tests_,
-                    "These tests failed.  If they can't be fixed right now, "
-                    "you can add them to the failure list so the overall "
-                    "suite can succeed") &&
+  // Special values must be quoted.
+  ExpectParseFailureForJson(
+      "DoubleFieldNanNotQuoted",
+      R"({"optionalDouble": NaN})");
+  ExpectParseFailureForJson(
+      "DoubleFieldInfinityNotQuoted",
+      R"({"optionalDouble": Infinity})");
+  ExpectParseFailureForJson(
+      "DoubleFieldNegativeInfinityNotQuoted",
+      R"({"optionalDouble": -Infinity})");
+
+  // Parsers should reject out-of-bound values.
+  ExpectParseFailureForJson(
+      "DoubleFieldTooSmall",
+      R"({"optionalDouble": -1.89769e+308})");
+  ExpectParseFailureForJson(
+      "DoubleFieldTooLarge",
+      R"({"optionalDouble": +1.89769e+308})");
+
+  // Enum fields.
+  RunValidJsonTest(
+      "EnumField",
+      R"({"optionalNestedEnum": "FOO"})",
+      "optional_nested_enum: FOO");
+  // Enum values must be represented as strings.
+  ExpectParseFailureForJson(
+      "EnumFieldNotQuoted",
+      R"({"optionalNestedEnum": FOO})");
+  // Numeric values are allowed.
+  RunValidJsonTest(
+      "EnumFieldNumericValueZero",
+      R"({"optionalNestedEnum": 0})",
+      "optional_nested_enum: FOO");
+  RunValidJsonTest(
+      "EnumFieldNumericValueNonZero",
+      R"({"optionalNestedEnum": 1})",
+      "optional_nested_enum: BAR");
+  // Unknown enum values are represented as numeric values.
+  RunValidJsonTestWithValidator(
+      "EnumFieldUnknownValue",
+      R"({"optionalNestedEnum": 123})",
+      [](const Json::Value& value) {
+        return value["optionalNestedEnum"].type() == Json::intValue &&
+            value["optionalNestedEnum"].asInt() == 123;
+      });
+
+  // String fields.
+  RunValidJsonTest(
+      "StringField",
+      R"({"optionalString": "Hello world!"})",
+      "optional_string: \"Hello world!\"");
+  RunValidJsonTest(
+      "StringFieldUnicode",
+      // Google in Chinese.
+      R"({"optionalString": "谷歌"})",
+      R"(optional_string: "谷歌")");
+  RunValidJsonTest(
+      "StringFieldEscape",
+      R"({"optionalString": "\"\\\/\b\f\n\r\t"})",
+      R"(optional_string: "\"\\/\b\f\n\r\t")");
+  RunValidJsonTest(
+      "StringFieldUnicodeEscape",
+      R"({"optionalString": "\u8C37\u6B4C"})",
+      R"(optional_string: "谷歌")");
+  RunValidJsonTest(
+      "StringFieldUnicodeEscapeWithLowercaseHexLetters",
+      R"({"optionalString": "\u8c37\u6b4c"})",
+      R"(optional_string: "谷歌")");
+  RunValidJsonTest(
+      "StringFieldSurrogatePair",
+      // The character is an emoji: grinning face with smiling eyes. 😁
+      R"({"optionalString": "\uD83D\uDE01"})",
+      R"(optional_string: "\xF0\x9F\x98\x81")");
+
+  // Unicode escapes must start with "\u" (lowercase u).
+  ExpectParseFailureForJson(
+      "StringFieldUppercaseEscapeLetter",
+      R"({"optionalString": "\U8C37\U6b4C"})");
+  ExpectParseFailureForJson(
+      "StringFieldInvalidEscape",
+      R"({"optionalString": "\uXXXX\u6B4C"})");
+  ExpectParseFailureForJson(
+      "StringFieldUnterminatedEscape",
+      R"({"optionalString": "\u8C3"})");
+  ExpectParseFailureForJson(
+      "StringFieldUnpairedHighSurrogate",
+      R"({"optionalString": "\uD800"})");
+  ExpectParseFailureForJson(
+      "StringFieldUnpairedLowSurrogate",
+      R"({"optionalString": "\uDC00"})");
+  ExpectParseFailureForJson(
+      "StringFieldSurrogateInWrongOrder",
+      R"({"optionalString": "\uDE01\uD83D"})");
+  ExpectParseFailureForJson(
+      "StringFieldNotAString",
+      R"({"optionalString": 12345})");
+
+  // Bytes fields.
+  RunValidJsonTest(
+      "BytesField",
+      R"({"optionalBytes": "AQI="})",
+      R"(optional_bytes: "\x01\x02")");
+  ExpectParseFailureForJson(
+      "BytesFieldNoPadding",
+      R"({"optionalBytes": "AQI"})");
+  ExpectParseFailureForJson(
+      "BytesFieldInvalidBase64Characters",
+      R"({"optionalBytes": "-_=="})");
+
+  // Message fields.
+  RunValidJsonTest(
+      "MessageField",
+      R"({"optionalNestedMessage": {"a": 1234}})",
+      "optional_nested_message: {a: 1234}");
+
+  // Oneof fields.
+  ExpectParseFailureForJson(
+      "OneofFieldDuplicate",
+      R"({"oneofUint32": 1, "oneofString": "test"})");
+
+  // Repeated fields.
+  RunValidJsonTest(
+      "PrimitiveRepeatedField",
+      R"({"repeatedInt32": [1, 2, 3, 4]})",
+      "repeated_int32: [1, 2, 3, 4]");
+  RunValidJsonTest(
+      "EnumRepeatedField",
+      R"({"repeatedNestedEnum": ["FOO", "BAR", "BAZ"]})",
+      "repeated_nested_enum: [FOO, BAR, BAZ]");
+  RunValidJsonTest(
+      "StringRepeatedField",
+      R"({"repeatedString": ["Hello", "world"]})",
+      R"(repeated_string: ["Hello", "world"])");
+  RunValidJsonTest(
+      "BytesRepeatedField",
+      R"({"repeatedBytes": ["AAEC", "AQI="]})",
+      R"(repeated_bytes: ["\x00\x01\x02", "\x01\x02"])");
+  RunValidJsonTest(
+      "MessageRepeatedField",
+      R"({"repeatedNestedMessage": [{"a": 1234}, {"a": 5678}]})",
+      "repeated_nested_message: {a: 1234}"
+      "repeated_nested_message: {a: 5678}");
+
+  // Repeated field elements are of incorrect type.
+  ExpectParseFailureForJson(
+      "RepeatedFieldWrongElementTypeExpectingIntegersGotBool",
+      R"({"repeatedInt32": [1, false, 3, 4]})");
+  ExpectParseFailureForJson(
+      "RepeatedFieldWrongElementTypeExpectingIntegersGotString",
+      R"({"repeatedInt32": [1, 2, "name", 4]})");
+  ExpectParseFailureForJson(
+      "RepeatedFieldWrongElementTypeExpectingIntegersGotMessage",
+      R"({"repeatedInt32": [1, 2, 3, {"a": 4}]})");
+  ExpectParseFailureForJson(
+      "RepeatedFieldWrongElementTypeExpectingStringsGotInt",
+      R"({"repeatedString": ["1", 2, "3", "4"]})");
+  ExpectParseFailureForJson(
+      "RepeatedFieldWrongElementTypeExpectingStringsGotBool",
+      R"({"repeatedString": ["1", "2", false, "4"]})");
+  ExpectParseFailureForJson(
+      "RepeatedFieldWrongElementTypeExpectingStringsGotMessage",
+      R"({"repeatedString": ["1", 2, "3", {"a": 4}]})");
+  ExpectParseFailureForJson(
+      "RepeatedFieldWrongElementTypeExpectingMessagesGotInt",
+      R"({"repeatedNestedMessage": [{"a": 1}, 2]})");
+  ExpectParseFailureForJson(
+      "RepeatedFieldWrongElementTypeExpectingMessagesGotBool",
+      R"({"repeatedNestedMessage": [{"a": 1}, false]})");
+  ExpectParseFailureForJson(
+      "RepeatedFieldWrongElementTypeExpectingMessagesGotString",
+      R"({"repeatedNestedMessage": [{"a": 1}, "2"]})");
+  // Trailing comma in the repeated field is not allowed.
+  ExpectParseFailureForJson(
+      "RepeatedFieldTrailingComma",
+      R"({"repeatedInt32": [1, 2, 3, 4,]})");
+
+  // Map fields.
+  RunValidJsonTest(
+      "Int32MapField",
+      R"({"mapInt32Int32": {"1": 2, "3": 4}})",
+      "map_int32_int32: {key: 1 value: 2}"
+      "map_int32_int32: {key: 3 value: 4}");
+  ExpectParseFailureForJson(
+      "Int32MapFieldKeyNotQuoted",
+      R"({"mapInt32Int32": {1: 2, 3: 4}})");
+  RunValidJsonTest(
+      "Uint32MapField",
+      R"({"mapUint32Uint32": {"1": 2, "3": 4}})",
+      "map_uint32_uint32: {key: 1 value: 2}"
+      "map_uint32_uint32: {key: 3 value: 4}");
+  ExpectParseFailureForJson(
+      "Uint32MapFieldKeyNotQuoted",
+      R"({"mapUint32Uint32": {1: 2, 3: 4}})");
+  RunValidJsonTest(
+      "Int64MapField",
+      R"({"mapInt64Int64": {"1": 2, "3": 4}})",
+      "map_int64_int64: {key: 1 value: 2}"
+      "map_int64_int64: {key: 3 value: 4}");
+  ExpectParseFailureForJson(
+      "Int64MapFieldKeyNotQuoted",
+      R"({"mapInt64Int64": {1: 2, 3: 4}})");
+  RunValidJsonTest(
+      "Uint64MapField",
+      R"({"mapUint64Uint64": {"1": 2, "3": 4}})",
+      "map_uint64_uint64: {key: 1 value: 2}"
+      "map_uint64_uint64: {key: 3 value: 4}");
+  ExpectParseFailureForJson(
+      "Uint64MapFieldKeyNotQuoted",
+      R"({"mapUint64Uint64": {1: 2, 3: 4}})");
+  RunValidJsonTest(
+      "BoolMapField",
+      R"({"mapBoolBool": {"true": true, "false": false}})",
+      "map_bool_bool: {key: true value: true}"
+      "map_bool_bool: {key: false value: false}");
+  ExpectParseFailureForJson(
+      "BoolMapFieldKeyNotQuoted",
+      R"({"mapBoolBool": {true: true, false: false}})");
+  RunValidJsonTest(
+      "MessageMapField",
+      R"({
+        "mapStringNestedMessage": {
+          "hello": {"a": 1234},
+          "world": {"a": 5678}
+        }
+      })",
+      R"(
+        map_string_nested_message: {
+          key: "hello"
+          value: {a: 1234}
+        }
+        map_string_nested_message: {
+          key: "world"
+          value: {a: 5678}
+        }
+      )");
+  // Since Map keys are represented as JSON strings, escaping should be allowed.
+  RunValidJsonTest(
+      "Int32MapEscapedKey",
+      R"({"mapInt32Int32": {"\u0031": 2}})",
+      "map_int32_int32: {key: 1 value: 2}");
+  RunValidJsonTest(
+      "Int64MapEscapedKey",
+      R"({"mapInt64Int64": {"\u0031": 2}})",
+      "map_int64_int64: {key: 1 value: 2}");
+  RunValidJsonTest(
+      "BoolMapEscapedKey",
+      R"({"mapBoolBool": {"tr\u0075e": true}})",
+      "map_bool_bool: {key: true value: true}");
+
+  // "null" is accepted for all fields types.
+  RunValidJsonTest(
+      "AllFieldAcceptNull",
+      R"({
+        "optionalInt32": null,
+        "optionalInt64": null,
+        "optionalUint32": null,
+        "optionalUint64": null,
+        "optionalBool": null,
+        "optionalString": null,
+        "optionalBytes": null,
+        "optionalNestedEnum": null,
+        "optionalNestedMessage": null,
+        "repeatedInt32": null,
+        "repeatedInt64": null,
+        "repeatedUint32": null,
+        "repeatedUint64": null,
+        "repeatedBool": null,
+        "repeatedString": null,
+        "repeatedBytes": null,
+        "repeatedNestedEnum": null,
+        "repeatedNestedMessage": null,
+        "mapInt32Int32": null,
+        "mapBoolBool": null,
+        "mapStringNestedMessage": null
+      })",
+      "");
+
+  // Repeated field elements cannot be null.
+  ExpectParseFailureForJson(
+      "RepeatedFieldPrimitiveElementIsNull",
+      R"({"repeatedInt32": [1, null, 2]})");
+  ExpectParseFailureForJson(
+      "RepeatedFieldMessageElementIsNull",
+      R"({"repeatedNestedMessage": [{"a":1}, null, {"a":2}]})");
+  // Map field keys cannot be null.
+  ExpectParseFailureForJson(
+      "MapFieldKeyIsNull",
+      R"({"mapInt32Int32": {null: 1}})");
+  // Map field values cannot be null.
+  ExpectParseFailureForJson(
+      "MapFieldValueIsNull",
+      R"({"mapInt32Int32": {"0": null}})");
+
+  // Wrapper types.
+  RunValidJsonTest(
+      "OptionalBoolWrapper",
+      R"({"optionalBoolWrapper": false})",
+      "optional_bool_wrapper: {value: false}");
+  RunValidJsonTest(
+      "OptionalInt32Wrapper",
+      R"({"optionalInt32Wrapper": 0})",
+      "optional_int32_wrapper: {value: 0}");
+  RunValidJsonTest(
+      "OptionalUint32Wrapper",
+      R"({"optionalUint32Wrapper": 0})",
+      "optional_uint32_wrapper: {value: 0}");
+  RunValidJsonTest(
+      "OptionalInt64Wrapper",
+      R"({"optionalInt64Wrapper": 0})",
+      "optional_int64_wrapper: {value: 0}");
+  RunValidJsonTest(
+      "OptionalUint64Wrapper",
+      R"({"optionalUint64Wrapper": 0})",
+      "optional_uint64_wrapper: {value: 0}");
+  RunValidJsonTest(
+      "OptionalFloatWrapper",
+      R"({"optionalFloatWrapper": 0})",
+      "optional_float_wrapper: {value: 0}");
+  RunValidJsonTest(
+      "OptionalDoubleWrapper",
+      R"({"optionalDoubleWrapper": 0})",
+      "optional_double_wrapper: {value: 0}");
+  RunValidJsonTest(
+      "OptionalStringWrapper",
+      R"({"optionalStringWrapper": ""})",
+      R"(optional_string_wrapper: {value: ""})");
+  RunValidJsonTest(
+      "OptionalBytesWrapper",
+      R"({"optionalBytesWrapper": ""})",
+      R"(optional_bytes_wrapper: {value: ""})");
+  RunValidJsonTest(
+      "OptionalWrapperTypesWithNonDefaultValue",
+      R"({
+        "optionalBoolWrapper": true,
+        "optionalInt32Wrapper": 1,
+        "optionalUint32Wrapper": 1,
+        "optionalInt64Wrapper": "1",
+        "optionalUint64Wrapper": "1",
+        "optionalFloatWrapper": 1,
+        "optionalDoubleWrapper": 1,
+        "optionalStringWrapper": "1",
+        "optionalBytesWrapper": "AQI="
+      })",
+      R"(
+        optional_bool_wrapper: {value: true}
+        optional_int32_wrapper: {value: 1}
+        optional_uint32_wrapper: {value: 1}
+        optional_int64_wrapper: {value: 1}
+        optional_uint64_wrapper: {value: 1}
+        optional_float_wrapper: {value: 1}
+        optional_double_wrapper: {value: 1}
+        optional_string_wrapper: {value: "1"}
+        optional_bytes_wrapper: {value: "\x01\x02"}
+      )");
+  RunValidJsonTest(
+      "RepeatedBoolWrapper",
+      R"({"repeatedBoolWrapper": [true, false]})",
+      "repeated_bool_wrapper: {value: true}"
+      "repeated_bool_wrapper: {value: false}");
+  RunValidJsonTest(
+      "RepeatedInt32Wrapper",
+      R"({"repeatedInt32Wrapper": [0, 1]})",
+      "repeated_int32_wrapper: {value: 0}"
+      "repeated_int32_wrapper: {value: 1}");
+  RunValidJsonTest(
+      "RepeatedUint32Wrapper",
+      R"({"repeatedUint32Wrapper": [0, 1]})",
+      "repeated_uint32_wrapper: {value: 0}"
+      "repeated_uint32_wrapper: {value: 1}");
+  RunValidJsonTest(
+      "RepeatedInt64Wrapper",
+      R"({"repeatedInt64Wrapper": [0, 1]})",
+      "repeated_int64_wrapper: {value: 0}"
+      "repeated_int64_wrapper: {value: 1}");
+  RunValidJsonTest(
+      "RepeatedUint64Wrapper",
+      R"({"repeatedUint64Wrapper": [0, 1]})",
+      "repeated_uint64_wrapper: {value: 0}"
+      "repeated_uint64_wrapper: {value: 1}");
+  RunValidJsonTest(
+      "RepeatedFloatWrapper",
+      R"({"repeatedFloatWrapper": [0, 1]})",
+      "repeated_float_wrapper: {value: 0}"
+      "repeated_float_wrapper: {value: 1}");
+  RunValidJsonTest(
+      "RepeatedDoubleWrapper",
+      R"({"repeatedDoubleWrapper": [0, 1]})",
+      "repeated_double_wrapper: {value: 0}"
+      "repeated_double_wrapper: {value: 1}");
+  RunValidJsonTest(
+      "RepeatedStringWrapper",
+      R"({"repeatedStringWrapper": ["", "AQI="]})",
+      R"(
+        repeated_string_wrapper: {value: ""}
+        repeated_string_wrapper: {value: "AQI="}
+      )");
+  RunValidJsonTest(
+      "RepeatedBytesWrapper",
+      R"({"repeatedBytesWrapper": ["", "AQI="]})",
+      R"(
+        repeated_bytes_wrapper: {value: ""}
+        repeated_bytes_wrapper: {value: "\x01\x02"}
+      )");
+  RunValidJsonTest(
+      "WrapperTypesWithNullValue",
+      R"({
+        "optionalBoolWrapper": null,
+        "optionalInt32Wrapper": null,
+        "optionalUint32Wrapper": null,
+        "optionalInt64Wrapper": null,
+        "optionalUint64Wrapper": null,
+        "optionalFloatWrapper": null,
+        "optionalDoubleWrapper": null,
+        "optionalStringWrapper": null,
+        "optionalBytesWrapper": null,
+        "repeatedBoolWrapper": null,
+        "repeatedInt32Wrapper": null,
+        "repeatedUint32Wrapper": null,
+        "repeatedInt64Wrapper": null,
+        "repeatedUint64Wrapper": null,
+        "repeatedFloatWrapper": null,
+        "repeatedDoubleWrapper": null,
+        "repeatedStringWrapper": null,
+        "repeatedBytesWrapper": null
+      })",
+      "");
+
+  // Duration
+  RunValidJsonTest(
+      "DurationMinValue",
+      R"({"optionalDuration": "-315576000000.999999999s"})",
+      "optional_duration: {seconds: -315576000000 nanos: -999999999}");
+  RunValidJsonTest(
+      "DurationMaxValue",
+      R"({"optionalDuration": "315576000000.999999999s"})",
+      "optional_duration: {seconds: 315576000000 nanos: 999999999}");
+  RunValidJsonTest(
+      "DurationRepeatedValue",
+      R"({"repeatedDuration": ["1.5s", "-1.5s"]})",
+      "repeated_duration: {seconds: 1 nanos: 500000000}"
+      "repeated_duration: {seconds: -1 nanos: -500000000}");
+
+  ExpectParseFailureForJson(
+      "DurationMissingS",
+      R"({"optionalDuration": "1"})");
+  ExpectParseFailureForJson(
+      "DurationJsonInputTooSmall",
+      R"({"optionalDuration": "-315576000001.000000000s"})");
+  ExpectParseFailureForJson(
+      "DurationJsonInputTooLarge",
+      R"({"optionalDuration": "315576000001.000000000s"})");
+  ExpectSerializeFailureForJson(
+      "DurationProtoInputTooSmall",
+      "optional_duration: {seconds: -315576000001 nanos: 0}");
+  ExpectSerializeFailureForJson(
+      "DurationProtoInputTooLarge",
+      "optional_duration: {seconds: 315576000001 nanos: 0}");
+
+  RunValidJsonTestWithValidator(
+      "DurationHasZeroFractionalDigit",
+      R"({"optionalDuration": "1.000000000s"})",
+      [](const Json::Value& value) {
+        return value["optionalDuration"].asString() == "1s";
+      });
+  RunValidJsonTestWithValidator(
+      "DurationHas3FractionalDigits",
+      R"({"optionalDuration": "1.010000000s"})",
+      [](const Json::Value& value) {
+        return value["optionalDuration"].asString() == "1.010s";
+      });
+  RunValidJsonTestWithValidator(
+      "DurationHas6FractionalDigits",
+      R"({"optionalDuration": "1.000010000s"})",
+      [](const Json::Value& value) {
+        return value["optionalDuration"].asString() == "1.000010s";
+      });
+  RunValidJsonTestWithValidator(
+      "DurationHas9FractionalDigits",
+      R"({"optionalDuration": "1.000000010s"})",
+      [](const Json::Value& value) {
+        return value["optionalDuration"].asString() == "1.000000010s";
+      });
+
+  // Timestamp
+  RunValidJsonTest(
+      "TimestampMinValue",
+      R"({"optionalTimestamp": "0001-01-01T00:00:00Z"})",
+      "optional_timestamp: {seconds: -62135596800}");
+  RunValidJsonTest(
+      "TimestampMaxValue",
+      R"({"optionalTimestamp": "9999-12-31T23:59:59.999999999Z"})",
+      "optional_timestamp: {seconds: 253402300799 nanos: 999999999}");
+  RunValidJsonTest(
+      "TimestampRepeatedValue",
+      R"({
+        "repeatedTimestamp": [
+          "0001-01-01T00:00:00Z",
+          "9999-12-31T23:59:59.999999999Z"
+        ]
+      })",
+      "repeated_timestamp: {seconds: -62135596800}"
+      "repeated_timestamp: {seconds: 253402300799 nanos: 999999999}");
+  RunValidJsonTest(
+      "TimestampWithPositiveOffset",
+      R"({"optionalTimestamp": "1970-01-01T08:00:00+08:00"})",
+      "optional_timestamp: {seconds: 0}");
+  RunValidJsonTest(
+      "TimestampWithNegativeOffset",
+      R"({"optionalTimestamp": "1969-12-31T16:00:00-08:00"})",
+      "optional_timestamp: {seconds: 0}");
+
+  ExpectParseFailureForJson(
+      "TimestampJsonInputTooSmall",
+      R"({"optionalTimestamp": "0000-01-01T00:00:00Z"})");
+  ExpectParseFailureForJson(
+      "TimestampJsonInputTooLarge",
+      R"({"optionalTimestamp": "10000-01-01T00:00:00Z"})");
+  ExpectParseFailureForJson(
+      "TimestampJsonInputMissingZ",
+      R"({"optionalTimestamp": "0001-01-01T00:00:00"})");
+  ExpectParseFailureForJson(
+      "TimestampJsonInputMissingT",
+      R"({"optionalTimestamp": "0001-01-01 00:00:00Z"})");
+  ExpectParseFailureForJson(
+      "TimestampJsonInputLowercaseZ",
+      R"({"optionalTimestamp": "0001-01-01T00:00:00z"})");
+  ExpectParseFailureForJson(
+      "TimestampJsonInputLowercaseT",
+      R"({"optionalTimestamp": "0001-01-01t00:00:00Z"})");
+  ExpectSerializeFailureForJson(
+      "TimestampProtoInputTooSmall",
+      "optional_timestamp: {seconds: -62135596801}");
+  ExpectSerializeFailureForJson(
+      "TimestampProtoInputTooLarge",
+      "optional_timestamp: {seconds: 253402300800}");
+  RunValidJsonTestWithValidator(
+      "TimestampZeroNormalized",
+      R"({"optionalTimestamp": "1969-12-31T16:00:00-08:00"})",
+      [](const Json::Value& value) {
+        return value["optionalTimestamp"].asString() ==
+            "1970-01-01T00:00:00Z";
+      });
+  RunValidJsonTestWithValidator(
+      "TimestampHasZeroFractionalDigit",
+      R"({"optionalTimestamp": "1970-01-01T00:00:00.000000000Z"})",
+      [](const Json::Value& value) {
+        return value["optionalTimestamp"].asString() ==
+            "1970-01-01T00:00:00Z";
+      });
+  RunValidJsonTestWithValidator(
+      "TimestampHas3FractionalDigits",
+      R"({"optionalTimestamp": "1970-01-01T00:00:00.010000000Z"})",
+      [](const Json::Value& value) {
+        return value["optionalTimestamp"].asString() ==
+            "1970-01-01T00:00:00.010Z";
+      });
+  RunValidJsonTestWithValidator(
+      "TimestampHas6FractionalDigits",
+      R"({"optionalTimestamp": "1970-01-01T00:00:00.000010000Z"})",
+      [](const Json::Value& value) {
+        return value["optionalTimestamp"].asString() ==
+            "1970-01-01T00:00:00.000010Z";
+      });
+  RunValidJsonTestWithValidator(
+      "TimestampHas9FractionalDigits",
+      R"({"optionalTimestamp": "1970-01-01T00:00:00.000000010Z"})",
+      [](const Json::Value& value) {
+        return value["optionalTimestamp"].asString() ==
+            "1970-01-01T00:00:00.000000010Z";
+      });
+
+  // FieldMask
+  RunValidJsonTest(
+      "FieldMask",
+      R"({"optionalFieldMask": "foo,barBaz"})",
+      R"(optional_field_mask: {paths: "foo" paths: "bar_baz"})");
+  ExpectParseFailureForJson(
+      "FieldMaskInvalidCharacter",
+      R"({"optionalFieldMask": "foo,bar_bar"})");
+  ExpectSerializeFailureForJson(
+      "FieldMaskPathsDontRoundTrip",
+      R"(optional_field_mask: {paths: "fooBar"})");
+  ExpectSerializeFailureForJson(
+      "FieldMaskNumbersDontRoundTrip",
+      R"(optional_field_mask: {paths: "foo_3_bar"})");
+  ExpectSerializeFailureForJson(
+      "FieldMaskTooManyUnderscore",
+      R"(optional_field_mask: {paths: "foo__bar"})");
+
+  // Struct
+  RunValidJsonTest(
+      "Struct",
+      R"({
+        "optionalStruct": {
+          "nullValue": null,
+          "intValue": 1234,
+          "boolValue": true,
+          "doubleValue": 1234.5678,
+          "stringValue": "Hello world!",
+          "listValue": [1234, "5678"],
+          "objectValue": {
+            "value": 0
+          }
+        }
+      })",
+      R"(
+        optional_struct: {
+          fields: {
+            key: "nullValue"
+            value: {null_value: NULL_VALUE}
+          }
+          fields: {
+            key: "intValue"
+            value: {number_value: 1234}
+          }
+          fields: {
+            key: "boolValue"
+            value: {bool_value: true}
+          }
+          fields: {
+            key: "doubleValue"
+            value: {number_value: 1234.5678}
+          }
+          fields: {
+            key: "stringValue"
+            value: {string_value: "Hello world!"}
+          }
+          fields: {
+            key: "listValue"
+            value: {
+              list_value: {
+                values: {
+                  number_value: 1234
+                }
+                values: {
+                  string_value: "5678"
+                }
+              }
+            }
+          }
+          fields: {
+            key: "objectValue"
+            value: {
+              struct_value: {
+                fields: {
+                  key: "value"
+                  value: {
+                    number_value: 0
+                  }
+                }
+              }
+            }
+          }
+        }
+      )");
+  // Value
+  RunValidJsonTest(
+      "ValueAcceptInteger",
+      R"({"optionalValue": 1})",
+      "optional_value: { number_value: 1}");
+  RunValidJsonTest(
+      "ValueAcceptFloat",
+      R"({"optionalValue": 1.5})",
+      "optional_value: { number_value: 1.5}");
+  RunValidJsonTest(
+      "ValueAcceptBool",
+      R"({"optionalValue": false})",
+      "optional_value: { bool_value: false}");
+  RunValidJsonTest(
+      "ValueAcceptNull",
+      R"({"optionalValue": null})",
+      "optional_value: { null_value: NULL_VALUE}");
+  RunValidJsonTest(
+      "ValueAcceptString",
+      R"({"optionalValue": "hello"})",
+      R"(optional_value: { string_value: "hello"})");
+  RunValidJsonTest(
+      "ValueAcceptList",
+      R"({"optionalValue": [0, "hello"]})",
+      R"(
+        optional_value: {
+          list_value: {
+            values: {
+              number_value: 0
+            }
+            values: {
+              string_value: "hello"
+            }
+          }
+        }
+      )");
+  RunValidJsonTest(
+      "ValueAcceptObject",
+      R"({"optionalValue": {"value": 1}})",
+      R"(
+        optional_value: {
+          struct_value: {
+            fields: {
+              key: "value"
+              value: {
+                number_value: 1
+              }
+            }
+          }
+        }
+      )");
+
+  // Any
+  RunValidJsonTest(
+      "Any",
+      R"({
+        "optionalAny": {
+          "@type": "type.googleapis.com/conformance.TestAllTypes",
+          "optionalInt32": 12345
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/conformance.TestAllTypes] {
+            optional_int32: 12345
+          }
+        }
+      )");
+  RunValidJsonTest(
+      "AnyNested",
+      R"({
+        "optionalAny": {
+          "@type": "type.googleapis.com/google.protobuf.Any",
+          "value": {
+            "@type": "type.googleapis.com/conformance.TestAllTypes",
+            "optionalInt32": 12345
+          }
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/google.protobuf.Any] {
+            [type.googleapis.com/conformance.TestAllTypes] {
+              optional_int32: 12345
+            }
+          }
+        }
+      )");
+  // The special "@type" tag is not required to appear first.
+  RunValidJsonTest(
+      "AnyUnorderedTypeTag",
+      R"({
+        "optionalAny": {
+          "optionalInt32": 12345,
+          "@type": "type.googleapis.com/conformance.TestAllTypes"
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/conformance.TestAllTypes] {
+            optional_int32: 12345
+          }
+        }
+      )");
+  // Well-known types in Any.
+  RunValidJsonTest(
+      "AnyWithInt32ValueWrapper",
+      R"({
+        "optionalAny": {
+          "@type": "type.googleapis.com/google.protobuf.Int32Value",
+          "value": 12345
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/google.protobuf.Int32Value] {
+            value: 12345
+          }
+        }
+      )");
+  RunValidJsonTest(
+      "AnyWithDuration",
+      R"({
+        "optionalAny": {
+          "@type": "type.googleapis.com/google.protobuf.Duration",
+          "value": "1.5s"
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/google.protobuf.Duration] {
+            seconds: 1
+            nanos: 500000000
+          }
+        }
+      )");
+  RunValidJsonTest(
+      "AnyWithTimestamp",
+      R"({
+        "optionalAny": {
+          "@type": "type.googleapis.com/google.protobuf.Timestamp",
+          "value": "1970-01-01T00:00:00Z"
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/google.protobuf.Timestamp] {
+            seconds: 0
+            nanos: 0
+          }
+        }
+      )");
+  RunValidJsonTest(
+      "AnyWithFieldMask",
+      R"({
+        "optionalAny": {
+          "@type": "type.googleapis.com/google.protobuf.FieldMask",
+          "value": "foo,barBaz"
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/google.protobuf.FieldMask] {
+            paths: ["foo", "bar_baz"]
+          }
+        }
+      )");
+  RunValidJsonTest(
+      "AnyWithStruct",
+      R"({
+        "optionalAny": {
+          "@type": "type.googleapis.com/google.protobuf.Struct",
+          "value": {
+            "foo": 1
+          }
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/google.protobuf.Struct] {
+            fields: {
+              key: "foo"
+              value: {
+                number_value: 1
+              }
+            }
+          }
+        }
+      )");
+  RunValidJsonTest(
+      "AnyWithValueForJsonObject",
+      R"({
+        "optionalAny": {
+          "@type": "type.googleapis.com/google.protobuf.Value",
+          "value": {
+            "foo": 1
+          }
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/google.protobuf.Value] {
+            struct_value: {
+              fields: {
+                key: "foo"
+                value: {
+                  number_value: 1
+                }
+              }
+            }
+          }
+        }
+      )");
+  RunValidJsonTest(
+      "AnyWithValueForInteger",
+      R"({
+        "optionalAny": {
+          "@type": "type.googleapis.com/google.protobuf.Value",
+          "value": 1
+        }
+      })",
+      R"(
+        optional_any: {
+          [type.googleapis.com/google.protobuf.Value] {
+            number_value: 1
+          }
+        }
+      )");
+
+  bool ok = true;
+  if (!CheckSetEmpty(expected_to_fail_,
+                     "These tests were listed in the failure list, but they "
+                     "don't exist.  Remove them from the failure list")) {
+    ok = false;
+  }
+  if (!CheckSetEmpty(unexpected_failing_tests_,
+                     "These tests failed.  If they can't be fixed right now, "
+                     "you can add them to the failure list so the overall "
+                     "suite can succeed")) {
+    ok = false;
+  }
 
-      CheckSetEmpty(unexpected_succeeding_tests_,
-                    "These tests succeeded, even though they were listed in "
-                    "the failure list.  Remove them from the failure list");
+  // Sometimes the testee may be fixed before we update the failure list (e.g.,
+  // the testee is from a different component). We warn about this case but
+  // don't consider it an overall test failure.
+  CheckSetEmpty(unexpected_succeeding_tests_,
+                "These tests succeeded, even though they were listed in "
+                "the failure list.  Remove them from the failure list");
 
-  CheckSetEmpty(skipped_,
-                "These tests were skipped (probably because support for some "
-                "features is not implemented)");
+  if (verbose_) {
+    CheckSetEmpty(skipped_,
+                  "These tests were skipped (probably because support for some "
+                  "features is not implemented)");
+  }
 
   StringAppendF(&output_,
                 "CONFORMANCE SUITE %s: %d successes, %d skipped, "

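The DurationHas*/TimestampHas* validator tests added above pin down the JSON mapping's fractional-second rule: the serializer emits 0, 3, 6, or 9 fractional digits, whichever is the shortest form that still preserves the nanos value. A minimal sketch of that selection rule, for illustration only (FormatFractionalSeconds is a hypothetical helper, not code from this change, and negative values are ignored for brevity):

    #include <cstdint>
    #include <cstdio>
    #include <string>

    // Pick 0, 3, 6 or 9 fractional digits -- the shortest that keeps "nanos" exact.
    std::string FormatFractionalSeconds(int64_t seconds, int32_t nanos) {
      char buf[64];
      if (nanos == 0) {
        snprintf(buf, sizeof(buf), "%lld", static_cast<long long>(seconds));
      } else if (nanos % 1000000 == 0) {
        snprintf(buf, sizeof(buf), "%lld.%03d",
                 static_cast<long long>(seconds), nanos / 1000000);
      } else if (nanos % 1000 == 0) {
        snprintf(buf, sizeof(buf), "%lld.%06d",
                 static_cast<long long>(seconds), nanos / 1000);
      } else {
        snprintf(buf, sizeof(buf), "%lld.%09d",
                 static_cast<long long>(seconds), nanos);
      }
      return std::string(buf);
    }

    // FormatFractionalSeconds(1, 10000000) == "1.010", matching the "1.010s"
    // expected by DurationHas3FractionalDigits; nanos == 10 yields "1.000000010".
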
+ 21 - 1
conformance/conformance_test.h

@@ -38,14 +38,18 @@
 #ifndef CONFORMANCE_CONFORMANCE_TEST_H
 #define CONFORMANCE_CONFORMANCE_TEST_H
 
+#include <functional>
 #include <string>
 #include <google/protobuf/stubs/common.h>
 #include <google/protobuf/util/type_resolver.h>
 #include <google/protobuf/wire_format_lite.h>
 
+#include "third_party/jsoncpp/json.h"
+
 namespace conformance {
 class ConformanceRequest;
 class ConformanceResponse;
+class TestAllTypes;
 }  // namespace conformance
 
 namespace google {
@@ -53,6 +57,8 @@ namespace protobuf {
 
 class ConformanceTestRunner {
  public:
+  virtual ~ConformanceTestRunner() {}
+
   // Call to run a single conformance test.
   //
   // "input" is a serialized conformance.ConformanceRequest.
@@ -60,7 +66,9 @@ class ConformanceTestRunner {
   //
   // If there is any error in running the test itself, set "runtime_error" in
   // the response.
-  virtual void RunTest(const std::string& input, std::string* output) = 0;
+  virtual void RunTest(const std::string& test_name,
+                       const std::string& input,
+                       std::string* output) = 0;
 };
 
 // Class representing the test suite itself.  To run it, implement your own
@@ -118,6 +126,18 @@ class ConformanceTestSuite {
                          conformance::WireFormat requested_output);
   void RunValidJsonTest(const string& test_name, const string& input_json,
                         const string& equivalent_text_format);
+  void RunValidJsonTestWithProtobufInput(const string& test_name,
+                                         const conformance::TestAllTypes& input,
+                                         const string& equivalent_text_format);
+
+  typedef std::function<bool(const Json::Value&)> Validator;
+  void RunValidJsonTestWithValidator(const string& test_name,
+                                     const string& input_json,
+                                     const Validator& validator);
+  void ExpectParseFailureForJson(const string& test_name,
+                                 const string& input_json);
+  void ExpectSerializeFailureForJson(const string& test_name,
+                                     const string& text_format);
   void ExpectParseFailureForProto(const std::string& proto,
                                   const std::string& test_name);
   void ExpectHardParseFailureForProto(const std::string& proto,

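For context on the interface change above: RunTest now receives the test name alongside the serialized ConformanceRequest, and is still expected to hand back a serialized ConformanceResponse, reporting harness-side problems through runtime_error. A minimal in-process runner against this interface might look like the sketch below (HandleRequest is a hypothetical testee entry point, not part of this change):

    #include <string>
    #include "conformance.pb.h"
    #include "conformance_test.h"

    // Hypothetical testee hook: fills in a response for the given request.
    void HandleRequest(const conformance::ConformanceRequest& request,
                       conformance::ConformanceResponse* response);

    class InProcessRunner : public google::protobuf::ConformanceTestRunner {
     public:
      virtual void RunTest(const std::string& test_name,
                           const std::string& input,
                           std::string* output) {
        conformance::ConformanceRequest request;
        conformance::ConformanceResponse response;
        if (!request.ParseFromString(input)) {
          // Errors in running the test itself go into runtime_error, as the
          // interface comment above specifies.
          response.set_runtime_error("could not parse request for " + test_name);
        } else {
          HandleRequest(request, &response);
        }
        response.SerializeToString(output);
      }
    };
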
+ 74 - 14
conformance/conformance_test_runner.cc

@@ -53,17 +53,23 @@
 //   3. testee sends 4-byte length M (little endian)
 //   4. testee sends M bytes representing a ConformanceResponse proto
 
+#include <algorithm>
 #include <errno.h>
-#include <unistd.h>
 #include <fstream>
+#include <sys/types.h>
+#include <sys/wait.h>
+#include <unistd.h>
 #include <vector>
 
+#include <google/protobuf/stubs/stringprintf.h>
+
 #include "conformance.pb.h"
 #include "conformance_test.h"
 
 using conformance::ConformanceRequest;
 using conformance::ConformanceResponse;
 using google::protobuf::internal::scoped_array;
+using google::protobuf::StringAppendF;
 using std::string;
 using std::vector;
 
@@ -80,17 +86,47 @@ using std::vector;
 class ForkPipeRunner : public google::protobuf::ConformanceTestRunner {
  public:
   ForkPipeRunner(const std::string &executable)
-      : executable_(executable), running_(false) {}
+      : child_pid_(-1), executable_(executable) {}
 
-  void RunTest(const std::string& request, std::string* response) {
-    if (!running_) {
+  virtual ~ForkPipeRunner() {}
+
+  void RunTest(const std::string& test_name,
+               const std::string& request,
+               std::string* response) {
+    if (child_pid_ < 0) {
       SpawnTestProgram();
     }
 
+    current_test_name_ = test_name;
+
     uint32_t len = request.size();
     CheckedWrite(write_fd_, &len, sizeof(uint32_t));
     CheckedWrite(write_fd_, request.c_str(), request.size());
-    CheckedRead(read_fd_, &len, sizeof(uint32_t));
+
+    if (!TryRead(read_fd_, &len, sizeof(uint32_t))) {
+      // We failed to read from the child; assume it crashed and try to reap it.
+      GOOGLE_LOG(INFO) << "Trying to reap child, pid=" << child_pid_;
+
+      int status;
+      waitpid(child_pid_, &status, WEXITED);
+
+      string error_msg;
+      if (WIFEXITED(status)) {
+        StringAppendF(&error_msg,
+                      "child exited, status=%d", WEXITSTATUS(status));
+      } else if (WIFSIGNALED(status)) {
+        StringAppendF(&error_msg,
+                      "child killed by signal %d", WTERMSIG(status));
+      }
+      GOOGLE_LOG(INFO) << error_msg;
+      child_pid_ = -1;
+
+      conformance::ConformanceResponse response_obj;
+      response_obj.set_runtime_error(error_msg);
+      response_obj.SerializeToString(response);
+      return;
+    }
+
     response->resize(len);
     CheckedRead(read_fd_, (void*)response->c_str(), len);
   }
@@ -134,7 +170,7 @@ class ForkPipeRunner : public google::protobuf::ConformanceTestRunner {
       CHECK_SYSCALL(close(fromproc_pipe_fd[1]));
       write_fd_ = toproc_pipe_fd[1];
       read_fd_ = fromproc_pipe_fd[0];
-      running_ = true;
+      child_pid_ = pid;
     } else {
       // Child.
       CHECK_SYSCALL(close(STDIN_FILENO));
@@ -158,30 +194,48 @@ class ForkPipeRunner : public google::protobuf::ConformanceTestRunner {
 
   void CheckedWrite(int fd, const void *buf, size_t len) {
     if (write(fd, buf, len) != len) {
-      GOOGLE_LOG(FATAL) << "Error writing to test program: " << strerror(errno);
+      GOOGLE_LOG(FATAL) << current_test_name_
+                        << ": error writing to test program: "
+                        << strerror(errno);
     }
   }
 
-  void CheckedRead(int fd, void *buf, size_t len) {
+  bool TryRead(int fd, void *buf, size_t len) {
     size_t ofs = 0;
     while (len > 0) {
       ssize_t bytes_read = read(fd, (char*)buf + ofs, len);
 
       if (bytes_read == 0) {
-        GOOGLE_LOG(FATAL) << "Unexpected EOF from test program";
+        GOOGLE_LOG(ERROR) << current_test_name_
+                          << ": unexpected EOF from test program";
+        return false;
       } else if (bytes_read < 0) {
-        GOOGLE_LOG(FATAL) << "Error reading from test program: " << strerror(errno);
+        GOOGLE_LOG(ERROR) << current_test_name_
+                          << ": error reading from test program: "
+                          << strerror(errno);
+        return false;
       }
 
       len -= bytes_read;
       ofs += bytes_read;
     }
+
+    return true;
+  }
+
+  void CheckedRead(int fd, void *buf, size_t len) {
+    if (!TryRead(fd, buf, len)) {
+      GOOGLE_LOG(FATAL) << current_test_name_
+                        << ": error reading from test program: "
+                        << strerror(errno);
+    }
   }
 
   int write_fd_;
   int read_fd_;
-  bool running_;
+  pid_t child_pid_;
   std::string executable_;
+  std::string current_test_name_;
 };
 
 void UsageError() {
@@ -202,6 +256,12 @@ void UsageError() {
 
 void ParseFailureList(const char *filename, vector<string>* failure_list) {
   std::ifstream infile(filename);
+
+  if (!infile.is_open()) {
+    fprintf(stderr, "Couldn't open failure list file: %s\n", filename);
+    exit(1);
+  }
+
   for (string line; getline(infile, line);) {
     // Remove whitespace.
     line.erase(std::remove_if(line.begin(), line.end(), ::isspace),
@@ -217,16 +277,15 @@ void ParseFailureList(const char *filename, vector<string>* failure_list) {
 }
 
 int main(int argc, char *argv[]) {
-  int arg = 1;
   char *program;
   google::protobuf::ConformanceTestSuite suite;
 
+  vector<string> failure_list;
+
   for (int arg = 1; arg < argc; ++arg) {
     if (strcmp(argv[arg], "--failure_list") == 0) {
       if (++arg == argc) UsageError();
-      vector<string> failure_list;
       ParseFailureList(argv[arg], &failure_list);
-      suite.SetFailureList(failure_list);
     } else if (strcmp(argv[arg], "--verbose") == 0) {
       suite.SetVerbose(true);
     } else if (argv[arg][0] == '-') {
@@ -241,6 +300,7 @@ int main(int argc, char *argv[]) {
     }
   }
 
+  suite.SetFailureList(failure_list);
   ForkPipeRunner runner(program);
 
   std::string output;

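The ForkPipeRunner changes above still drive the testee over the length-prefixed pipe protocol described at the top of the file: a 4-byte little-endian length followed by that many bytes of serialized proto, in each direction. A testee main loop consistent with that framing could look roughly like this sketch (Serve is a hypothetical handler; error handling is trimmed, and a little-endian host is assumed, just as the runner's raw uint32_t writes assume):

    #include <stdint.h>
    #include <unistd.h>
    #include <string>
    #include "conformance.pb.h"

    // Hypothetical handler: turns a request into a serialized ConformanceResponse.
    std::string Serve(const conformance::ConformanceRequest& request);

    // Read exactly "len" bytes; false on EOF or error (harness closed the pipe).
    static bool ReadAll(int fd, void* buf, size_t len) {
      char* p = static_cast<char*>(buf);
      while (len > 0) {
        ssize_t n = read(fd, p, len);
        if (n <= 0) return false;
        p += n;
        len -= n;
      }
      return true;
    }

    int main() {
      while (true) {
        uint32_t len;
        if (!ReadAll(STDIN_FILENO, &len, sizeof(len))) break;  // harness is done
        std::string request_bytes(len, '\0');
        if (len > 0 && !ReadAll(STDIN_FILENO, &request_bytes[0], len)) break;

        conformance::ConformanceRequest request;
        request.ParseFromString(request_bytes);
        std::string response_bytes = Serve(request);

        uint32_t out_len = response_bytes.size();
        write(STDOUT_FILENO, &out_len, sizeof(out_len));       // 4-byte length
        write(STDOUT_FILENO, response_bytes.data(), out_len);  // then the payload
      }
      return 0;
    }
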
+ 85 - 0
conformance/failure_list_cpp.txt

@@ -7,6 +7,89 @@
 # TODO(haberman): insert links to corresponding bugs tracking the issue.
 # Should we use GitHub issues or the Google-internal bug tracker?
 
+FieldMaskNumbersDontRoundTrip.JsonOutput
+FieldMaskPathsDontRoundTrip.JsonOutput
+FieldMaskTooManyUnderscore.JsonOutput
+JsonInput.AnyUnorderedTypeTag.JsonOutput
+JsonInput.AnyUnorderedTypeTag.ProtobufOutput
+JsonInput.AnyWithValueForInteger.JsonOutput
+JsonInput.AnyWithValueForInteger.ProtobufOutput
+JsonInput.AnyWithValueForJsonObject.JsonOutput
+JsonInput.AnyWithValueForJsonObject.ProtobufOutput
+JsonInput.BoolFieldDoubleQuotedFalse
+JsonInput.BoolFieldDoubleQuotedTrue
+JsonInput.BoolFieldIntegerOne
+JsonInput.BoolFieldIntegerZero
+JsonInput.BytesFieldInvalidBase64Characters
+JsonInput.BytesFieldNoPadding
+JsonInput.DoubleFieldTooSmall
+JsonInput.DurationHasZeroFractionalDigit.Validator
+JsonInput.DurationJsonInputTooLarge
+JsonInput.DurationJsonInputTooSmall
+JsonInput.DurationMissingS
+JsonInput.EnumFieldUnknownValue.Validator
+JsonInput.FieldMaskInvalidCharacter
+JsonInput.FieldNameDuplicate
+JsonInput.FieldNameDuplicateDifferentCasing1
+JsonInput.FieldNameDuplicateDifferentCasing2
+JsonInput.FieldNameInLowerCamelCase.Validator
+JsonInput.FieldNameInSnakeCase.JsonOutput
+JsonInput.FieldNameInSnakeCase.ProtobufOutput
+JsonInput.FieldNameNotQuoted
+JsonInput.FloatFieldTooLarge
+JsonInput.FloatFieldTooSmall
+JsonInput.Int32FieldLeadingSpace
+JsonInput.Int32FieldLeadingZero
+JsonInput.Int32FieldMinFloatValue.JsonOutput
+JsonInput.Int32FieldMinFloatValue.ProtobufOutput
+JsonInput.Int32FieldMinValue.JsonOutput
+JsonInput.Int32FieldMinValue.ProtobufOutput
+JsonInput.Int32FieldNegativeWithLeadingZero
+JsonInput.Int32FieldNotInteger
+JsonInput.Int32FieldNotNumber
+JsonInput.Int32FieldTooLarge
+JsonInput.Int32FieldTooSmall
+JsonInput.Int32FieldTrailingSpace
+JsonInput.Int64FieldNotInteger
+JsonInput.Int64FieldNotNumber
+JsonInput.Int64FieldTooLarge
+JsonInput.Int64FieldTooSmall
+JsonInput.MapFieldValueIsNull
+JsonInput.OneofFieldDuplicate
+JsonInput.RepeatedFieldMessageElementIsNull
+JsonInput.RepeatedFieldPrimitiveElementIsNull
+JsonInput.RepeatedFieldTrailingComma
+JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotBool
+JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotMessage
+JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotString
+JsonInput.RepeatedFieldWrongElementTypeExpectingMessagesGotBool
+JsonInput.RepeatedFieldWrongElementTypeExpectingMessagesGotInt
+JsonInput.RepeatedFieldWrongElementTypeExpectingMessagesGotString
+JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotBool
+JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotInt
+JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotMessage
+JsonInput.StringFieldNotAString
+JsonInput.StringFieldSurrogateInWrongOrder
+JsonInput.StringFieldSurrogatePair.JsonOutput
+JsonInput.StringFieldSurrogatePair.ProtobufOutput
+JsonInput.StringFieldUnpairedHighSurrogate
+JsonInput.StringFieldUnpairedLowSurrogate
+JsonInput.StringFieldUppercaseEscapeLetter
+JsonInput.TimestampJsonInputLowercaseT
+JsonInput.TimestampJsonInputLowercaseZ
+JsonInput.TimestampJsonInputMissingT
+JsonInput.TimestampJsonInputMissingZ
+JsonInput.TimestampJsonInputTooLarge
+JsonInput.TimestampJsonInputTooSmall
+JsonInput.TrailingCommaInAnObject
+JsonInput.Uint32FieldNotInteger
+JsonInput.Uint32FieldNotNumber
+JsonInput.Uint32FieldTooLarge
+JsonInput.Uint64FieldNotInteger
+JsonInput.Uint64FieldNotNumber
+JsonInput.Uint64FieldTooLarge
+JsonInput.WrapperTypesWithNullValue.JsonOutput
+JsonInput.WrapperTypesWithNullValue.ProtobufOutput
 ProtobufInput.PrematureEofBeforeKnownRepeatedValue.MESSAGE
 ProtobufInput.PrematureEofInDelimitedDataForKnownNonRepeatedValue.MESSAGE
 ProtobufInput.PrematureEofInDelimitedDataForKnownRepeatedValue.MESSAGE
@@ -19,3 +102,5 @@ ProtobufInput.PrematureEofInPackedField.SINT64
 ProtobufInput.PrematureEofInPackedField.UINT32
 ProtobufInput.PrematureEofInPackedField.UINT64
 ProtobufInput.PrematureEofInsideKnownRepeatedValue.MESSAGE
+TimestampProtoInputTooLarge.JsonOutput
+TimestampProtoInputTooSmall.JsonOutput

+ 16 - 0
conformance/failure_list_csharp.txt

@@ -0,0 +1,16 @@
+JsonInput.AnyWithValueForInteger.JsonOutput
+JsonInput.AnyWithValueForJsonObject.JsonOutput
+JsonInput.FieldNameInLowerCamelCase.Validator
+JsonInput.FieldNameInSnakeCase.JsonOutput
+JsonInput.FieldNameInSnakeCase.ProtobufOutput
+JsonInput.FieldNameWithMixedCases.JsonOutput
+JsonInput.FieldNameWithMixedCases.ProtobufOutput
+JsonInput.FieldNameWithMixedCases.Validator
+JsonInput.Int32FieldMinFloatValue.JsonOutput
+JsonInput.Int32FieldMinValue.JsonOutput
+JsonInput.Int64FieldMaxValueNotQuoted.JsonOutput
+JsonInput.Int64FieldMaxValueNotQuoted.ProtobufOutput
+JsonInput.OriginalProtoFieldName.JsonOutput
+JsonInput.StringFieldSurrogatePair.JsonOutput
+JsonInput.Uint64FieldMaxValueNotQuoted.JsonOutput
+JsonInput.Uint64FieldMaxValueNotQuoted.ProtobufOutput

+ 49 - 0
conformance/failure_list_java.txt

@@ -0,0 +1,49 @@
+# This is the list of conformance tests that are known to fail for the Java
+# implementation right now.  These should be fixed.
+#
+# By listing them here we can keep tabs on which ones are failing and be sure
+# that we don't introduce regressions in other tests.
+
+FieldMaskNumbersDontRoundTrip.JsonOutput
+FieldMaskPathsDontRoundTrip.JsonOutput
+FieldMaskTooManyUnderscore.JsonOutput
+JsonInput.AnyWithFieldMask.ProtobufOutput
+JsonInput.AnyWithValueForInteger.JsonOutput
+JsonInput.AnyWithValueForJsonObject.JsonOutput
+JsonInput.BoolFieldAllCapitalFalse
+JsonInput.BoolFieldAllCapitalTrue
+JsonInput.BoolFieldCamelCaseFalse
+JsonInput.BoolFieldCamelCaseTrue
+JsonInput.BoolFieldDoubleQuotedFalse
+JsonInput.BoolFieldDoubleQuotedTrue
+JsonInput.BoolMapFieldKeyNotQuoted
+JsonInput.DoubleFieldInfinityNotQuoted
+JsonInput.DoubleFieldNanNotQuoted
+JsonInput.DoubleFieldNegativeInfinityNotQuoted
+JsonInput.EnumFieldNotQuoted
+JsonInput.FieldMask.ProtobufOutput
+JsonInput.FieldMaskInvalidCharacter
+JsonInput.FieldNameDuplicate
+JsonInput.FieldNameInSnakeCase.JsonOutput
+JsonInput.FieldNameNotQuoted
+JsonInput.FloatFieldInfinityNotQuoted
+JsonInput.FloatFieldNanNotQuoted
+JsonInput.FloatFieldNegativeInfinityNotQuoted
+JsonInput.Int32FieldLeadingZero
+JsonInput.Int32FieldMinFloatValue.JsonOutput
+JsonInput.Int32FieldMinValue.JsonOutput
+JsonInput.Int32FieldNegativeWithLeadingZero
+JsonInput.Int32FieldPlusSign
+JsonInput.Int32MapFieldKeyNotQuoted
+JsonInput.Int64MapFieldKeyNotQuoted
+JsonInput.JsonWithComments
+JsonInput.OriginalProtoFieldName.JsonOutput
+JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotBool
+JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotInt
+JsonInput.StringFieldNotAString
+JsonInput.StringFieldSurrogateInWrongOrder
+JsonInput.StringFieldUnpairedHighSurrogate
+JsonInput.StringFieldUnpairedLowSurrogate
+JsonInput.StringFieldUppercaseEscapeLetter
+JsonInput.Uint32MapFieldKeyNotQuoted
+JsonInput.Uint64MapFieldKeyNotQuoted

+ 4 - 0
conformance/failure_list_objc.txt

@@ -0,0 +1,4 @@
+# No tests currently failing.
+#
+# JSON input/output tests are skipped (in conformance_objc.m) because the
+# mobile runtimes omit JSON wire format support to avoid code bloat.

+ 2 - 0
conformance/failure_list_python-post26.txt

@@ -0,0 +1,2 @@
+JsonInput.StringFieldSurrogateInWrongOrder
+JsonInput.StringFieldUnpairedHighSurrogate

+ 85 - 0
conformance/failure_list_python.txt

@@ -0,0 +1,85 @@
+DurationProtoInputTooLarge.JsonOutput
+DurationProtoInputTooSmall.JsonOutput
+FieldMaskNumbersDontRoundTrip.JsonOutput
+FieldMaskPathsDontRoundTrip.JsonOutput
+FieldMaskTooManyUnderscore.JsonOutput
+JsonInput.Any.JsonOutput
+JsonInput.Any.ProtobufOutput
+JsonInput.AnyNested.JsonOutput
+JsonInput.AnyNested.ProtobufOutput
+JsonInput.AnyUnorderedTypeTag.JsonOutput
+JsonInput.AnyUnorderedTypeTag.ProtobufOutput
+JsonInput.AnyWithDuration.JsonOutput
+JsonInput.AnyWithDuration.ProtobufOutput
+JsonInput.AnyWithFieldMask.JsonOutput
+JsonInput.AnyWithFieldMask.ProtobufOutput
+JsonInput.AnyWithInt32ValueWrapper.JsonOutput
+JsonInput.AnyWithInt32ValueWrapper.ProtobufOutput
+JsonInput.AnyWithStruct.JsonOutput
+JsonInput.AnyWithStruct.ProtobufOutput
+JsonInput.AnyWithTimestamp.JsonOutput
+JsonInput.AnyWithTimestamp.ProtobufOutput
+JsonInput.AnyWithValueForInteger.JsonOutput
+JsonInput.AnyWithValueForInteger.ProtobufOutput
+JsonInput.AnyWithValueForJsonObject.JsonOutput
+JsonInput.AnyWithValueForJsonObject.ProtobufOutput
+JsonInput.BytesFieldInvalidBase64Characters
+JsonInput.DoubleFieldInfinityNotQuoted
+JsonInput.DoubleFieldNanNotQuoted
+JsonInput.DoubleFieldNegativeInfinityNotQuoted
+JsonInput.DoubleFieldTooSmall
+JsonInput.DurationJsonInputTooLarge
+JsonInput.DurationJsonInputTooSmall
+JsonInput.DurationMissingS
+JsonInput.EnumFieldNumericValueNonZero.JsonOutput
+JsonInput.EnumFieldNumericValueNonZero.ProtobufOutput
+JsonInput.EnumFieldNumericValueZero.JsonOutput
+JsonInput.EnumFieldNumericValueZero.ProtobufOutput
+JsonInput.EnumFieldUnknownValue.Validator
+JsonInput.FieldMask.ProtobufOutput
+JsonInput.FieldMaskInvalidCharacter
+JsonInput.FieldNameInLowerCamelCase.Validator
+JsonInput.FieldNameInSnakeCase.JsonOutput
+JsonInput.FieldNameInSnakeCase.ProtobufOutput
+JsonInput.FloatFieldInfinityNotQuoted
+JsonInput.FloatFieldNanNotQuoted
+JsonInput.FloatFieldNegativeInfinityNotQuoted
+JsonInput.FloatFieldTooLarge
+JsonInput.FloatFieldTooSmall
+JsonInput.Int32FieldExponentialFormat.JsonOutput
+JsonInput.Int32FieldExponentialFormat.ProtobufOutput
+JsonInput.Int32FieldFloatTrailingZero.JsonOutput
+JsonInput.Int32FieldFloatTrailingZero.ProtobufOutput
+JsonInput.Int32FieldMaxFloatValue.JsonOutput
+JsonInput.Int32FieldMaxFloatValue.ProtobufOutput
+JsonInput.Int32FieldMinFloatValue.JsonOutput
+JsonInput.Int32FieldMinFloatValue.ProtobufOutput
+JsonInput.Int32FieldMinValue.JsonOutput
+JsonInput.OriginalProtoFieldName.JsonOutput
+JsonInput.OriginalProtoFieldName.ProtobufOutput
+JsonInput.RepeatedFieldMessageElementIsNull
+JsonInput.RepeatedFieldPrimitiveElementIsNull
+JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotBool
+JsonInput.StringFieldSurrogatePair.JsonOutput
+JsonInput.StringFieldUnpairedLowSurrogate
+JsonInput.Struct.JsonOutput
+JsonInput.Struct.ProtobufOutput
+JsonInput.TimestampJsonInputLowercaseT
+JsonInput.Uint32FieldMaxFloatValue.JsonOutput
+JsonInput.Uint32FieldMaxFloatValue.ProtobufOutput
+JsonInput.ValueAcceptBool.JsonOutput
+JsonInput.ValueAcceptBool.ProtobufOutput
+JsonInput.ValueAcceptFloat.JsonOutput
+JsonInput.ValueAcceptFloat.ProtobufOutput
+JsonInput.ValueAcceptInteger.JsonOutput
+JsonInput.ValueAcceptInteger.ProtobufOutput
+JsonInput.ValueAcceptList.JsonOutput
+JsonInput.ValueAcceptList.ProtobufOutput
+JsonInput.ValueAcceptNull.JsonOutput
+JsonInput.ValueAcceptNull.ProtobufOutput
+JsonInput.ValueAcceptObject.JsonOutput
+JsonInput.ValueAcceptObject.ProtobufOutput
+JsonInput.ValueAcceptString.JsonOutput
+JsonInput.ValueAcceptString.ProtobufOutput
+TimestampProtoInputTooLarge.JsonOutput
+TimestampProtoInputTooSmall.JsonOutput

+ 110 - 0
conformance/failure_list_python_cpp.txt

@@ -0,0 +1,110 @@
+# This is the list of conformance tests that are known to fail for the
+# Python/C++ implementation right now.  These should be fixed.
+#
+# By listing them here we can keep tabs on which ones are failing and be sure
+# that we don't introduce regressions in other tests.
+#
+# TODO(haberman): insert links to corresponding bugs tracking the issue.
+# Should we use GitHub issues or the Google-internal bug tracker?
+
+DurationProtoInputTooLarge.JsonOutput
+DurationProtoInputTooSmall.JsonOutput
+FieldMaskNumbersDontRoundTrip.JsonOutput
+FieldMaskPathsDontRoundTrip.JsonOutput
+FieldMaskTooManyUnderscore.JsonOutput
+JsonInput.Any.JsonOutput
+JsonInput.Any.ProtobufOutput
+JsonInput.AnyNested.JsonOutput
+JsonInput.AnyNested.ProtobufOutput
+JsonInput.AnyUnorderedTypeTag.JsonOutput
+JsonInput.AnyUnorderedTypeTag.ProtobufOutput
+JsonInput.AnyWithDuration.JsonOutput
+JsonInput.AnyWithDuration.ProtobufOutput
+JsonInput.AnyWithFieldMask.JsonOutput
+JsonInput.AnyWithFieldMask.ProtobufOutput
+JsonInput.AnyWithInt32ValueWrapper.JsonOutput
+JsonInput.AnyWithInt32ValueWrapper.ProtobufOutput
+JsonInput.AnyWithStruct.JsonOutput
+JsonInput.AnyWithStruct.ProtobufOutput
+JsonInput.AnyWithTimestamp.JsonOutput
+JsonInput.AnyWithTimestamp.ProtobufOutput
+JsonInput.AnyWithValueForInteger.JsonOutput
+JsonInput.AnyWithValueForInteger.ProtobufOutput
+JsonInput.AnyWithValueForJsonObject.JsonOutput
+JsonInput.AnyWithValueForJsonObject.ProtobufOutput
+JsonInput.BytesFieldInvalidBase64Characters
+JsonInput.DoubleFieldInfinityNotQuoted
+JsonInput.DoubleFieldNanNotQuoted
+JsonInput.DoubleFieldNegativeInfinityNotQuoted
+JsonInput.DoubleFieldTooSmall
+JsonInput.DurationJsonInputTooLarge
+JsonInput.DurationJsonInputTooSmall
+JsonInput.DurationMissingS
+JsonInput.EnumFieldNumericValueNonZero.JsonOutput
+JsonInput.EnumFieldNumericValueNonZero.ProtobufOutput
+JsonInput.EnumFieldNumericValueZero.JsonOutput
+JsonInput.EnumFieldNumericValueZero.ProtobufOutput
+JsonInput.EnumFieldUnknownValue.Validator
+JsonInput.FieldMask.ProtobufOutput
+JsonInput.FieldMaskInvalidCharacter
+JsonInput.FieldNameInLowerCamelCase.Validator
+JsonInput.FieldNameInSnakeCase.JsonOutput
+JsonInput.FieldNameInSnakeCase.ProtobufOutput
+JsonInput.FloatFieldInfinityNotQuoted
+JsonInput.FloatFieldNanNotQuoted
+JsonInput.FloatFieldNegativeInfinityNotQuoted
+JsonInput.FloatFieldTooLarge
+JsonInput.FloatFieldTooSmall
+JsonInput.Int32FieldExponentialFormat.JsonOutput
+JsonInput.Int32FieldExponentialFormat.ProtobufOutput
+JsonInput.Int32FieldFloatTrailingZero.JsonOutput
+JsonInput.Int32FieldFloatTrailingZero.ProtobufOutput
+JsonInput.Int32FieldMaxFloatValue.JsonOutput
+JsonInput.Int32FieldMaxFloatValue.ProtobufOutput
+JsonInput.Int32FieldMinFloatValue.JsonOutput
+JsonInput.Int32FieldMinFloatValue.ProtobufOutput
+JsonInput.Int32FieldMinValue.JsonOutput
+JsonInput.OriginalProtoFieldName.JsonOutput
+JsonInput.OriginalProtoFieldName.ProtobufOutput
+JsonInput.RepeatedFieldMessageElementIsNull
+JsonInput.RepeatedFieldPrimitiveElementIsNull
+JsonInput.RepeatedFieldWrongElementTypeExpectingIntegersGotBool
+JsonInput.StringFieldSurrogatePair.JsonOutput
+JsonInput.StringFieldUnpairedLowSurrogate
+JsonInput.Struct.JsonOutput
+JsonInput.Struct.ProtobufOutput
+JsonInput.TimestampJsonInputLowercaseT
+JsonInput.Uint32FieldMaxFloatValue.JsonOutput
+JsonInput.Uint32FieldMaxFloatValue.ProtobufOutput
+JsonInput.ValueAcceptBool.JsonOutput
+JsonInput.ValueAcceptBool.ProtobufOutput
+JsonInput.ValueAcceptFloat.JsonOutput
+JsonInput.ValueAcceptFloat.ProtobufOutput
+JsonInput.ValueAcceptInteger.JsonOutput
+JsonInput.ValueAcceptInteger.ProtobufOutput
+JsonInput.ValueAcceptList.JsonOutput
+JsonInput.ValueAcceptList.ProtobufOutput
+JsonInput.ValueAcceptNull.JsonOutput
+JsonInput.ValueAcceptNull.ProtobufOutput
+JsonInput.ValueAcceptObject.JsonOutput
+JsonInput.ValueAcceptObject.ProtobufOutput
+JsonInput.ValueAcceptString.JsonOutput
+JsonInput.ValueAcceptString.ProtobufOutput
+ProtobufInput.PrematureEofInDelimitedDataForKnownNonRepeatedValue.MESSAGE
+ProtobufInput.PrematureEofInDelimitedDataForKnownRepeatedValue.MESSAGE
+ProtobufInput.PrematureEofInPackedField.BOOL
+ProtobufInput.PrematureEofInPackedField.DOUBLE
+ProtobufInput.PrematureEofInPackedField.ENUM
+ProtobufInput.PrematureEofInPackedField.FIXED32
+ProtobufInput.PrematureEofInPackedField.FIXED64
+ProtobufInput.PrematureEofInPackedField.FLOAT
+ProtobufInput.PrematureEofInPackedField.INT32
+ProtobufInput.PrematureEofInPackedField.INT64
+ProtobufInput.PrematureEofInPackedField.SFIXED32
+ProtobufInput.PrematureEofInPackedField.SFIXED64
+ProtobufInput.PrematureEofInPackedField.SINT32
+ProtobufInput.PrematureEofInPackedField.SINT64
+ProtobufInput.PrematureEofInPackedField.UINT32
+ProtobufInput.PrematureEofInPackedField.UINT64
+TimestampProtoInputTooLarge.JsonOutput
+TimestampProtoInputTooSmall.JsonOutput

+ 199 - 2
conformance/failure_list_ruby.txt

@@ -1,2 +1,199 @@
-JsonInput.HelloWorld.JsonOutput
-JsonInput.HelloWorld.ProtobufOutput
+DurationProtoInputTooLarge.JsonOutput
+DurationProtoInputTooSmall.JsonOutput
+FieldMaskNumbersDontRoundTrip.JsonOutput
+FieldMaskPathsDontRoundTrip.JsonOutput
+FieldMaskTooManyUnderscore.JsonOutput
+JsonInput.Any.JsonOutput
+JsonInput.Any.ProtobufOutput
+JsonInput.AnyNested.JsonOutput
+JsonInput.AnyNested.ProtobufOutput
+JsonInput.AnyUnorderedTypeTag.JsonOutput
+JsonInput.AnyUnorderedTypeTag.ProtobufOutput
+JsonInput.AnyWithDuration.JsonOutput
+JsonInput.AnyWithDuration.ProtobufOutput
+JsonInput.AnyWithFieldMask.JsonOutput
+JsonInput.AnyWithFieldMask.ProtobufOutput
+JsonInput.AnyWithInt32ValueWrapper.JsonOutput
+JsonInput.AnyWithInt32ValueWrapper.ProtobufOutput
+JsonInput.AnyWithStruct.JsonOutput
+JsonInput.AnyWithStruct.ProtobufOutput
+JsonInput.AnyWithTimestamp.JsonOutput
+JsonInput.AnyWithTimestamp.ProtobufOutput
+JsonInput.AnyWithValueForInteger.JsonOutput
+JsonInput.AnyWithValueForInteger.ProtobufOutput
+JsonInput.AnyWithValueForJsonObject.JsonOutput
+JsonInput.AnyWithValueForJsonObject.ProtobufOutput
+JsonInput.BoolFieldIntegerOne
+JsonInput.BoolFieldIntegerZero
+JsonInput.DoubleFieldInfinity.JsonOutput
+JsonInput.DoubleFieldInfinity.ProtobufOutput
+JsonInput.DoubleFieldMaxNegativeValue.JsonOutput
+JsonInput.DoubleFieldMaxNegativeValue.ProtobufOutput
+JsonInput.DoubleFieldMaxPositiveValue.JsonOutput
+JsonInput.DoubleFieldMaxPositiveValue.ProtobufOutput
+JsonInput.DoubleFieldMinNegativeValue.JsonOutput
+JsonInput.DoubleFieldMinNegativeValue.ProtobufOutput
+JsonInput.DoubleFieldMinPositiveValue.JsonOutput
+JsonInput.DoubleFieldMinPositiveValue.ProtobufOutput
+JsonInput.DoubleFieldNan.JsonOutput
+JsonInput.DoubleFieldNan.ProtobufOutput
+JsonInput.DoubleFieldNegativeInfinity.JsonOutput
+JsonInput.DoubleFieldNegativeInfinity.ProtobufOutput
+JsonInput.DoubleFieldQuotedValue.JsonOutput
+JsonInput.DoubleFieldQuotedValue.ProtobufOutput
+JsonInput.DurationHas3FractionalDigits.Validator
+JsonInput.DurationHas6FractionalDigits.Validator
+JsonInput.DurationHas9FractionalDigits.Validator
+JsonInput.DurationHasZeroFractionalDigit.Validator
+JsonInput.DurationMaxValue.JsonOutput
+JsonInput.DurationMaxValue.ProtobufOutput
+JsonInput.DurationMinValue.JsonOutput
+JsonInput.DurationMinValue.ProtobufOutput
+JsonInput.DurationRepeatedValue.JsonOutput
+JsonInput.DurationRepeatedValue.ProtobufOutput
+JsonInput.EnumFieldNumericValueNonZero.JsonOutput
+JsonInput.EnumFieldNumericValueNonZero.ProtobufOutput
+JsonInput.EnumFieldNumericValueZero.JsonOutput
+JsonInput.EnumFieldNumericValueZero.ProtobufOutput
+JsonInput.EnumFieldUnknownValue.Validator
+JsonInput.FieldMask.JsonOutput
+JsonInput.FieldMask.ProtobufOutput
+JsonInput.FieldNameInSnakeCase.JsonOutput
+JsonInput.FieldNameWithMixedCases.JsonOutput
+JsonInput.FieldNameWithMixedCases.ProtobufOutput
+JsonInput.FieldNameWithMixedCases.Validator
+JsonInput.FloatFieldInfinity.JsonOutput
+JsonInput.FloatFieldInfinity.ProtobufOutput
+JsonInput.FloatFieldNan.JsonOutput
+JsonInput.FloatFieldNan.ProtobufOutput
+JsonInput.FloatFieldNegativeInfinity.JsonOutput
+JsonInput.FloatFieldNegativeInfinity.ProtobufOutput
+JsonInput.FloatFieldQuotedValue.JsonOutput
+JsonInput.FloatFieldQuotedValue.ProtobufOutput
+JsonInput.FloatFieldTooLarge
+JsonInput.FloatFieldTooSmall
+JsonInput.Int32FieldExponentialFormat.JsonOutput
+JsonInput.Int32FieldExponentialFormat.ProtobufOutput
+JsonInput.Int32FieldFloatTrailingZero.JsonOutput
+JsonInput.Int32FieldFloatTrailingZero.ProtobufOutput
+JsonInput.Int32FieldMaxFloatValue.JsonOutput
+JsonInput.Int32FieldMaxFloatValue.ProtobufOutput
+JsonInput.Int32FieldMinFloatValue.JsonOutput
+JsonInput.Int32FieldMinFloatValue.ProtobufOutput
+JsonInput.Int32FieldStringValue.JsonOutput
+JsonInput.Int32FieldStringValue.ProtobufOutput
+JsonInput.Int32FieldStringValueEscaped.JsonOutput
+JsonInput.Int32FieldStringValueEscaped.ProtobufOutput
+JsonInput.Int32MapEscapedKey.JsonOutput
+JsonInput.Int32MapEscapedKey.ProtobufOutput
+JsonInput.Int32MapField.JsonOutput
+JsonInput.Int32MapField.ProtobufOutput
+JsonInput.Int64FieldBeString.Validator
+JsonInput.Int64FieldMaxValue.JsonOutput
+JsonInput.Int64FieldMaxValue.ProtobufOutput
+JsonInput.Int64FieldMinValue.JsonOutput
+JsonInput.Int64FieldMinValue.ProtobufOutput
+JsonInput.Int64MapEscapedKey.JsonOutput
+JsonInput.Int64MapEscapedKey.ProtobufOutput
+JsonInput.Int64MapField.JsonOutput
+JsonInput.Int64MapField.ProtobufOutput
+JsonInput.MessageField.JsonOutput
+JsonInput.MessageField.ProtobufOutput
+JsonInput.MessageMapField.JsonOutput
+JsonInput.MessageMapField.ProtobufOutput
+JsonInput.MessageRepeatedField.JsonOutput
+JsonInput.MessageRepeatedField.ProtobufOutput
+JsonInput.OptionalBoolWrapper.JsonOutput
+JsonInput.OptionalBoolWrapper.ProtobufOutput
+JsonInput.OptionalBytesWrapper.JsonOutput
+JsonInput.OptionalBytesWrapper.ProtobufOutput
+JsonInput.OptionalDoubleWrapper.JsonOutput
+JsonInput.OptionalDoubleWrapper.ProtobufOutput
+JsonInput.OptionalFloatWrapper.JsonOutput
+JsonInput.OptionalFloatWrapper.ProtobufOutput
+JsonInput.OptionalInt32Wrapper.JsonOutput
+JsonInput.OptionalInt32Wrapper.ProtobufOutput
+JsonInput.OptionalInt64Wrapper.JsonOutput
+JsonInput.OptionalInt64Wrapper.ProtobufOutput
+JsonInput.OptionalStringWrapper.JsonOutput
+JsonInput.OptionalStringWrapper.ProtobufOutput
+JsonInput.OptionalUint32Wrapper.JsonOutput
+JsonInput.OptionalUint32Wrapper.ProtobufOutput
+JsonInput.OptionalUint64Wrapper.JsonOutput
+JsonInput.OptionalUint64Wrapper.ProtobufOutput
+JsonInput.OptionalWrapperTypesWithNonDefaultValue.JsonOutput
+JsonInput.OptionalWrapperTypesWithNonDefaultValue.ProtobufOutput
+JsonInput.OriginalProtoFieldName.JsonOutput
+JsonInput.PrimitiveRepeatedField.JsonOutput
+JsonInput.PrimitiveRepeatedField.ProtobufOutput
+JsonInput.RepeatedBoolWrapper.JsonOutput
+JsonInput.RepeatedBoolWrapper.ProtobufOutput
+JsonInput.RepeatedBytesWrapper.JsonOutput
+JsonInput.RepeatedBytesWrapper.ProtobufOutput
+JsonInput.RepeatedDoubleWrapper.JsonOutput
+JsonInput.RepeatedDoubleWrapper.ProtobufOutput
+JsonInput.RepeatedFieldWrongElementTypeExpectingStringsGotInt
+JsonInput.RepeatedFloatWrapper.JsonOutput
+JsonInput.RepeatedFloatWrapper.ProtobufOutput
+JsonInput.RepeatedInt32Wrapper.JsonOutput
+JsonInput.RepeatedInt32Wrapper.ProtobufOutput
+JsonInput.RepeatedInt64Wrapper.JsonOutput
+JsonInput.RepeatedInt64Wrapper.ProtobufOutput
+JsonInput.RepeatedStringWrapper.JsonOutput
+JsonInput.RepeatedStringWrapper.ProtobufOutput
+JsonInput.RepeatedUint32Wrapper.JsonOutput
+JsonInput.RepeatedUint32Wrapper.ProtobufOutput
+JsonInput.RepeatedUint64Wrapper.JsonOutput
+JsonInput.RepeatedUint64Wrapper.ProtobufOutput
+JsonInput.StringFieldNotAString
+JsonInput.StringFieldSurrogateInWrongOrder
+JsonInput.StringFieldSurrogatePair.JsonOutput
+JsonInput.StringFieldSurrogatePair.ProtobufOutput
+JsonInput.StringFieldUnpairedHighSurrogate
+JsonInput.StringFieldUnpairedLowSurrogate
+JsonInput.Struct.JsonOutput
+JsonInput.Struct.ProtobufOutput
+JsonInput.TimestampHas3FractionalDigits.Validator
+JsonInput.TimestampHas6FractionalDigits.Validator
+JsonInput.TimestampHas9FractionalDigits.Validator
+JsonInput.TimestampHasZeroFractionalDigit.Validator
+JsonInput.TimestampMaxValue.JsonOutput
+JsonInput.TimestampMaxValue.ProtobufOutput
+JsonInput.TimestampMinValue.JsonOutput
+JsonInput.TimestampMinValue.ProtobufOutput
+JsonInput.TimestampRepeatedValue.JsonOutput
+JsonInput.TimestampRepeatedValue.ProtobufOutput
+JsonInput.TimestampWithNegativeOffset.JsonOutput
+JsonInput.TimestampWithNegativeOffset.ProtobufOutput
+JsonInput.TimestampWithPositiveOffset.JsonOutput
+JsonInput.TimestampWithPositiveOffset.ProtobufOutput
+JsonInput.TimestampZeroNormalized.Validator
+JsonInput.Uint32FieldMaxFloatValue.JsonOutput
+JsonInput.Uint32FieldMaxFloatValue.ProtobufOutput
+JsonInput.Uint32MapField.JsonOutput
+JsonInput.Uint32MapField.ProtobufOutput
+JsonInput.Uint64FieldBeString.Validator
+JsonInput.Uint64FieldMaxValue.JsonOutput
+JsonInput.Uint64FieldMaxValue.ProtobufOutput
+JsonInput.Uint64MapField.JsonOutput
+JsonInput.Uint64MapField.ProtobufOutput
+JsonInput.ValueAcceptBool.JsonOutput
+JsonInput.ValueAcceptBool.ProtobufOutput
+JsonInput.ValueAcceptFloat.JsonOutput
+JsonInput.ValueAcceptFloat.ProtobufOutput
+JsonInput.ValueAcceptInteger.JsonOutput
+JsonInput.ValueAcceptInteger.ProtobufOutput
+JsonInput.ValueAcceptList.JsonOutput
+JsonInput.ValueAcceptList.ProtobufOutput
+JsonInput.ValueAcceptNull.JsonOutput
+JsonInput.ValueAcceptNull.ProtobufOutput
+JsonInput.ValueAcceptObject.JsonOutput
+JsonInput.ValueAcceptObject.ProtobufOutput
+JsonInput.ValueAcceptString.JsonOutput
+JsonInput.ValueAcceptString.ProtobufOutput
+ProtobufInput.DoubleFieldNormalizeQuietNan.JsonOutput
+ProtobufInput.DoubleFieldNormalizeSignalingNan.JsonOutput
+ProtobufInput.FloatFieldNormalizeQuietNan.JsonOutput
+ProtobufInput.FloatFieldNormalizeSignalingNan.JsonOutput
+TimestampProtoInputTooLarge.JsonOutput
+TimestampProtoInputTooSmall.JsonOutput

+ 2075 - 0
conformance/third_party/jsoncpp/json.h

@@ -0,0 +1,2075 @@
+/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).
+/// It is intended to be used with #include "json/json.h"
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+/*
+The JsonCpp library's source code, including accompanying documentation, 
+tests and demonstration applications, are licensed under the following
+conditions...
+
+The author (Baptiste Lepilleur) explicitly disclaims copyright in all 
+jurisdictions which recognize such a disclaimer. In such jurisdictions, 
+this software is released into the Public Domain.
+
+In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
+2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
+released under the terms of the MIT License (see below).
+
+In jurisdictions which recognize Public Domain property, the user of this 
+software may choose to accept it either as 1) Public Domain, 2) under the 
+conditions of the MIT License (see below), or 3) under the terms of dual 
+Public Domain/MIT License conditions described here, as they choose.
+
+The MIT License is about as close to Public Domain as a license can get, and is
+described in clear, concise terms at:
+
+   http://en.wikipedia.org/wiki/MIT_License
+   
+The full text of the MIT License follows:
+
+========================================================================
+Copyright (c) 2007-2010 Baptiste Lepilleur
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+========================================================================
+(END LICENSE TEXT)
+
+The MIT license is compatible with both the GPL and commercial
+software, affording one all of the rights of Public Domain with the
+minor nuisance of being required to keep the above copyright notice
+and license text in the source code. Note also that by accepting the
+Public Domain "license" you can re-license your copy using whatever
+license you like.
+
+*/
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+#ifndef JSON_AMALGATED_H_INCLUDED
+# define JSON_AMALGATED_H_INCLUDED
+/// If defined, indicates that the source file is amalgated
+/// to prevent private header inclusion.
+#define JSON_IS_AMALGAMATION
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/version.h
+// //////////////////////////////////////////////////////////////////////
+
+// DO NOT EDIT. This file (and "version") is generated by CMake.
+// Run CMake configure step to update it.
+#ifndef JSON_VERSION_H_INCLUDED
+# define JSON_VERSION_H_INCLUDED
+
+# define JSONCPP_VERSION_STRING "1.6.5"
+# define JSONCPP_VERSION_MAJOR 1
+# define JSONCPP_VERSION_MINOR 6
+# define JSONCPP_VERSION_PATCH 5
+# define JSONCPP_VERSION_QUALIFIER
+# define JSONCPP_VERSION_HEXA ((JSONCPP_VERSION_MAJOR << 24) | (JSONCPP_VERSION_MINOR << 16) | (JSONCPP_VERSION_PATCH << 8))
+
+#endif // JSON_VERSION_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/version.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/config.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_CONFIG_H_INCLUDED
+#define JSON_CONFIG_H_INCLUDED
+
+/// If defined, indicates that json library is embedded in CppTL library.
+//# define JSON_IN_CPPTL 1
+
+/// If defined, indicates that json may leverage CppTL library
+//#  define JSON_USE_CPPTL 1
+/// If defined, indicates that cpptl vector based map should be used instead of
+/// std::map
+/// as Value container.
+//#  define JSON_USE_CPPTL_SMALLMAP 1
+
+// If non-zero, the library uses exceptions to report bad input instead of C
+// assertion macros. The default is to use exceptions.
+#ifndef JSON_USE_EXCEPTION
+#define JSON_USE_EXCEPTION 1
+#endif
+
+/// If defined, indicates that the source file is amalgamated
+/// to prevent private header inclusion.
+/// Remarks: it is automatically defined in the generated amalgamated header.
+// #define JSON_IS_AMALGAMATION
+
+#ifdef JSON_IN_CPPTL
+#include <cpptl/config.h>
+#ifndef JSON_USE_CPPTL
+#define JSON_USE_CPPTL 1
+#endif
+#endif
+
+#ifdef JSON_IN_CPPTL
+#define JSON_API CPPTL_API
+#elif defined(JSON_DLL_BUILD)
+#if defined(_MSC_VER)
+#define JSON_API __declspec(dllexport)
+#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
+#endif // if defined(_MSC_VER)
+#elif defined(JSON_DLL)
+#if defined(_MSC_VER)
+#define JSON_API __declspec(dllimport)
+#define JSONCPP_DISABLE_DLL_INTERFACE_WARNING
+#endif // if defined(_MSC_VER)
+#endif // ifdef JSON_IN_CPPTL
+#if !defined(JSON_API)
+#define JSON_API
+#endif
+
+// If JSON_NO_INT64 is defined, then Json only supports the C++ "int" type for
+// integer storage, and 64-bit integer support is disabled.
+// #define JSON_NO_INT64 1
+
+#if defined(_MSC_VER) // MSVC
+#  if _MSC_VER <= 1200 // MSVC 6
+    // Microsoft Visual Studio 6 only supports conversion from __int64 to double
+    // (no conversion from unsigned __int64).
+#    define JSON_USE_INT64_DOUBLE_CONVERSION 1
+    // Disable warning 4786 for VS6 caused by STL (identifier was truncated to '255'
+    // characters in the debug information)
+    // All projects I've ever seen with VS6 were using this globally (not bothering
+    // with pragma push/pop).
+#    pragma warning(disable : 4786)
+#  endif // MSVC 6
+
+#  if _MSC_VER >= 1500 // MSVC 2008
+    /// Indicates that the following function is deprecated.
+#    define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
+#  endif
+
+#endif // defined(_MSC_VER)
+
+
+#ifndef JSON_HAS_RVALUE_REFERENCES
+
+#if defined(_MSC_VER) && _MSC_VER >= 1600 // MSVC >= 2010
+#define JSON_HAS_RVALUE_REFERENCES 1
+#endif // MSVC >= 2010
+
+#ifdef __clang__
+#if __has_feature(cxx_rvalue_references)
+#define JSON_HAS_RVALUE_REFERENCES 1
+#endif  // has_feature
+
+#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
+#if defined(__GXX_EXPERIMENTAL_CXX0X__) || (__cplusplus >= 201103L)
+#define JSON_HAS_RVALUE_REFERENCES 1
+#endif  // GXX_EXPERIMENTAL
+
+#endif // __clang__ || __GNUC__
+
+#endif // not defined JSON_HAS_RVALUE_REFERENCES
+
+#ifndef JSON_HAS_RVALUE_REFERENCES
+#define JSON_HAS_RVALUE_REFERENCES 0
+#endif
+
+#ifdef __clang__
+#elif defined __GNUC__ // not clang (gcc comes later since clang emulates gcc)
+#  if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5))
+#    define JSONCPP_DEPRECATED(message)  __attribute__ ((deprecated(message)))
+#  elif (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 1))
+#    define JSONCPP_DEPRECATED(message)  __attribute__((__deprecated__))
+#  endif  // GNUC version
+#endif // __clang__ || __GNUC__
+
+#if !defined(JSONCPP_DEPRECATED)
+#define JSONCPP_DEPRECATED(message)
+#endif // if !defined(JSONCPP_DEPRECATED)
+
+namespace Json {
+typedef int Int;
+typedef unsigned int UInt;
+#if defined(JSON_NO_INT64)
+typedef int LargestInt;
+typedef unsigned int LargestUInt;
+#undef JSON_HAS_INT64
+#else                 // if defined(JSON_NO_INT64)
+// For Microsoft Visual Studio, use compiler-specific types, as long long is not supported
+#if defined(_MSC_VER) // Microsoft Visual Studio
+typedef __int64 Int64;
+typedef unsigned __int64 UInt64;
+#else                 // if defined(_MSC_VER) // Other platforms, use long long
+typedef long long int Int64;
+typedef unsigned long long int UInt64;
+#endif // if defined(_MSC_VER)
+typedef Int64 LargestInt;
+typedef UInt64 LargestUInt;
+#define JSON_HAS_INT64
+#endif // if defined(JSON_NO_INT64)
+} // end namespace Json
+
+#endif // JSON_CONFIG_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/config.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/forwards.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_FORWARDS_H_INCLUDED
+#define JSON_FORWARDS_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "config.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json {
+
+// writer.h
+class FastWriter;
+class StyledWriter;
+
+// reader.h
+class Reader;
+
+// features.h
+class Features;
+
+// value.h
+typedef unsigned int ArrayIndex;
+class StaticString;
+class Path;
+class PathArgument;
+class Value;
+class ValueIteratorBase;
+class ValueIterator;
+class ValueConstIterator;
+
+} // namespace Json
+
+#endif // JSON_FORWARDS_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/forwards.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/features.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_FEATURES_H_INCLUDED
+#define CPPTL_JSON_FEATURES_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "forwards.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json {
+
+/** \brief Configuration passed to reader and writer.
+ * This configuration object can be used to force the Reader or Writer
+ * to behave in a standard conforming way.
+ */
+class JSON_API Features {
+public:
+  /** \brief A configuration that allows all features and assumes all strings
+   * are UTF-8.
+   * - C & C++ comments are allowed
+   * - Root object can be any JSON value
+   * - Assumes Value strings are encoded in UTF-8
+   */
+  static Features all();
+
+  /** \brief A configuration that is strictly compatible with the JSON
+   * specification.
+   * - Comments are forbidden.
+   * - Root object must be either an array or an object value.
+   * - Assumes Value strings are encoded in UTF-8
+   */
+  static Features strictMode();
+
+  /** \brief Initialize the configuration like Features::all().
+   */
+  Features();
+
+  /// \c true if comments are allowed. Default: \c true.
+  bool allowComments_;
+
+  /// \c true if root must be either an array or an object value. Default: \c
+  /// false.
+  bool strictRoot_;
+
+  /// \c true if dropped null placeholders are allowed. Default: \c false.
+  bool allowDroppedNullPlaceholders_;
+
+  /// \c true if numeric object keys are allowed. Default: \c false.
+  bool allowNumericKeys_;
+};
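+
+/* Usage sketch (assuming the matching amalgamated jsoncpp.cpp is compiled in):
+   a Features object is handed to Json::Reader (declared in the reader section
+   below) to choose between permissive and strictly conforming parsing.
+
+     Json::Reader lenient(Json::Features::all());        // comments allowed, any root
+     Json::Reader strict(Json::Features::strictMode());  // no comments, array/object root
+     Json::Value root;
+     bool ok = strict.parse("{ \"id\": 1 }", root);
+*/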
+
+} // namespace Json
+
+#endif // CPPTL_JSON_FEATURES_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/features.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/value.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_H_INCLUDED
+#define CPPTL_JSON_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "forwards.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <string>
+#include <vector>
+#include <exception>
+
+#ifndef JSON_USE_CPPTL_SMALLMAP
+#include <map>
+#else
+#include <cpptl/smallmap.h>
+#endif
+#ifdef JSON_USE_CPPTL
+#include <cpptl/forwards.h>
+#endif
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to
+// be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(push)
+#pragma warning(disable : 4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+/** \brief JSON (JavaScript Object Notation).
+ */
+namespace Json {
+
+/** Base class for all exceptions we throw.
+ *
+ * We use nothing but these internally. Of course, STL can throw others.
+ */
+class JSON_API Exception : public std::exception {
+public:
+  Exception(std::string const& msg);
+  ~Exception() throw() override;
+  char const* what() const throw() override;
+protected:
+  std::string msg_;
+};
+
+/** Exceptions which the user cannot easily avoid.
+ *
+ * E.g. out-of-memory (when we use malloc), stack-overflow, malicious input
+ * 
+ * \remark derived from Json::Exception
+ */
+class JSON_API RuntimeError : public Exception {
+public:
+  RuntimeError(std::string const& msg);
+};
+
+/** Exceptions thrown by JSON_ASSERT/JSON_FAIL macros.
+ *
+ * These are precondition-violations (user bugs) and internal errors (our bugs).
+ * 
+ * \remark derived from Json::Exception
+ */
+class JSON_API LogicError : public Exception {
+public:
+  LogicError(std::string const& msg);
+};
+
+/// used internally
+void throwRuntimeError(std::string const& msg);
+/// used internally
+void throwLogicError(std::string const& msg);
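+
+/* Usage sketch (assuming JSON_USE_EXCEPTION keeps its default value of 1):
+   failures reported through JSON_ASSERT/JSON_FAIL surface as Json::LogicError
+   or Json::RuntimeError, so callers can catch the common base class.
+
+     Json::Value v("not a number");
+     try {
+       int n = v.asInt();                // not convertible -> throws Json::LogicError
+       (void)n;
+     } catch (Json::Exception const& e) {
+       std::cerr << e.what() << '\n';    // needs <iostream>
+     }
+*/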
+
+/** \brief Type of the value held by a Value object.
+ */
+enum ValueType {
+  nullValue = 0, ///< 'null' value
+  intValue,      ///< signed integer value
+  uintValue,     ///< unsigned integer value
+  realValue,     ///< double value
+  stringValue,   ///< UTF-8 string value
+  booleanValue,  ///< bool value
+  arrayValue,    ///< array value (ordered list)
+  objectValue    ///< object value (collection of name/value pairs).
+};
+
+enum CommentPlacement {
+  commentBefore = 0,      ///< a comment placed on the line before a value
+  commentAfterOnSameLine, ///< a comment just after a value on the same line
+  commentAfter, ///< a comment on the line after a value (only makes sense for
+  /// the root value)
+  numberOfCommentPlacement
+};
+
+//# ifdef JSON_USE_CPPTL
+//   typedef CppTL::AnyEnumerator<const char *> EnumMemberNames;
+//   typedef CppTL::AnyEnumerator<const Value &> EnumValues;
+//# endif
+
+/** \brief Lightweight wrapper to tag static string.
+ *
+ * The Value constructor and objectValue member assignment take advantage of the
+ * StaticString and avoid the cost of string duplication when storing the
+ * string or the member name.
+ *
+ * Example of usage:
+ * \code
+ * Json::Value aValue( StaticString("some text") );
+ * Json::Value object;
+ * static const StaticString code("code");
+ * object[code] = 1234;
+ * \endcode
+ */
+class JSON_API StaticString {
+public:
+  explicit StaticString(const char* czstring) : c_str_(czstring) {}
+
+  operator const char*() const { return c_str_; }
+
+  const char* c_str() const { return c_str_; }
+
+private:
+  const char* c_str_;
+};
+
+/** \brief Represents a <a HREF="http://www.json.org">JSON</a> value.
+ *
+ * This class is a discriminated union wrapper that can represent a:
+ * - signed integer [range: Value::minInt - Value::maxInt]
+ * - unsigned integer (range: 0 - Value::maxUInt)
+ * - double
+ * - UTF-8 string
+ * - boolean
+ * - 'null'
+ * - an ordered list of Value
+ * - collection of name/value pairs (javascript object)
+ *
+ * The type of the held value is represented by a #ValueType and
+ * can be obtained using type().
+ *
+ * Values of an #objectValue or #arrayValue can be accessed using operator[]()
+ * methods.
+ * Non-const methods will automatically create a #nullValue element
+ * if it does not exist.
+ * The sequence of an #arrayValue will be automatically resized and initialized
+ * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
+ *
+ * The get() methods can be used to obtain a default value in case the
+ * required element does not exist.
+ *
+ * It is possible to iterate over the list of member names of an #objectValue
+ * using the getMemberNames() method.
+ *
+ * \note #Value string lengths fit in size_t, but keys must be < 2^30.
+ * (The reason is an implementation detail.) A #CharReader will raise an
+ * exception if a bound is exceeded to avoid security holes in your app,
+ * but the Value API does *not* check bounds. That is the responsibility
+ * of the caller.
+ */
+class JSON_API Value {
+  friend class ValueIteratorBase;
+public:
+  typedef std::vector<std::string> Members;
+  typedef ValueIterator iterator;
+  typedef ValueConstIterator const_iterator;
+  typedef Json::UInt UInt;
+  typedef Json::Int Int;
+#if defined(JSON_HAS_INT64)
+  typedef Json::UInt64 UInt64;
+  typedef Json::Int64 Int64;
+#endif // defined(JSON_HAS_INT64)
+  typedef Json::LargestInt LargestInt;
+  typedef Json::LargestUInt LargestUInt;
+  typedef Json::ArrayIndex ArrayIndex;
+
+  static const Value& null;  ///< We regret this reference to a global instance; prefer the simpler Value().
+  static const Value& nullRef;  ///< just a kludge for binary-compatibility; same as null
+  /// Minimum signed integer value that can be stored in a Json::Value.
+  static const LargestInt minLargestInt;
+  /// Maximum signed integer value that can be stored in a Json::Value.
+  static const LargestInt maxLargestInt;
+  /// Maximum unsigned integer value that can be stored in a Json::Value.
+  static const LargestUInt maxLargestUInt;
+
+  /// Minimum signed int value that can be stored in a Json::Value.
+  static const Int minInt;
+  /// Maximum signed int value that can be stored in a Json::Value.
+  static const Int maxInt;
+  /// Maximum unsigned int value that can be stored in a Json::Value.
+  static const UInt maxUInt;
+
+#if defined(JSON_HAS_INT64)
+  /// Minimum signed 64 bits int value that can be stored in a Json::Value.
+  static const Int64 minInt64;
+  /// Maximum signed 64 bits int value that can be stored in a Json::Value.
+  static const Int64 maxInt64;
+  /// Maximum unsigned 64 bits int value that can be stored in a Json::Value.
+  static const UInt64 maxUInt64;
+#endif // defined(JSON_HAS_INT64)
+
+private:
+#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+  class CZString {
+  public:
+    enum DuplicationPolicy {
+      noDuplication = 0,
+      duplicate,
+      duplicateOnCopy
+    };
+    CZString(ArrayIndex index);
+    CZString(char const* str, unsigned length, DuplicationPolicy allocate);
+    CZString(CZString const& other);
+#if JSON_HAS_RVALUE_REFERENCES
+    CZString(CZString&& other);
+#endif
+    ~CZString();
+    CZString& operator=(CZString other);
+    bool operator<(CZString const& other) const;
+    bool operator==(CZString const& other) const;
+    ArrayIndex index() const;
+    //const char* c_str() const; ///< \deprecated
+    char const* data() const;
+    unsigned length() const;
+    bool isStaticString() const;
+
+  private:
+    void swap(CZString& other);
+
+    struct StringStorage {
+      unsigned policy_: 2;
+      unsigned length_: 30; // 1GB max
+    };
+
+    char const* cstr_;  // actually, a prefixed string, unless policy is noDup
+    union {
+      ArrayIndex index_;
+      StringStorage storage_;
+    };
+  };
+
+public:
+#ifndef JSON_USE_CPPTL_SMALLMAP
+  typedef std::map<CZString, Value> ObjectValues;
+#else
+  typedef CppTL::SmallMap<CZString, Value> ObjectValues;
+#endif // ifndef JSON_USE_CPPTL_SMALLMAP
+#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+
+public:
+  /** \brief Create a default Value of the given type.
+
+    This is a very useful constructor.
+    To create an empty array, pass arrayValue.
+    To create an empty object, pass objectValue.
+    Another Value can then be set to this one by assignment.
+This is useful since clear() and resize() will not alter types.
+
+    Examples:
+\code
+Json::Value null_value; // null
+Json::Value arr_value(Json::arrayValue); // []
+Json::Value obj_value(Json::objectValue); // {}
+\endcode
+  */
+  Value(ValueType type = nullValue);
+  Value(Int value);
+  Value(UInt value);
+#if defined(JSON_HAS_INT64)
+  Value(Int64 value);
+  Value(UInt64 value);
+#endif // if defined(JSON_HAS_INT64)
+  Value(double value);
+  Value(const char* value); ///< Copy until the first 0. (NULL causes a seg-fault.)
+  Value(const char* begin, const char* end); ///< Copy all, incl zeroes.
+  /** \brief Constructs a value from a static string.
+
+   * Like the other string constructors, but does not duplicate the string for
+   * internal storage. The given string must remain alive after the call to this
+   * constructor.
+   * \note This works only for null-terminated strings. (We cannot change the
+   *   size of this class, so we have nowhere to store the length,
+   *   which might be computed later for various operations.)
+   *
+   * Example of usage:
+   * \code
+   * static StaticString foo("some text");
+   * Json::Value aValue(foo);
+   * \endcode
+   */
+  Value(const StaticString& value);
+  Value(const std::string& value); ///< Copy data() til size(). Embedded zeroes too.
+#ifdef JSON_USE_CPPTL
+  Value(const CppTL::ConstString& value);
+#endif
+  Value(bool value);
+  /// Deep copy.
+  Value(const Value& other);
+#if JSON_HAS_RVALUE_REFERENCES
+  /// Move constructor
+  Value(Value&& other);
+#endif
+  ~Value();
+
+  /// Deep copy, then swap(other).
+  /// \note Over-write existing comments. To preserve comments, use #swapPayload().
+  Value& operator=(Value other);
+  /// Swap everything.
+  void swap(Value& other);
+  /// Swap values but leave comments and source offsets in place.
+  void swapPayload(Value& other);
+
+  ValueType type() const;
+
+  /// Compare payload only, not comments etc.
+  bool operator<(const Value& other) const;
+  bool operator<=(const Value& other) const;
+  bool operator>=(const Value& other) const;
+  bool operator>(const Value& other) const;
+  bool operator==(const Value& other) const;
+  bool operator!=(const Value& other) const;
+  int compare(const Value& other) const;
+
+  const char* asCString() const; ///< Embedded zeroes could cause you trouble!
+  std::string asString() const; ///< Embedded zeroes are possible.
+  /** Get raw char* of string-value.
+   *  \return false if !string. (Seg-fault if str or end are NULL.)
+   */
+  bool getString(
+      char const** begin, char const** end) const;
+#ifdef JSON_USE_CPPTL
+  CppTL::ConstString asConstString() const;
+#endif
+  Int asInt() const;
+  UInt asUInt() const;
+#if defined(JSON_HAS_INT64)
+  Int64 asInt64() const;
+  UInt64 asUInt64() const;
+#endif // if defined(JSON_HAS_INT64)
+  LargestInt asLargestInt() const;
+  LargestUInt asLargestUInt() const;
+  float asFloat() const;
+  double asDouble() const;
+  bool asBool() const;
+
+  bool isNull() const;
+  bool isBool() const;
+  bool isInt() const;
+  bool isInt64() const;
+  bool isUInt() const;
+  bool isUInt64() const;
+  bool isIntegral() const;
+  bool isDouble() const;
+  bool isNumeric() const;
+  bool isString() const;
+  bool isArray() const;
+  bool isObject() const;
+
+  bool isConvertibleTo(ValueType other) const;
+
+  /// Number of values in array or object
+  ArrayIndex size() const;
+
+  /// \brief Return true if empty array, empty object, or null;
+  /// otherwise, false.
+  bool empty() const;
+
+  /// Return isNull()
+  bool operator!() const;
+
+  /// Remove all object members and array elements.
+  /// \pre type() is arrayValue, objectValue, or nullValue
+  /// \post type() is unchanged
+  void clear();
+
+  /// Resize the array to size elements.
+  /// New elements are initialized to null.
+  /// May only be called on nullValue or arrayValue.
+  /// \pre type() is arrayValue or nullValue
+  /// \post type() is arrayValue
+  void resize(ArrayIndex size);
+
+  /// Access an array element (zero-based index).
+  /// If the array contains fewer than index+1 elements, null values are
+  /// inserted into the array so that its size is index+1.
+  /// (You may need to say 'value[0u]' to get your compiler to distinguish
+  ///  this from the operator[] which takes a string.)
+  Value& operator[](ArrayIndex index);
+
+  /// Access an array element (zero-based index).
+  /// If the array contains fewer than index+1 elements, null values are
+  /// inserted into the array so that its size is index+1.
+  /// (You may need to say 'value[0u]' to get your compiler to distinguish
+  ///  this from the operator[] which takes a string.)
+  Value& operator[](int index);
+
+  /// Access an array element (zero-based index).
+  /// (You may need to say 'value[0u]' to get your compiler to distinguish
+  ///  this from the operator[] which takes a string.)
+  const Value& operator[](ArrayIndex index) const;
+
+  /// Access an array element (zero-based index).
+  /// (You may need to say 'value[0u]' to get your compiler to distinguish
+  ///  this from the operator[] which takes a string.)
+  const Value& operator[](int index) const;
+
+  /// If the array contains at least index+1 elements, returns the element
+  /// value, otherwise returns defaultValue.
+  Value get(ArrayIndex index, const Value& defaultValue) const;
+  Value get(ArrayIndex index, const Value& defaultValue) const;
+  /// Return true if index < size().
+  bool isValidIndex(ArrayIndex index) const;
+  /// \brief Append value to array at the end.
+  ///
+  /// Equivalent to jsonvalue[jsonvalue.size()] = value;
+  Value& append(const Value& value);
+
+  /// Access an object value by name, create a null member if it does not exist.
+  /// \note Because of our implementation, keys are limited to 2^30 -1 chars.
+  ///  Exceeding that will cause an exception.
+  Value& operator[](const char* key);
+  /// Access an object value by name, returns null if there is no member with
+  /// that name.
+  const Value& operator[](const char* key) const;
+  /// Access an object value by name, create a null member if it does not exist.
+  /// \param key may contain embedded nulls.
+  Value& operator[](const std::string& key);
+  /// Access an object value by name, returns null if there is no member with
+  /// that name.
+  /// \param key may contain embedded nulls.
+  const Value& operator[](const std::string& key) const;
+  /** \brief Access an object value by name, create a null member if it does not
+   exist.
+
+   * If the object has no entry for that name, then the member name used to store
+   * the new entry is not duplicated.
+   * Example of use:
+   * \code
+   * Json::Value object;
+   * static const StaticString code("code");
+   * object[code] = 1234;
+   * \endcode
+   */
+  Value& operator[](const StaticString& key);
+#ifdef JSON_USE_CPPTL
+  /// Access an object value by name, create a null member if it does not exist.
+  Value& operator[](const CppTL::ConstString& key);
+  /// Access an object value by name, returns null if there is no member with
+  /// that name.
+  const Value& operator[](const CppTL::ConstString& key) const;
+#endif
+  /// Return the member named key if it exists, defaultValue otherwise.
+  /// \note deep copy
+  Value get(const char* key, const Value& defaultValue) const;
+  /// Return the member named key if it exists, defaultValue otherwise.
+  /// \note deep copy
+  /// \note key may contain embedded nulls.
+  Value get(const char* begin, const char* end, const Value& defaultValue) const;
+  /// Return the member named key if it exists, defaultValue otherwise.
+  /// \note deep copy
+  /// \param key may contain embedded nulls.
+  Value get(const std::string& key, const Value& defaultValue) const;
+#ifdef JSON_USE_CPPTL
+  /// Return the member named key if it exists, defaultValue otherwise.
+  /// \note deep copy
+  Value get(const CppTL::ConstString& key, const Value& defaultValue) const;
+#endif
+  /// Most general and efficient version of isMember()const, get()const,
+  /// and operator[]const
+  /// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
+  Value const* find(char const* begin, char const* end) const;
+  /// Most general and efficient version of object-mutators.
+  /// \note As stated elsewhere, behavior is undefined if (end-begin) >= 2^30
+  /// \return non-zero, but JSON_ASSERT if this is neither object nor nullValue.
+  Value const* demand(char const* begin, char const* end);
+  /// \brief Remove and return the named member.
+  ///
+  /// Do nothing if it did not exist.
+  /// \return the removed Value, or null.
+  /// \pre type() is objectValue or nullValue
+  /// \post type() is unchanged
+  /// \deprecated
+  Value removeMember(const char* key);
+  /// Same as removeMember(const char*)
+  /// \param key may contain embedded nulls.
+  /// \deprecated
+  Value removeMember(const std::string& key);
+  /// Same as removeMember(const char* begin, const char* end, Value* removed),
+  /// but 'key' is null-terminated.
+  bool removeMember(const char* key, Value* removed);
+  /** \brief Remove the named map member.
+
+      Update 'removed' iff removed.
+      \param key may contain embedded nulls.
+      \return true iff removed (no exceptions)
+  */
+  bool removeMember(std::string const& key, Value* removed);
+  /// Same as removeMember(std::string const& key, Value* removed)
+  bool removeMember(const char* begin, const char* end, Value* removed);
+  /** \brief Remove the indexed array element.
+
+      O(n) expensive operations.
+      Update 'removed' iff removed.
+      \return true iff removed (no exceptions)
+  */
+  bool removeIndex(ArrayIndex i, Value* removed);
+
+  /// Return true if the object has a member named key.
+  /// \note 'key' must be null-terminated.
+  bool isMember(const char* key) const;
+  /// Return true if the object has a member named key.
+  /// \param key may contain embedded nulls.
+  bool isMember(const std::string& key) const;
+  /// Same as isMember(std::string const& key)const
+  bool isMember(const char* begin, const char* end) const;
+#ifdef JSON_USE_CPPTL
+  /// Return true if the object has a member named key.
+  bool isMember(const CppTL::ConstString& key) const;
+#endif
+
+  /// \brief Return a list of the member names.
+  ///
+  /// If null, return an empty list.
+  /// \pre type() is objectValue or nullValue
+  /// \post if type() was nullValue, it remains nullValue
+  Members getMemberNames() const;
+
+  //# ifdef JSON_USE_CPPTL
+  //      EnumMemberNames enumMemberNames() const;
+  //      EnumValues enumValues() const;
+  //# endif
+
+  /// \deprecated Always pass len.
+  JSONCPP_DEPRECATED("Use setComment(std::string const&) instead.")
+  void setComment(const char* comment, CommentPlacement placement);
+  /// Comments must be //... or /* ... */
+  void setComment(const char* comment, size_t len, CommentPlacement placement);
+  /// Comments must be //... or /* ... */
+  void setComment(const std::string& comment, CommentPlacement placement);
+  bool hasComment(CommentPlacement placement) const;
+  /// Include delimiters and embedded newlines.
+  std::string getComment(CommentPlacement placement) const;
+
+  std::string toStyledString() const;
+
+  const_iterator begin() const;
+  const_iterator end() const;
+
+  iterator begin();
+  iterator end();
+
+  // Accessors for the [start, limit) range of bytes within the JSON text from
+  // which this value was parsed, if any.
+  void setOffsetStart(size_t start);
+  void setOffsetLimit(size_t limit);
+  size_t getOffsetStart() const;
+  size_t getOffsetLimit() const;
+
+private:
+  void initBasic(ValueType type, bool allocated = false);
+
+  Value& resolveReference(const char* key);
+  Value& resolveReference(const char* key, const char* end);
+
+  struct CommentInfo {
+    CommentInfo();
+    ~CommentInfo();
+
+    void setComment(const char* text, size_t len);
+
+    char* comment_;
+  };
+
+  // struct MemberNamesTransform
+  //{
+  //   typedef const char *result_type;
+  //   const char *operator()( const CZString &name ) const
+  //   {
+  //      return name.c_str();
+  //   }
+  //};
+
+  union ValueHolder {
+    LargestInt int_;
+    LargestUInt uint_;
+    double real_;
+    bool bool_;
+    char* string_;  // actually ptr to unsigned, followed by str, unless !allocated_
+    ObjectValues* map_;
+  } value_;
+  ValueType type_ : 8;
+  unsigned int allocated_ : 1; // Notes: if declared as bool, bitfield is useless.
+                               // If not allocated_, string_ must be null-terminated.
+  CommentInfo* comments_;
+
+  // [start, limit) byte offsets in the source JSON text from which this Value
+  // was extracted.
+  size_t start_;
+  size_t limit_;
+};
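+
+/* Usage sketch: building and querying a Value with the members declared above.
+
+     Json::Value root(Json::objectValue);
+     root["name"] = "example";                  // creates the member on first access
+     root["items"].append(1);                   // null member silently becomes an array
+     root["items"].append(2);
+
+     std::string name = root.get("name", "unknown").asString();
+     Json::Value::Members keys = root.getMemberNames();  // "items", "name"
+     Json::ArrayIndex count = root["items"].size();      // 2
+*/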
+
+/** \brief Experimental and untested: represents an element of the "path" to
+ * access a node.
+ */
+class JSON_API PathArgument {
+public:
+  friend class Path;
+
+  PathArgument();
+  PathArgument(ArrayIndex index);
+  PathArgument(const char* key);
+  PathArgument(const std::string& key);
+
+private:
+  enum Kind {
+    kindNone = 0,
+    kindIndex,
+    kindKey
+  };
+  std::string key_;
+  ArrayIndex index_;
+  Kind kind_;
+};
+
+/** \brief Experimental and untested: represents a "path" to access a node.
+ *
+ * Syntax:
+ * - "." => root node
+ * - ".[n]" => elements at index 'n' of root node (an array value)
+ * - ".name" => member named 'name' of root node (an object value)
+ * - ".name1.name2.name3"
+ * - ".[0][1][2].name1[3]"
+ * - ".%" => member name is provided as parameter
+ * - ".[%]" => index is provided as parameter
+ */
+class JSON_API Path {
+public:
+  Path(const std::string& path,
+       const PathArgument& a1 = PathArgument(),
+       const PathArgument& a2 = PathArgument(),
+       const PathArgument& a3 = PathArgument(),
+       const PathArgument& a4 = PathArgument(),
+       const PathArgument& a5 = PathArgument());
+
+  const Value& resolve(const Value& root) const;
+  Value resolve(const Value& root, const Value& defaultValue) const;
+  /// Creates the "path" to access the specified node and returns a reference on
+  /// the node.
+  Value& make(Value& root) const;
+
+private:
+  typedef std::vector<const PathArgument*> InArgs;
+  typedef std::vector<PathArgument> Args;
+
+  void makePath(const std::string& path, const InArgs& in);
+  void addPathInArg(const std::string& path,
+                    const InArgs& in,
+                    InArgs::const_iterator& itInArg,
+                    PathArgument::Kind kind);
+  void invalidPath(const std::string& path, int location);
+
+  Args args_;
+};
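+
+/* Usage sketch for the Path API (experimental and untested upstream, per the
+   comment above), resolving a nested node with a default fallback:
+
+     Json::Value root;
+     root["servers"][0u]["port"] = 8080;
+
+     Json::Path path(".servers[0].port");
+     Json::Value port = path.resolve(root, Json::Value(0));  // 8080, or 0 if absent
+*/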
+
+/** \brief base class for Value iterators.
+ *
+ */
+class JSON_API ValueIteratorBase {
+public:
+  typedef std::bidirectional_iterator_tag iterator_category;
+  typedef unsigned int size_t;
+  typedef int difference_type;
+  typedef ValueIteratorBase SelfType;
+
+  bool operator==(const SelfType& other) const { return isEqual(other); }
+
+  bool operator!=(const SelfType& other) const { return !isEqual(other); }
+
+  difference_type operator-(const SelfType& other) const {
+    return other.computeDistance(*this);
+  }
+
+  /// Return either the index or the member name of the referenced value as a
+  /// Value.
+  Value key() const;
+
+  /// Return the index of the referenced Value, or -1 if it is not an arrayValue.
+  UInt index() const;
+
+  /// Return the member name of the referenced Value, or "" if it is not an
+  /// objectValue.
+  /// \note Avoid `c_str()` on result, as embedded zeroes are possible.
+  std::string name() const;
+
+  /// Return the member name of the referenced Value. "" if it is not an
+  /// objectValue.
+  /// \deprecated This cannot be used for UTF-8 strings, since there can be embedded nulls.
+  JSONCPP_DEPRECATED("Use `key = name();` instead.")
+  char const* memberName() const;
+  /// Return the member name of the referenced Value, or NULL if it is not an
+  /// objectValue.
+  /// \note Better version than memberName(). Allows embedded nulls.
+  char const* memberName(char const** end) const;
+
+protected:
+  Value& deref() const;
+
+  void increment();
+
+  void decrement();
+
+  difference_type computeDistance(const SelfType& other) const;
+
+  bool isEqual(const SelfType& other) const;
+
+  void copy(const SelfType& other);
+
+private:
+  Value::ObjectValues::iterator current_;
+  // Indicates that iterator is for a null value.
+  bool isNull_;
+
+public:
+  // For some reason, BORLAND needs these at the end, rather
+  // than earlier. No idea why.
+  ValueIteratorBase();
+  explicit ValueIteratorBase(const Value::ObjectValues::iterator& current);
+};
+
+/** \brief const iterator for object and array value.
+ *
+ */
+class JSON_API ValueConstIterator : public ValueIteratorBase {
+  friend class Value;
+
+public:
+  typedef const Value value_type;
+  //typedef unsigned int size_t;
+  //typedef int difference_type;
+  typedef const Value& reference;
+  typedef const Value* pointer;
+  typedef ValueConstIterator SelfType;
+
+  ValueConstIterator();
+  ValueConstIterator(ValueIterator const& other);
+
+private:
+/*! \internal Use by Value to create an iterator.
+ */
+  explicit ValueConstIterator(const Value::ObjectValues::iterator& current);
+public:
+  SelfType& operator=(const ValueIteratorBase& other);
+
+  SelfType operator++(int) {
+    SelfType temp(*this);
+    ++*this;
+    return temp;
+  }
+
+  SelfType operator--(int) {
+    SelfType temp(*this);
+    --*this;
+    return temp;
+  }
+
+  SelfType& operator--() {
+    decrement();
+    return *this;
+  }
+
+  SelfType& operator++() {
+    increment();
+    return *this;
+  }
+
+  reference operator*() const { return deref(); }
+
+  pointer operator->() const { return &deref(); }
+};
+
+/** \brief Iterator for object and array value.
+ */
+class JSON_API ValueIterator : public ValueIteratorBase {
+  friend class Value;
+
+public:
+  typedef Value value_type;
+  typedef unsigned int size_t;
+  typedef int difference_type;
+  typedef Value& reference;
+  typedef Value* pointer;
+  typedef ValueIterator SelfType;
+
+  ValueIterator();
+  explicit ValueIterator(const ValueConstIterator& other);
+  ValueIterator(const ValueIterator& other);
+
+private:
+/*! \internal Use by Value to create an iterator.
+ */
+  explicit ValueIterator(const Value::ObjectValues::iterator& current);
+public:
+  SelfType& operator=(const SelfType& other);
+
+  SelfType operator++(int) {
+    SelfType temp(*this);
+    ++*this;
+    return temp;
+  }
+
+  SelfType operator--(int) {
+    SelfType temp(*this);
+    --*this;
+    return temp;
+  }
+
+  SelfType& operator--() {
+    decrement();
+    return *this;
+  }
+
+  SelfType& operator++() {
+    increment();
+    return *this;
+  }
+
+  reference operator*() const { return deref(); }
+
+  pointer operator->() const { return &deref(); }
+};
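+
+/* Usage sketch: both iterator flavours walk arrays and objects alike; key()
+   yields the index for arrays and the member name for objects.
+
+     Json::Value obj(Json::objectValue);
+     obj["a"] = 1;
+     obj["b"] = 2;
+
+     for (Json::Value::const_iterator it = obj.begin(); it != obj.end(); ++it) {
+       std::string key = it.key().asString();  // "a", then "b"
+       int number = (*it).asInt();             // 1, then 2
+     }
+*/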
+
+} // namespace Json
+
+
+namespace std {
+/// Specialize std::swap() for Json::Value.
+template<>
+inline void swap(Json::Value& a, Json::Value& b) { a.swap(b); }
+}
+
+
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+#endif // CPPTL_JSON_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/value.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/reader.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_READER_H_INCLUDED
+#define CPPTL_JSON_READER_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "features.h"
+#include "value.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <deque>
+#include <iosfwd>
+#include <stack>
+#include <string>
+#include <istream>
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to
+// be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(push)
+#pragma warning(disable : 4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+namespace Json {
+
+/** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a
+ *Value.
+ *
+ * \deprecated Use CharReader and CharReaderBuilder.
+ */
+class JSON_API Reader {
+public:
+  typedef char Char;
+  typedef const Char* Location;
+
+  /** \brief An error tagged with where in the JSON text it was encountered.
+   *
+   * The offsets give the [start, limit) range of bytes within the text. Note
+   * that this is bytes, not codepoints.
+   *
+   */
+  struct StructuredError {
+    size_t offset_start;
+    size_t offset_limit;
+    std::string message;
+  };
+
+  /** \brief Constructs a Reader allowing all features
+   * for parsing.
+   */
+  Reader();
+
+  /** \brief Constructs a Reader allowing the specified feature set
+   * for parsing.
+   */
+  Reader(const Features& features);
+
+  /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
+   * document.
+   * \param document UTF-8 encoded string containing the document to read.
+   * \param root [out] Contains the root value of the document if it was
+   *             successfully parsed.
+   * \param collectComments \c true to collect comments and allow writing them
+   *                        back during serialization, \c false to discard
+   *                        comments. This parameter is ignored if
+   *                        Features::allowComments_ is \c false.
+   * \return \c true if the document was successfully parsed, \c false if an
+   * error occurred.
+   */
+  bool
+  parse(const std::string& document, Value& root, bool collectComments = true);
+
+  /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
+   document.
+   * \param beginDoc Pointer to the beginning of the UTF-8 encoded string of
+   *                 the document to read.
+   * \param endDoc Pointer to the end of the UTF-8 encoded string of the
+   *               document to read. Must be >= beginDoc.
+   * \param root [out] Contains the root value of the document if it was
+   *             successfully parsed.
+   * \param collectComments \c true to collect comments and allow writing them
+   *                        back during serialization, \c false to discard
+   *                        comments. This parameter is ignored if
+   *                        Features::allowComments_ is \c false.
+   * \return \c true if the document was successfully parsed, \c false if an
+   *         error occurred.
+   */
+  bool parse(const char* beginDoc,
+             const char* endDoc,
+             Value& root,
+             bool collectComments = true);
+
+  /// \brief Parse from input stream.
+  /// \see Json::operator>>(std::istream&, Json::Value&).
+  bool parse(std::istream& is, Value& root, bool collectComments = true);
+
+  /** \brief Returns a user friendly string that lists errors in the parsed
+   * document.
+   * \return Formatted error message with the list of errors and their location
+   *         in the parsed document. An empty string is returned if no error
+   *         occurred during parsing.
+   * \deprecated Use getFormattedErrorMessages() instead (typo fix).
+   */
+  JSONCPP_DEPRECATED("Use getFormattedErrorMessages() instead.")
+  std::string getFormatedErrorMessages() const;
+
+  /** \brief Returns a user friendly string that lists errors in the parsed
+   * document.
+   * \return Formatted error message with the list of errors and their location
+   *         in the parsed document. An empty string is returned if no error
+   *         occurred during parsing.
+   */
+  std::string getFormattedErrorMessages() const;
+
+  /** \brief Returns a vector of structured errors encountered while parsing.
+   * \return A (possibly empty) vector of StructuredError objects. Currently
+   *         only one error can be returned, but the caller should tolerate
+   *         multiple errors. This can occur if the parser recovers from a
+   *         non-fatal parse error and then encounters additional errors.
+   */
+  std::vector<StructuredError> getStructuredErrors() const;
+
+  /** \brief Add a semantic error message.
+   * \param value JSON Value location associated with the error
+   * \param message The error message.
+   * \return \c true if the error was successfully added, \c false if the
+   * Value offset exceeds the document size.
+   */
+  bool pushError(const Value& value, const std::string& message);
+
+  /** \brief Add a semantic error message with extra context.
+   * \param value JSON Value location associated with the error
+   * \param message The error message.
+   * \param extra Additional JSON Value location to contextualize the error
+   * \return \c true if the error was successfully added, \c false if either
+   * Value offset exceeds the document size.
+   */
+  bool pushError(const Value& value, const std::string& message, const Value& extra);
+
+  /** \brief Return whether there are any errors.
+   * \return \c true if there are no errors to report, \c false if errors
+   *         have occurred.
+   */
+  bool good() const;
+
+private:
+  enum TokenType {
+    tokenEndOfStream = 0,
+    tokenObjectBegin,
+    tokenObjectEnd,
+    tokenArrayBegin,
+    tokenArrayEnd,
+    tokenString,
+    tokenNumber,
+    tokenTrue,
+    tokenFalse,
+    tokenNull,
+    tokenArraySeparator,
+    tokenMemberSeparator,
+    tokenComment,
+    tokenError
+  };
+
+  class Token {
+  public:
+    TokenType type_;
+    Location start_;
+    Location end_;
+  };
+
+  class ErrorInfo {
+  public:
+    Token token_;
+    std::string message_;
+    Location extra_;
+  };
+
+  typedef std::deque<ErrorInfo> Errors;
+
+  bool readToken(Token& token);
+  void skipSpaces();
+  bool match(Location pattern, int patternLength);
+  bool readComment();
+  bool readCStyleComment();
+  bool readCppStyleComment();
+  bool readString();
+  void readNumber();
+  bool readValue();
+  bool readObject(Token& token);
+  bool readArray(Token& token);
+  bool decodeNumber(Token& token);
+  bool decodeNumber(Token& token, Value& decoded);
+  bool decodeString(Token& token);
+  bool decodeString(Token& token, std::string& decoded);
+  bool decodeDouble(Token& token);
+  bool decodeDouble(Token& token, Value& decoded);
+  bool decodeUnicodeCodePoint(Token& token,
+                              Location& current,
+                              Location end,
+                              unsigned int& unicode);
+  bool decodeUnicodeEscapeSequence(Token& token,
+                                   Location& current,
+                                   Location end,
+                                   unsigned int& unicode);
+  bool addError(const std::string& message, Token& token, Location extra = 0);
+  bool recoverFromError(TokenType skipUntilToken);
+  bool addErrorAndRecover(const std::string& message,
+                          Token& token,
+                          TokenType skipUntilToken);
+  void skipUntilSpace();
+  Value& currentValue();
+  Char getNextChar();
+  void
+  getLocationLineAndColumn(Location location, int& line, int& column) const;
+  std::string getLocationLineAndColumn(Location location) const;
+  void addComment(Location begin, Location end, CommentPlacement placement);
+  void skipCommentTokens(Token& token);
+
+  typedef std::stack<Value*> Nodes;
+  Nodes nodes_;
+  Errors errors_;
+  std::string document_;
+  Location begin_;
+  Location end_;
+  Location current_;
+  Location lastValueEnd_;
+  Value* lastValue_;
+  std::string commentsBefore_;
+  Features features_;
+  bool collectComments_;
+};  // Reader
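+
+/* Usage sketch for the (deprecated) Reader interface; new code should prefer
+   the CharReader/CharReaderBuilder API declared below.
+
+     Json::Reader reader;
+     Json::Value root;
+     if (!reader.parse("{ \"id\": 1, \"tags\": [\"a\", \"b\"] }", root)) {
+       std::cerr << reader.getFormattedErrorMessages();  // needs <iostream>
+     }
+*/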
+
+/** Interface for reading JSON from a char array.
+ */
+class JSON_API CharReader {
+public:
+  virtual ~CharReader() {}
+  /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a>
+   * document. The document must be a UTF-8 encoded string.
+   *
+   * \param beginDoc Pointer to the beginning of the UTF-8 encoded string of
+   *                 the document to read.
+   * \param endDoc Pointer to the end of the UTF-8 encoded string of the
+   *               document to read. Must be >= beginDoc.
+   * \param root [out] Contains the root value of the document if it was
+   *             successfully parsed.
+   * \param errs [out] If not NULL, receives a user friendly string that lists
+   *             the errors in the parsed document.
+   * \return \c true if the document was successfully parsed, \c false if an
+   *         error occurred.
+   */
+  virtual bool parse(
+      char const* beginDoc, char const* endDoc,
+      Value* root, std::string* errs) = 0;
+
+  class JSON_API Factory {
+  public:
+    virtual ~Factory() {}
+    /** \brief Allocate a CharReader via operator new().
+     * \throw std::exception if something goes wrong (e.g. invalid settings)
+     */
+    virtual CharReader* newCharReader() const = 0;
+  };  // Factory
+};  // CharReader
+
+/** \brief Build a CharReader implementation.
+
+Usage:
+\code
+  using namespace Json;
+  CharReaderBuilder builder;
+  builder["collectComments"] = false;
+  Value value;
+  std::string errs;
+  bool ok = parseFromStream(builder, std::cin, &value, &errs);
+\endcode
+*/
+class JSON_API CharReaderBuilder : public CharReader::Factory {
+public:
+  // Note: We use a Json::Value so that we can add data-members to this class
+  // without a major version bump.
+  /** Configuration of this builder.
+    Available settings (case-sensitive):
+    - `"collectComments": false or true`
+      - true to collect comment and allow writing them
+        back during serialization, false to discard comments.
+        This parameter is ignored if allowComments is false.
+    - `"allowComments": false or true`
+      - true if comments are allowed.
+    - `"strictRoot": false or true`
+      - true if root must be either an array or an object value
+    - `"allowDroppedNullPlaceholders": false or true`
+      - true if dropped null placeholders are allowed. (See StreamWriterBuilder.)
+    - `"allowNumericKeys": false or true`
+      - true if numeric object keys are allowed.
+    - `"allowSingleQuotes": false or true`
+      - true if '' are allowed for strings (both keys and values)
+    - `"stackLimit": integer`
+      - Exceeding stackLimit (recursive depth of `readValue()`) will
+        cause an exception.
+      - This is a security issue (seg-faults caused by deeply nested JSON),
+        so the default is low.
+    - `"failIfExtra": false or true`
+      - If true, `parse()` returns false when extra non-whitespace trails
+        the JSON value in the input string.
+    - `"rejectDupKeys": false or true`
+      - If true, `parse()` returns false when a key is duplicated within an object.
+    - `"allowSpecialFloats": false or true`
+      - If true, special float values (NaNs and infinities) are allowed and
+        their values round-trip without loss.
+
+    You can examine `settings_` yourself
+    to see the defaults. You can also write and read them just like any
+    JSON Value.
+    \sa setDefaults()
+    */
+  Json::Value settings_;
+
+  CharReaderBuilder();
+  ~CharReaderBuilder() override;
+
+  CharReader* newCharReader() const override;
+
+  /** \return true if 'settings' are legal and consistent;
+   *   otherwise, indicate bad settings via 'invalid'.
+   */
+  bool validate(Json::Value* invalid) const;
+
+  /** A simple way to update a specific setting.
+   */
+  Value& operator[](std::string key);
+
+  /** Called by ctor, but you can use this to reset settings_.
+   * \pre 'settings' != NULL (but Json::null is fine)
+   * \remark Defaults:
+   * \snippet src/lib_json/json_reader.cpp CharReaderBuilderDefaults
+   */
+  static void setDefaults(Json::Value* settings);
+  /** Same as old Features::strictMode().
+   * \pre 'settings' != NULL (but Json::null is fine)
+   * \remark Defaults:
+   * \snippet src/lib_json/json_reader.cpp CharReaderBuilderStrictMode
+   */
+  static void strictMode(Json::Value* settings);
+};
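+
+/* Usage sketch: parsing from an in-memory buffer with a configured reader
+   (the class comment above shows the parseFromStream() route).
+
+     Json::CharReaderBuilder builder;
+     builder["failIfExtra"] = true;            // reject trailing non-whitespace
+     builder["rejectDupKeys"] = true;          // duplicated object keys become errors
+
+     std::string doc = "{ \"id\": 1 }";
+     Json::Value root;
+     std::string errs;
+     std::unique_ptr<Json::CharReader> reader(builder.newCharReader());  // needs <memory>
+     bool ok = reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs);
+*/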
+
+/** Consume entire stream and use its begin/end.
+  * Someday we might have a real StreamReader, but for now this
+  * is convenient.
+  */
+bool JSON_API parseFromStream(
+    CharReader::Factory const&,
+    std::istream&,
+    Value* root, std::string* errs);
+
+/** \brief Read from 'sin' into 'root'.
+
+ Always keep comments from the input JSON.
+
+ This can be used to read a file into a particular sub-object.
+ For example:
+ \code
+ Json::Value root;
+ cin >> root["dir"]["file"];
+ cout << root;
+ \endcode
+ Result:
+ \verbatim
+ {
+ "dir": {
+     "file": {
+     // The input stream JSON would be nested here.
+     }
+ }
+ }
+ \endverbatim
+ \throw std::exception on parse error.
+ \see Json::operator<<()
+*/
+JSON_API std::istream& operator>>(std::istream&, Value&);
+
+} // namespace Json
+
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+#endif // CPPTL_JSON_READER_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/reader.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/writer.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_WRITER_H_INCLUDED
+#define JSON_WRITER_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "value.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <vector>
+#include <string>
+#include <ostream>
+
+// Disable warning C4251: <data member>: <type> needs to have dll-interface to
+// be used by...
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(push)
+#pragma warning(disable : 4251)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+namespace Json {
+
+class Value;
+
+/**
+
+Usage:
+\code
+  using namespace Json;
+  void writeToStdout(StreamWriter::Factory const& factory, Value const& value) {
+    std::unique_ptr<StreamWriter> const writer(
+      factory.newStreamWriter());
+    writer->write(value, &std::cout);
+    std::cout << std::endl;  // add lf and flush
+  }
+\endcode
+*/
+class JSON_API StreamWriter {
+protected:
+  std::ostream* sout_;  // not owned; will not delete
+public:
+  StreamWriter();
+  virtual ~StreamWriter();
+  /** Write Value into document as configured in sub-class.
+      Do not take ownership of sout, but maintain a reference during function.
+      \pre sout != NULL
+      \return zero on success (For now, we always return zero, so check the stream instead.)
+      \throw std::exception possibly, depending on configuration
+   */
+  virtual int write(Value const& root, std::ostream* sout) = 0;
+
+  /** \brief A simple abstract factory.
+   */
+  class JSON_API Factory {
+  public:
+    virtual ~Factory();
+    /** \brief Allocate a StreamWriter via operator new().
+     * \throw std::exception if something goes wrong (e.g. invalid settings)
+     */
+    virtual StreamWriter* newStreamWriter() const = 0;
+  };  // Factory
+};  // StreamWriter
+
+/** \brief Write into stringstream, then return string, for convenience.
+ * A StreamWriter will be created from the factory, used, and then deleted.
+ */
+std::string JSON_API writeString(StreamWriter::Factory const& factory, Value const& root);
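+
+/* Usage sketch: writeString() is the one-call counterpart of the StreamWriter
+   example above. Given some Json::Value root:
+
+     Json::StreamWriterBuilder builder;        // declared just below
+     builder["indentation"] = "  ";
+     std::string document = Json::writeString(builder, root);
+*/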
+
+
+/** \brief Build a StreamWriter implementation.
+
+Usage:
+\code
+  using namespace Json;
+  Value value = ...;
+  StreamWriterBuilder builder;
+  builder["commentStyle"] = "None";
+  builder["indentation"] = "   ";  // or whatever you like
+  std::unique_ptr<Json::StreamWriter> writer(
+      builder.newStreamWriter());
+  writer->write(value, &std::cout);
+  std::cout << std::endl;  // add lf and flush
+\endcode
+*/
+class JSON_API StreamWriterBuilder : public StreamWriter::Factory {
+public:
+  // Note: We use a Json::Value so that we can add data-members to this class
+  // without a major version bump.
+  /** Configuration of this builder.
+    Available settings (case-sensitive):
+    - "commentStyle": "None" or "All"
+    - "indentation":  "<anything>"
+    - "enableYAMLCompatibility": false or true
+      - slightly change the whitespace around colons
+    - "dropNullPlaceholders": false or true
+      - Drop the "null" string from the writer's output for nullValues.
+        Strictly speaking, this is not valid JSON. But when the output is being
+        fed to a browser's Javascript, it makes for smaller output and the
+        browser can handle the output just fine.
+    - "useSpecialFloats": false or true
+      - If true, outputs non-finite floating point values in the following way:
+        NaN values as "NaN", positive infinity as "Infinity", and negative infinity
+        as "-Infinity".
+
+    You can examine `settings_` yourself
+    to see the defaults. You can also write and read them just like any
+    JSON Value.
+    \sa setDefaults()
+    */
+  Json::Value settings_;
+
+  StreamWriterBuilder();
+  ~StreamWriterBuilder() override;
+
+  /**
+   * \throw std::exception if something goes wrong (e.g. invalid settings)
+   */
+  StreamWriter* newStreamWriter() const override;
+
+  /** \return true if 'settings' are legal and consistent;
+   *   otherwise, indicate bad settings via 'invalid'.
+   */
+  bool validate(Json::Value* invalid) const;
+  /** A simple way to update a specific setting.
+   */
+  Value& operator[](std::string key);
+
+  /** Called by ctor, but you can use this to reset settings_.
+   * \pre 'settings' != NULL (but Json::null is fine)
+   * \remark Defaults:
+   * \snippet src/lib_json/json_writer.cpp StreamWriterBuilderDefaults
+   */
+  static void setDefaults(Json::Value* settings);
+};
+
+/** \brief Abstract class for writers.
+ * \deprecated Use StreamWriter. (And really, this is an implementation detail.)
+ */
+class JSON_API Writer {
+public:
+  virtual ~Writer();
+
+  virtual std::string write(const Value& root) = 0;
+};
+
+/** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format
+ * without formatting (not human friendly).
+ *
+ * The JSON document is written on a single line. It is not intended for
+ * 'human' consumption, but may be useful to support features such as RPC
+ * where bandwidth is limited.
+ * \sa Reader, Value
+ * \deprecated Use StreamWriterBuilder.
+ */
+class JSON_API FastWriter : public Writer {
+
+public:
+  FastWriter();
+  ~FastWriter() override {}
+
+  void enableYAMLCompatibility();
+
+  /** \brief Drop the "null" string from the writer's output for nullValues.
+   * Strictly speaking, this is not valid JSON. But when the output is being
+   * fed to a browser's Javascript, it makes for smaller output and the
+   * browser can handle the output just fine.
+   */
+  void dropNullPlaceholders();
+
+  void omitEndingLineFeed();
+
+public: // overridden from Writer
+  std::string write(const Value& root) override;
+
+private:
+  void writeValue(const Value& value);
+
+  std::string document_;
+  bool yamlCompatiblityEnabled_;
+  bool dropNullPlaceholders_;
+  bool omitEndingLineFeed_;
+};
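+
+/* Usage sketch for the deprecated FastWriter (compact, single-line output):
+
+     Json::Value root;
+     root["ok"] = true;
+
+     Json::FastWriter writer;
+     writer.omitEndingLineFeed();               // drop the trailing '\n'
+     std::string compact = writer.write(root);  // {"ok":true}
+*/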
+
+/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a
+ * human friendly way.
+ *
+ * The rules for line breaks and indentation are as follows:
+ * - Object value:
+ *     - if empty, then print {} without indent or line break
+ *     - if not empty, then print '{', line break & indent, print one value per
+ *       line, and then unindent, line break and print '}'.
+ * - Array value:
+ *     - if empty, then print [] without indent or line break
+ *     - if the array contains no object value and no non-empty array value,
+ *       and all the values fit on one line, then print the array on a
+ *       single line.
+ *     - otherwise, if the values do not fit on one line, or the array contains
+ *       an object or a non-empty array, then print one value per line.
+ *
+ * If the Value has comments, then they are output according to their
+ * #CommentPlacement.
+ *
+ * \sa Reader, Value, Value::setComment()
+ * \deprecated Use StreamWriterBuilder.
+ */
+class JSON_API StyledWriter : public Writer {
+public:
+  StyledWriter();
+  ~StyledWriter() override {}
+
+public: // overridden from Writer
+  /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
+   * \param root Value to serialize.
+   * \return String containing the JSON document that represents the root value.
+   */
+  std::string write(const Value& root) override;
+
+private:
+  void writeValue(const Value& value);
+  void writeArrayValue(const Value& value);
+  bool isMultineArray(const Value& value);
+  void pushValue(const std::string& value);
+  void writeIndent();
+  void writeWithIndent(const std::string& value);
+  void indent();
+  void unindent();
+  void writeCommentBeforeValue(const Value& root);
+  void writeCommentAfterValueOnSameLine(const Value& root);
+  bool hasCommentForValue(const Value& value);
+  static std::string normalizeEOL(const std::string& text);
+
+  typedef std::vector<std::string> ChildValues;
+
+  ChildValues childValues_;
+  std::string document_;
+  std::string indentString_;
+  int rightMargin_;
+  int indentSize_;
+  bool addChildValues_;
+};
+
+/** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a
+ * human friendly way, to a stream rather than to a string.
+ *
+ * The rules for line breaks and indentation are as follows:
+ * - Object value:
+ *     - if empty, print {} without indent or line break
+ *     - if not empty, print '{', line break & indent, print one value per
+ *       line, then unindent, line break and print '}'.
+ * - Array value:
+ *     - if empty, print [] without indent or line break
+ *     - if the array contains no object value and no non-empty array, and
+ *       all the values fit on one line, then print the array on a single
+ *       line.
+ *     - otherwise, if the values do not fit on one line, or the array
+ *       contains an object or a non-empty array, then print one value per
+ *       line.
+ *
+ * If the Value has comments, they are output according to their
+ * #CommentPlacement.
+ *
+ * \param indentation Each level will be indented by this amount extra.
+ * \sa Reader, Value, Value::setComment()
+ * \deprecated Use StreamWriterBuilder.
+ */
+class JSON_API StyledStreamWriter {
+public:
+  StyledStreamWriter(std::string indentation = "\t");
+  ~StyledStreamWriter() {}
+
+public:
+  /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
+   * \param out Stream to write to. (Can be ostringstream, e.g.)
+   * \param root Value to serialize.
+   * \note There is no point in deriving from Writer, since write() should not
+   * return a value.
+   */
+  void write(std::ostream& out, const Value& root);
+
+private:
+  void writeValue(const Value& value);
+  void writeArrayValue(const Value& value);
+  bool isMultineArray(const Value& value);
+  void pushValue(const std::string& value);
+  void writeIndent();
+  void writeWithIndent(const std::string& value);
+  void indent();
+  void unindent();
+  void writeCommentBeforeValue(const Value& root);
+  void writeCommentAfterValueOnSameLine(const Value& root);
+  bool hasCommentForValue(const Value& value);
+  static std::string normalizeEOL(const std::string& text);
+
+  typedef std::vector<std::string> ChildValues;
+
+  ChildValues childValues_;
+  std::ostream* document_;
+  std::string indentString_;
+  int rightMargin_;
+  std::string indentation_;
+  bool addChildValues_ : 1;
+  bool indented_ : 1;
+};
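+
+// [Editorial sketch, not part of upstream jsoncpp] The deprecated
+// StyledStreamWriter above writes the pretty-printed form straight to a
+// stream; the constructor argument is the per-level indentation string.
+inline void styledStreamWriteSketch(std::ostream& out, const Value& root) {
+  StyledStreamWriter writer("  ");  // two spaces of indentation per level
+  writer.write(out, root);          // multi-line, indented output
+}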
+
+#if defined(JSON_HAS_INT64)
+std::string JSON_API valueToString(Int value);
+std::string JSON_API valueToString(UInt value);
+#endif // if defined(JSON_HAS_INT64)
+std::string JSON_API valueToString(LargestInt value);
+std::string JSON_API valueToString(LargestUInt value);
+std::string JSON_API valueToString(double value);
+std::string JSON_API valueToString(bool value);
+std::string JSON_API valueToQuotedString(const char* value);
+
+/// \brief Output using the StyledStreamWriter.
+/// \see Json::operator>>()
+JSON_API std::ostream& operator<<(std::ostream&, const Value& root);
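+
+// [Editorial sketch, not part of upstream jsoncpp] The helpers above convert
+// scalars to their JSON text form, and operator<< streams a whole Value using
+// StyledStreamWriter:
+inline void valueToStringSketch(std::ostream& out, const Value& root) {
+  out << valueToString(true) << '\n';         // "true"
+  out << valueToString(2.5) << '\n';          // "2.5"
+  out << valueToQuotedString("abc") << '\n';  // "\"abc\"" (adds JSON quotes)
+  out << root;                                // styled output via operator<<
+}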
+
+} // namespace Json
+
+#if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+#pragma warning(pop)
+#endif // if defined(JSONCPP_DISABLE_DLL_INTERFACE_WARNING)
+
+#endif // JSON_WRITER_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/writer.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/assertions.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_ASSERTIONS_H_INCLUDED
+#define CPPTL_JSON_ASSERTIONS_H_INCLUDED
+
+#include <stdlib.h>
+#include <sstream>
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include "config.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+/** It should not be possible for a maliciously designed file to
+ *  cause an abort() or seg-fault, so these macros are used only
+ *  for pre-condition violations and internal logic errors.
+ */
+#if JSON_USE_EXCEPTION
+
+// @todo <= add detail about condition in exception
+# define JSON_ASSERT(condition)                                                \
+  {if (!(condition)) {Json::throwLogicError( "assert json failed" );}}
+
+# define JSON_FAIL_MESSAGE(message)                                            \
+  {                                                                            \
+    std::ostringstream oss; oss << message;                                    \
+    Json::throwLogicError(oss.str());                                          \
+    abort();                                                                   \
+  }
+
+#else // JSON_USE_EXCEPTION
+
+# define JSON_ASSERT(condition) assert(condition)
+
+// The call to assert() will show the failure message in debug builds. In
+// release builds we abort, for a core-dump or debugger.
+# define JSON_FAIL_MESSAGE(message)                                            \
+  {                                                                            \
+    std::ostringstream oss; oss << message;                                    \
+    assert(false && oss.str().c_str());                                        \
+    abort();                                                                   \
+  }
+
+
+#endif
+
+#define JSON_ASSERT_MESSAGE(condition, message)                                \
+  if (!(condition)) {                                                          \
+    JSON_FAIL_MESSAGE(message);                                                \
+  }
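+
+// [Editorial sketch, not part of upstream jsoncpp] Typical use of the macros
+// above: the message is streamed into an ostringstream, so operator<< chains
+// are allowed; whether a violation throws or aborts depends on
+// JSON_USE_EXCEPTION.
+inline void assertionSketch(int index) {
+  JSON_ASSERT_MESSAGE(index >= 0, "index cannot be negative: " << index);
+}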
+
+#endif // CPPTL_JSON_ASSERTIONS_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/assertions.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+#endif //ifndef JSON_AMALGATED_H_INCLUDED

+ 5192 - 0
conformance/third_party/jsoncpp/jsoncpp.cpp

@@ -0,0 +1,5192 @@
+/// Json-cpp amalgamated source (http://jsoncpp.sourceforge.net/).
+/// It is intended to be used with #include "json/json.h"
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+/*
+The JsonCpp library's source code, including accompanying documentation, 
+tests and demonstration applications, are licensed under the following
+conditions...
+
+The author (Baptiste Lepilleur) explicitly disclaims copyright in all 
+jurisdictions which recognize such a disclaimer. In such jurisdictions, 
+this software is released into the Public Domain.
+
+In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
+2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
+released under the terms of the MIT License (see below).
+
+In jurisdictions which recognize Public Domain property, the user of this 
+software may choose to accept it either as 1) Public Domain, 2) under the 
+conditions of the MIT License (see below), or 3) under the terms of dual 
+Public Domain/MIT License conditions described here, as they choose.
+
+The MIT License is about as close to Public Domain as a license can get, and is
+described in clear, concise terms at:
+
+   http://en.wikipedia.org/wiki/MIT_License
+   
+The full text of the MIT License follows:
+
+========================================================================
+Copyright (c) 2007-2010 Baptiste Lepilleur
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+========================================================================
+(END LICENSE TEXT)
+
+The MIT license is compatible with both the GPL and commercial
+software, affording one all of the rights of Public Domain with the
+minor nuisance of being required to keep the above copyright notice
+and license text in the source code. Note also that by accepting the
+Public Domain "license" you can re-license your copy using whatever
+license you like.
+
+*/
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+#include "third_party/jsoncpp/json.h"
+
+#ifndef JSON_IS_AMALGAMATION
+#error "Compile with -I PATH_TO_JSON_DIRECTORY"
+#endif
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_tool.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef LIB_JSONCPP_JSON_TOOL_H_INCLUDED
+#define LIB_JSONCPP_JSON_TOOL_H_INCLUDED
+
+/* This header provides common string manipulation support, such as UTF-8,
+ * portable conversion from/to string...
+ *
+ * It is an internal header that must not be exposed.
+ */
+
+namespace Json {
+
+/// Converts a unicode code-point to UTF-8.
+static inline std::string codePointToUTF8(unsigned int cp) {
+  std::string result;
+
+  // based on description from http://en.wikipedia.org/wiki/UTF-8
+
+  if (cp <= 0x7f) {
+    result.resize(1);
+    result[0] = static_cast<char>(cp);
+  } else if (cp <= 0x7FF) {
+    result.resize(2);
+    result[1] = static_cast<char>(0x80 | (0x3f & cp));
+    result[0] = static_cast<char>(0xC0 | (0x1f & (cp >> 6)));
+  } else if (cp <= 0xFFFF) {
+    result.resize(3);
+    result[2] = static_cast<char>(0x80 | (0x3f & cp));
+    result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
+    result[0] = static_cast<char>(0xE0 | (0xf & (cp >> 12)));
+  } else if (cp <= 0x10FFFF) {
+    result.resize(4);
+    result[3] = static_cast<char>(0x80 | (0x3f & cp));
+    result[2] = static_cast<char>(0x80 | (0x3f & (cp >> 6)));
+    result[1] = static_cast<char>(0x80 | (0x3f & (cp >> 12)));
+    result[0] = static_cast<char>(0xF0 | (0x7 & (cp >> 18)));
+  }
+
+  return result;
+}
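+
+// [Editorial note, not part of upstream jsoncpp] Worked examples of the
+// encoding above:
+//   codePointToUTF8(0x48)    -> "H"                 (1 byte,  cp <= 0x7F)
+//   codePointToUTF8(0x20AC)  -> 0xE2 0x82 0xAC      (3 bytes, euro sign)
+//   codePointToUTF8(0x1F600) -> 0xF0 0x9F 0x98 0x80 (4 bytes, emoji)
+// Code points above 0x10FFFF fall through every branch and yield an empty
+// string.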
+
+/// Returns true if ch is a control character (in range [1,31]).
+static inline bool isControlCharacter(char ch) { return ch > 0 && ch <= 0x1F; }
+
+enum {
+  /// Constant that specifies the size of the buffer that must be passed to
+  /// uintToString.
+  uintToStringBufferSize = 3 * sizeof(LargestUInt) + 1
+};
+
+// Defines a char buffer for use with uintToString().
+typedef char UIntToStringBuffer[uintToStringBufferSize];
+
+/** Converts an unsigned integer to string.
+ * @param value Unsigned integer to convert to string
+ * @param current Input/Output string buffer.
+ *        Must have at least uintToStringBufferSize chars free.
+ */
+static inline void uintToString(LargestUInt value, char*& current) {
+  *--current = 0;
+  do {
+    *--current = static_cast<signed char>(value % 10U + static_cast<unsigned>('0'));
+    value /= 10;
+  } while (value != 0);
+}
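+
+// [Editorial sketch, not part of upstream jsoncpp] How the writer code drives
+// uintToString(): 'current' starts one past the end of the buffer, a NUL and
+// the digits are written backwards, and the result begins wherever 'current'
+// ends up.
+static inline std::string uintToStringSketch(LargestUInt value) {
+  UIntToStringBuffer buffer;
+  char* current = buffer + uintToStringBufferSize;
+  uintToString(value, current);
+  return std::string(current);  // e.g. 12045 -> "12045"
+}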
+
+/** Change ',' to '.' everywhere in buffer.
+ *
+ * We had a sophisticated way, but it did not work in WinCE.
+ * @see https://github.com/open-source-parsers/jsoncpp/pull/9
+ */
+static inline void fixNumericLocale(char* begin, char* end) {
+  while (begin < end) {
+    if (*begin == ',') {
+      *begin = '.';
+    }
+    ++begin;
+  }
+}
+
+} // namespace Json
+
+#endif // LIB_JSONCPP_JSON_TOOL_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_tool.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_reader.cpp
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2011 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include <json/assertions.h>
+#include <json/reader.h>
+#include <json/value.h>
+#include "json_tool.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <utility>
+#include <cstdio>
+#include <cassert>
+#include <cstring>
+#include <istream>
+#include <sstream>
+#include <memory>
+#include <set>
+#include <limits>
+
+#if defined(_MSC_VER)
+#if !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above 
+#define snprintf sprintf_s
+#elif _MSC_VER >= 1900 // VC++ 14.0 and above
+#define snprintf std::snprintf
+#else
+#define snprintf _snprintf
+#endif
+#elif defined(__ANDROID__) || defined(__QNXNTO__)
+#define snprintf snprintf
+#elif __cplusplus >= 201103L
+#define snprintf std::snprintf
+#endif
+
+#if defined(__QNXNTO__)
+#define sscanf std::sscanf
+#endif
+
+#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
+// Disable warning about strdup being deprecated.
+#pragma warning(disable : 4996)
+#endif
+
+static int const stackLimit_g = 1000;
+static int       stackDepth_g = 0;  // see readValue()
+
+namespace Json {
+
+#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520)
+typedef std::unique_ptr<CharReader> CharReaderPtr;
+#else
+typedef std::auto_ptr<CharReader>   CharReaderPtr;
+#endif
+
+// Implementation of class Features
+// ////////////////////////////////
+
+Features::Features()
+    : allowComments_(true), strictRoot_(false),
+      allowDroppedNullPlaceholders_(false), allowNumericKeys_(false) {}
+
+Features Features::all() { return Features(); }
+
+Features Features::strictMode() {
+  Features features;
+  features.allowComments_ = false;
+  features.strictRoot_ = true;
+  features.allowDroppedNullPlaceholders_ = false;
+  features.allowNumericKeys_ = false;
+  return features;
+}
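+
+// [Editorial sketch, not part of upstream jsoncpp] How a Features object is
+// consumed by the deprecated Reader declared in reader.h:
+static bool strictParseSketch(const std::string& doc, Value& root) {
+  Reader reader(Features::strictMode());  // no comments; root must be {} or []
+  return reader.parse(doc, root);
+}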
+
+// Implementation of class Reader
+// ////////////////////////////////
+
+static bool containsNewLine(Reader::Location begin, Reader::Location end) {
+  for (; begin < end; ++begin)
+    if (*begin == '\n' || *begin == '\r')
+      return true;
+  return false;
+}
+
+// Class Reader
+// //////////////////////////////////////////////////////////////////
+
+Reader::Reader()
+    : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(),
+      lastValue_(), commentsBefore_(), features_(Features::all()),
+      collectComments_() {}
+
+Reader::Reader(const Features& features)
+    : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(),
+      lastValue_(), commentsBefore_(), features_(features), collectComments_() {
+}
+
+bool
+Reader::parse(const std::string& document, Value& root, bool collectComments) {
+  document_ = document;
+  const char* begin = document_.c_str();
+  const char* end = begin + document_.length();
+  return parse(begin, end, root, collectComments);
+}
+
+bool Reader::parse(std::istream& sin, Value& root, bool collectComments) {
+  // std::istream_iterator<char> begin(sin);
+  // std::istream_iterator<char> end;
+  // Those would allow streamed input from a file, if parse() were a
+  // template function.
+
+  // Since std::string is reference-counted, this at least does not
+  // create an extra copy.
+  std::string doc;
+  std::getline(sin, doc, (char)EOF);
+  return parse(doc, root, collectComments);
+}
+
+bool Reader::parse(const char* beginDoc,
+                   const char* endDoc,
+                   Value& root,
+                   bool collectComments) {
+  if (!features_.allowComments_) {
+    collectComments = false;
+  }
+
+  begin_ = beginDoc;
+  end_ = endDoc;
+  collectComments_ = collectComments;
+  current_ = begin_;
+  lastValueEnd_ = 0;
+  lastValue_ = 0;
+  commentsBefore_ = "";
+  errors_.clear();
+  while (!nodes_.empty())
+    nodes_.pop();
+  nodes_.push(&root);
+
+  stackDepth_g = 0;  // Yes, this is bad coding, but options are limited.
+  bool successful = readValue();
+  Token token;
+  skipCommentTokens(token);
+  if (collectComments_ && !commentsBefore_.empty())
+    root.setComment(commentsBefore_, commentAfter);
+  if (features_.strictRoot_) {
+    if (!root.isArray() && !root.isObject()) {
+      // Set error location to start of doc, ideally should be first token found
+      // in doc
+      token.type_ = tokenError;
+      token.start_ = beginDoc;
+      token.end_ = endDoc;
+      addError(
+          "A valid JSON document must be either an array or an object value.",
+          token);
+      return false;
+    }
+  }
+  return successful;
+}
+
+bool Reader::readValue() {
+  // This is a non-reentrant way to support a stackLimit. Terrible!
+  // But this deprecated class has a security problem: Bad input can
+  // cause a seg-fault. This seems like a fair, binary-compatible way
+  // to prevent the problem.
+  if (stackDepth_g >= stackLimit_g) throwRuntimeError("Exceeded stackLimit in readValue().");
+  ++stackDepth_g;
+
+  Token token;
+  skipCommentTokens(token);
+  bool successful = true;
+
+  if (collectComments_ && !commentsBefore_.empty()) {
+    currentValue().setComment(commentsBefore_, commentBefore);
+    commentsBefore_ = "";
+  }
+
+  switch (token.type_) {
+  case tokenObjectBegin:
+    successful = readObject(token);
+    currentValue().setOffsetLimit(current_ - begin_);
+    break;
+  case tokenArrayBegin:
+    successful = readArray(token);
+    currentValue().setOffsetLimit(current_ - begin_);
+    break;
+  case tokenNumber:
+    successful = decodeNumber(token);
+    break;
+  case tokenString:
+    successful = decodeString(token);
+    break;
+  case tokenTrue:
+    {
+    Value v(true);
+    currentValue().swapPayload(v);
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    }
+    break;
+  case tokenFalse:
+    {
+    Value v(false);
+    currentValue().swapPayload(v);
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    }
+    break;
+  case tokenNull:
+    {
+    Value v;
+    currentValue().swapPayload(v);
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    }
+    break;
+  case tokenArraySeparator:
+  case tokenObjectEnd:
+  case tokenArrayEnd:
+    if (features_.allowDroppedNullPlaceholders_) {
+      // "Un-read" the current token and mark the current value as a null
+      // token.
+      current_--;
+      Value v;
+      currentValue().swapPayload(v);
+      currentValue().setOffsetStart(current_ - begin_ - 1);
+      currentValue().setOffsetLimit(current_ - begin_);
+      break;
+    } // Else, fall through...
+  default:
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    return addError("Syntax error: value, object or array expected.", token);
+  }
+
+  if (collectComments_) {
+    lastValueEnd_ = current_;
+    lastValue_ = &currentValue();
+  }
+
+  --stackDepth_g;
+  return successful;
+}
+
+void Reader::skipCommentTokens(Token& token) {
+  if (features_.allowComments_) {
+    do {
+      readToken(token);
+    } while (token.type_ == tokenComment);
+  } else {
+    readToken(token);
+  }
+}
+
+bool Reader::readToken(Token& token) {
+  skipSpaces();
+  token.start_ = current_;
+  Char c = getNextChar();
+  bool ok = true;
+  switch (c) {
+  case '{':
+    token.type_ = tokenObjectBegin;
+    break;
+  case '}':
+    token.type_ = tokenObjectEnd;
+    break;
+  case '[':
+    token.type_ = tokenArrayBegin;
+    break;
+  case ']':
+    token.type_ = tokenArrayEnd;
+    break;
+  case '"':
+    token.type_ = tokenString;
+    ok = readString();
+    break;
+  case '/':
+    token.type_ = tokenComment;
+    ok = readComment();
+    break;
+  case '0':
+  case '1':
+  case '2':
+  case '3':
+  case '4':
+  case '5':
+  case '6':
+  case '7':
+  case '8':
+  case '9':
+  case '-':
+    token.type_ = tokenNumber;
+    readNumber();
+    break;
+  case 't':
+    token.type_ = tokenTrue;
+    ok = match("rue", 3);
+    break;
+  case 'f':
+    token.type_ = tokenFalse;
+    ok = match("alse", 4);
+    break;
+  case 'n':
+    token.type_ = tokenNull;
+    ok = match("ull", 3);
+    break;
+  case ',':
+    token.type_ = tokenArraySeparator;
+    break;
+  case ':':
+    token.type_ = tokenMemberSeparator;
+    break;
+  case 0:
+    token.type_ = tokenEndOfStream;
+    break;
+  default:
+    ok = false;
+    break;
+  }
+  if (!ok)
+    token.type_ = tokenError;
+  token.end_ = current_;
+  return true;
+}
+
+void Reader::skipSpaces() {
+  while (current_ != end_) {
+    Char c = *current_;
+    if (c == ' ' || c == '\t' || c == '\r' || c == '\n')
+      ++current_;
+    else
+      break;
+  }
+}
+
+bool Reader::match(Location pattern, int patternLength) {
+  if (end_ - current_ < patternLength)
+    return false;
+  int index = patternLength;
+  while (index--)
+    if (current_[index] != pattern[index])
+      return false;
+  current_ += patternLength;
+  return true;
+}
+
+bool Reader::readComment() {
+  Location commentBegin = current_ - 1;
+  Char c = getNextChar();
+  bool successful = false;
+  if (c == '*')
+    successful = readCStyleComment();
+  else if (c == '/')
+    successful = readCppStyleComment();
+  if (!successful)
+    return false;
+
+  if (collectComments_) {
+    CommentPlacement placement = commentBefore;
+    if (lastValueEnd_ && !containsNewLine(lastValueEnd_, commentBegin)) {
+      if (c != '*' || !containsNewLine(commentBegin, current_))
+        placement = commentAfterOnSameLine;
+    }
+
+    addComment(commentBegin, current_, placement);
+  }
+  return true;
+}
+
+static std::string normalizeEOL(Reader::Location begin, Reader::Location end) {
+  std::string normalized;
+  normalized.reserve(end - begin);
+  Reader::Location current = begin;
+  while (current != end) {
+    char c = *current++;
+    if (c == '\r') {
+      if (current != end && *current == '\n')
+         // convert dos EOL
+         ++current;
+      // convert Mac EOL
+      normalized += '\n';
+    } else {
+      normalized += c;
+    }
+  }
+  return normalized;
+}
+
+void
+Reader::addComment(Location begin, Location end, CommentPlacement placement) {
+  assert(collectComments_);
+  const std::string& normalized = normalizeEOL(begin, end);
+  if (placement == commentAfterOnSameLine) {
+    assert(lastValue_ != 0);
+    lastValue_->setComment(normalized, placement);
+  } else {
+    commentsBefore_ += normalized;
+  }
+}
+
+bool Reader::readCStyleComment() {
+  while (current_ != end_) {
+    Char c = getNextChar();
+    if (c == '*' && *current_ == '/')
+      break;
+  }
+  return getNextChar() == '/';
+}
+
+bool Reader::readCppStyleComment() {
+  while (current_ != end_) {
+    Char c = getNextChar();
+    if (c == '\n')
+      break;
+    if (c == '\r') {
+      // Consume DOS EOL. It will be normalized in addComment.
+      if (current_ != end_ && *current_ == '\n')
+        getNextChar();
+      // Break on Mac OS 9 EOL.
+      break;
+    }
+  }
+  return true;
+}
+
+void Reader::readNumber() {
+  const char *p = current_;
+  char c = '0'; // stopgap for already consumed character
+  // integral part
+  while (c >= '0' && c <= '9')
+    c = (current_ = p) < end_ ? *p++ : 0;
+  // fractional part
+  if (c == '.') {
+    c = (current_ = p) < end_ ? *p++ : 0;
+    while (c >= '0' && c <= '9')
+      c = (current_ = p) < end_ ? *p++ : 0;
+  }
+  // exponential part
+  if (c == 'e' || c == 'E') {
+    c = (current_ = p) < end_ ? *p++ : 0;
+    if (c == '+' || c == '-')
+      c = (current_ = p) < end_ ? *p++ : 0;
+    while (c >= '0' && c <= '9')
+      c = (current_ = p) < end_ ? *p++ : 0;
+  }
+}
+
+bool Reader::readString() {
+  Char c = 0;
+  while (current_ != end_) {
+    c = getNextChar();
+    if (c == '\\')
+      getNextChar();
+    else if (c == '"')
+      break;
+  }
+  return c == '"';
+}
+
+bool Reader::readObject(Token& tokenStart) {
+  Token tokenName;
+  std::string name;
+  Value init(objectValue);
+  currentValue().swapPayload(init);
+  currentValue().setOffsetStart(tokenStart.start_ - begin_);
+  while (readToken(tokenName)) {
+    bool initialTokenOk = true;
+    while (tokenName.type_ == tokenComment && initialTokenOk)
+      initialTokenOk = readToken(tokenName);
+    if (!initialTokenOk)
+      break;
+    if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object
+      return true;
+    name = "";
+    if (tokenName.type_ == tokenString) {
+      if (!decodeString(tokenName, name))
+        return recoverFromError(tokenObjectEnd);
+    } else if (tokenName.type_ == tokenNumber && features_.allowNumericKeys_) {
+      Value numberName;
+      if (!decodeNumber(tokenName, numberName))
+        return recoverFromError(tokenObjectEnd);
+      name = numberName.asString();
+    } else {
+      break;
+    }
+
+    Token colon;
+    if (!readToken(colon) || colon.type_ != tokenMemberSeparator) {
+      return addErrorAndRecover(
+          "Missing ':' after object member name", colon, tokenObjectEnd);
+    }
+    Value& value = currentValue()[name];
+    nodes_.push(&value);
+    bool ok = readValue();
+    nodes_.pop();
+    if (!ok) // error already set
+      return recoverFromError(tokenObjectEnd);
+
+    Token comma;
+    if (!readToken(comma) ||
+        (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator &&
+         comma.type_ != tokenComment)) {
+      return addErrorAndRecover(
+          "Missing ',' or '}' in object declaration", comma, tokenObjectEnd);
+    }
+    bool finalizeTokenOk = true;
+    while (comma.type_ == tokenComment && finalizeTokenOk)
+      finalizeTokenOk = readToken(comma);
+    if (comma.type_ == tokenObjectEnd)
+      return true;
+  }
+  return addErrorAndRecover(
+      "Missing '}' or object member name", tokenName, tokenObjectEnd);
+}
+
+bool Reader::readArray(Token& tokenStart) {
+  Value init(arrayValue);
+  currentValue().swapPayload(init);
+  currentValue().setOffsetStart(tokenStart.start_ - begin_);
+  skipSpaces();
+  if (*current_ == ']') // empty array
+  {
+    Token endArray;
+    readToken(endArray);
+    return true;
+  }
+  int index = 0;
+  for (;;) {
+    Value& value = currentValue()[index++];
+    nodes_.push(&value);
+    bool ok = readValue();
+    nodes_.pop();
+    if (!ok) // error already set
+      return recoverFromError(tokenArrayEnd);
+
+    Token token;
+    // Accept Comment after last item in the array.
+    ok = readToken(token);
+    while (token.type_ == tokenComment && ok) {
+      ok = readToken(token);
+    }
+    bool badTokenType =
+        (token.type_ != tokenArraySeparator && token.type_ != tokenArrayEnd);
+    if (!ok || badTokenType) {
+      return addErrorAndRecover(
+          "Missing ',' or ']' in array declaration", token, tokenArrayEnd);
+    }
+    if (token.type_ == tokenArrayEnd)
+      break;
+  }
+  return true;
+}
+
+bool Reader::decodeNumber(Token& token) {
+  Value decoded;
+  if (!decodeNumber(token, decoded))
+    return false;
+  currentValue().swapPayload(decoded);
+  currentValue().setOffsetStart(token.start_ - begin_);
+  currentValue().setOffsetLimit(token.end_ - begin_);
+  return true;
+}
+
+bool Reader::decodeNumber(Token& token, Value& decoded) {
+  // Attempts to parse the number as an integer. If the number is
+  // larger than the maximum supported value of an integer then
+  // we decode the number as a double.
+  Location current = token.start_;
+  bool isNegative = *current == '-';
+  if (isNegative)
+    ++current;
+  // TODO: Help the compiler do the div and mod at compile time or get rid of them.
+  Value::LargestUInt maxIntegerValue =
+      isNegative ? Value::LargestUInt(Value::maxLargestInt) + 1
+                 : Value::maxLargestUInt;
+  Value::LargestUInt threshold = maxIntegerValue / 10;
+  Value::LargestUInt value = 0;
+  while (current < token.end_) {
+    Char c = *current++;
+    if (c < '0' || c > '9')
+      return decodeDouble(token, decoded);
+    Value::UInt digit(c - '0');
+    if (value >= threshold) {
+      // We've hit or exceeded the max value divided by 10 (rounded down). If
+      // a) we've only just touched the limit, b) this is the last digit, and
+      // c) it's small enough to fit in that rounding delta, we're okay.
+      // Otherwise treat this number as a double to avoid overflow.
+      if (value > threshold || current != token.end_ ||
+          digit > maxIntegerValue % 10) {
+        return decodeDouble(token, decoded);
+      }
+    }
+    value = value * 10 + digit;
+  }
+  if (isNegative && value == maxIntegerValue)
+    decoded = Value::minLargestInt;
+  else if (isNegative)
+    decoded = -Value::LargestInt(value);
+  else if (value <= Value::LargestUInt(Value::maxInt))
+    decoded = Value::LargestInt(value);
+  else
+    decoded = value;
+  return true;
+}
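+
+// [Editorial note, not part of upstream jsoncpp] Worked example of the
+// overflow guard above, for a 64-bit LargestUInt: maxIntegerValue is
+// 18446744073709551615, so threshold is 1844674407370955161 and
+// maxIntegerValue % 10 is 5. Parsing "18446744073709551615" hits the
+// threshold exactly on the 19th digit and the final digit (5) still fits, so
+// the value decodes as an integer; "18446744073709551616" ends in 6 > 5 and
+// is handed to decodeDouble() instead.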
+
+bool Reader::decodeDouble(Token& token) {
+  Value decoded;
+  if (!decodeDouble(token, decoded))
+    return false;
+  currentValue().swapPayload(decoded);
+  currentValue().setOffsetStart(token.start_ - begin_);
+  currentValue().setOffsetLimit(token.end_ - begin_);
+  return true;
+}
+
+bool Reader::decodeDouble(Token& token, Value& decoded) {
+  double value = 0;
+  std::string buffer(token.start_, token.end_);
+  std::istringstream is(buffer);
+  if (!(is >> value))
+    return addError("'" + std::string(token.start_, token.end_) +
+                        "' is not a number.",
+                    token);
+  decoded = value;
+  return true;
+}
+
+bool Reader::decodeString(Token& token) {
+  std::string decoded_string;
+  if (!decodeString(token, decoded_string))
+    return false;
+  Value decoded(decoded_string);
+  currentValue().swapPayload(decoded);
+  currentValue().setOffsetStart(token.start_ - begin_);
+  currentValue().setOffsetLimit(token.end_ - begin_);
+  return true;
+}
+
+bool Reader::decodeString(Token& token, std::string& decoded) {
+  decoded.reserve(token.end_ - token.start_ - 2);
+  Location current = token.start_ + 1; // skip '"'
+  Location end = token.end_ - 1;       // do not include '"'
+  while (current != end) {
+    Char c = *current++;
+    if (c == '"')
+      break;
+    else if (c == '\\') {
+      if (current == end)
+        return addError("Empty escape sequence in string", token, current);
+      Char escape = *current++;
+      switch (escape) {
+      case '"':
+        decoded += '"';
+        break;
+      case '/':
+        decoded += '/';
+        break;
+      case '\\':
+        decoded += '\\';
+        break;
+      case 'b':
+        decoded += '\b';
+        break;
+      case 'f':
+        decoded += '\f';
+        break;
+      case 'n':
+        decoded += '\n';
+        break;
+      case 'r':
+        decoded += '\r';
+        break;
+      case 't':
+        decoded += '\t';
+        break;
+      case 'u': {
+        unsigned int unicode;
+        if (!decodeUnicodeCodePoint(token, current, end, unicode))
+          return false;
+        decoded += codePointToUTF8(unicode);
+      } break;
+      default:
+        return addError("Bad escape sequence in string", token, current);
+      }
+    } else {
+      decoded += c;
+    }
+  }
+  return true;
+}
+
+bool Reader::decodeUnicodeCodePoint(Token& token,
+                                    Location& current,
+                                    Location end,
+                                    unsigned int& unicode) {
+
+  if (!decodeUnicodeEscapeSequence(token, current, end, unicode))
+    return false;
+  if (unicode >= 0xD800 && unicode <= 0xDBFF) {
+    // surrogate pairs
+    if (end - current < 6)
+      return addError(
+          "additional six characters expected to parse unicode surrogate pair.",
+          token,
+          current);
+    unsigned int surrogatePair;
+    if (*(current++) == '\\' && *(current++) == 'u') {
+      if (decodeUnicodeEscapeSequence(token, current, end, surrogatePair)) {
+        unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF);
+      } else
+        return false;
+    } else
+      return addError("expecting another \\u token to begin the second half of "
+                      "a unicode surrogate pair",
+                      token,
+                      current);
+  }
+  return true;
+}
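+
+// [Editorial note, not part of upstream jsoncpp] Surrogate-pair example for
+// the function above: the JSON escape "\uD83D\uDE00" decodes as
+//   0x10000 + ((0xD83D & 0x3FF) << 10) + (0xDE00 & 0x3FF) = 0x1F600,
+// a single code point outside the Basic Multilingual Plane.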
+
+bool Reader::decodeUnicodeEscapeSequence(Token& token,
+                                         Location& current,
+                                         Location end,
+                                         unsigned int& unicode) {
+  if (end - current < 4)
+    return addError(
+        "Bad unicode escape sequence in string: four digits expected.",
+        token,
+        current);
+  unicode = 0;
+  for (int index = 0; index < 4; ++index) {
+    Char c = *current++;
+    unicode *= 16;
+    if (c >= '0' && c <= '9')
+      unicode += c - '0';
+    else if (c >= 'a' && c <= 'f')
+      unicode += c - 'a' + 10;
+    else if (c >= 'A' && c <= 'F')
+      unicode += c - 'A' + 10;
+    else
+      return addError(
+          "Bad unicode escape sequence in string: hexadecimal digit expected.",
+          token,
+          current);
+  }
+  return true;
+}
+
+bool
+Reader::addError(const std::string& message, Token& token, Location extra) {
+  ErrorInfo info;
+  info.token_ = token;
+  info.message_ = message;
+  info.extra_ = extra;
+  errors_.push_back(info);
+  return false;
+}
+
+bool Reader::recoverFromError(TokenType skipUntilToken) {
+  int errorCount = int(errors_.size());
+  Token skip;
+  for (;;) {
+    if (!readToken(skip))
+      errors_.resize(errorCount); // discard errors caused by recovery
+    if (skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream)
+      break;
+  }
+  errors_.resize(errorCount);
+  return false;
+}
+
+bool Reader::addErrorAndRecover(const std::string& message,
+                                Token& token,
+                                TokenType skipUntilToken) {
+  addError(message, token);
+  return recoverFromError(skipUntilToken);
+}
+
+Value& Reader::currentValue() { return *(nodes_.top()); }
+
+Reader::Char Reader::getNextChar() {
+  if (current_ == end_)
+    return 0;
+  return *current_++;
+}
+
+void Reader::getLocationLineAndColumn(Location location,
+                                      int& line,
+                                      int& column) const {
+  Location current = begin_;
+  Location lastLineStart = current;
+  line = 0;
+  while (current < location && current != end_) {
+    Char c = *current++;
+    if (c == '\r') {
+      if (*current == '\n')
+        ++current;
+      lastLineStart = current;
+      ++line;
+    } else if (c == '\n') {
+      lastLineStart = current;
+      ++line;
+    }
+  }
+  // column & line start at 1
+  column = int(location - lastLineStart) + 1;
+  ++line;
+}
+
+std::string Reader::getLocationLineAndColumn(Location location) const {
+  int line, column;
+  getLocationLineAndColumn(location, line, column);
+  char buffer[18 + 16 + 16 + 1];
+  snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
+  return buffer;
+}
+
+// Deprecated. Preserved for backward compatibility
+std::string Reader::getFormatedErrorMessages() const {
+  return getFormattedErrorMessages();
+}
+
+std::string Reader::getFormattedErrorMessages() const {
+  std::string formattedMessage;
+  for (Errors::const_iterator itError = errors_.begin();
+       itError != errors_.end();
+       ++itError) {
+    const ErrorInfo& error = *itError;
+    formattedMessage +=
+        "* " + getLocationLineAndColumn(error.token_.start_) + "\n";
+    formattedMessage += "  " + error.message_ + "\n";
+    if (error.extra_)
+      formattedMessage +=
+          "See " + getLocationLineAndColumn(error.extra_) + " for detail.\n";
+  }
+  return formattedMessage;
+}
+
+std::vector<Reader::StructuredError> Reader::getStructuredErrors() const {
+  std::vector<Reader::StructuredError> allErrors;
+  for (Errors::const_iterator itError = errors_.begin();
+       itError != errors_.end();
+       ++itError) {
+    const ErrorInfo& error = *itError;
+    Reader::StructuredError structured;
+    structured.offset_start = error.token_.start_ - begin_;
+    structured.offset_limit = error.token_.end_ - begin_;
+    structured.message = error.message_;
+    allErrors.push_back(structured);
+  }
+  return allErrors;
+}
+
+bool Reader::pushError(const Value& value, const std::string& message) {
+  size_t length = end_ - begin_;
+  if(value.getOffsetStart() > length
+    || value.getOffsetLimit() > length)
+    return false;
+  Token token;
+  token.type_ = tokenError;
+  token.start_ = begin_ + value.getOffsetStart();
+  token.end_ = begin_ + value.getOffsetLimit();
+  ErrorInfo info;
+  info.token_ = token;
+  info.message_ = message;
+  info.extra_ = 0;
+  errors_.push_back(info);
+  return true;
+}
+
+bool Reader::pushError(const Value& value, const std::string& message, const Value& extra) {
+  size_t length = end_ - begin_;
+  if(value.getOffsetStart() > length
+    || value.getOffsetLimit() > length
+    || extra.getOffsetLimit() > length)
+    return false;
+  Token token;
+  token.type_ = tokenError;
+  token.start_ = begin_ + value.getOffsetStart();
+  token.end_ = begin_ + value.getOffsetLimit();
+  ErrorInfo info;
+  info.token_ = token;
+  info.message_ = message;
+  info.extra_ = begin_ + extra.getOffsetStart();
+  errors_.push_back(info);
+  return true;
+}
+
+bool Reader::good() const {
+  return !errors_.size();
+}
+
+// exact copy of Features
+class OurFeatures {
+public:
+  static OurFeatures all();
+  bool allowComments_;
+  bool strictRoot_;
+  bool allowDroppedNullPlaceholders_;
+  bool allowNumericKeys_;
+  bool allowSingleQuotes_;
+  bool failIfExtra_;
+  bool rejectDupKeys_;
+  bool allowSpecialFloats_;
+  int stackLimit_;
+};  // OurFeatures
+
+// exact copy of Implementation of class Features
+// ////////////////////////////////
+
+OurFeatures OurFeatures::all() { return OurFeatures(); }
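+
+// [Editorial sketch, not part of upstream jsoncpp] These flags are normally
+// reached through CharReaderBuilder, declared earlier in this amalgamation;
+// the setting keys below are assumed to mirror the OurFeatures members.
+static bool specialFloatParseSketch(const std::string& doc, Value& root,
+                                    std::string& errs) {
+  CharReaderBuilder builder;
+  builder["allowSpecialFloats"] = true;  // accept NaN, Infinity, -Infinity
+  builder["failIfExtra"] = true;         // reject trailing non-whitespace
+  CharReaderPtr reader(builder.newCharReader());
+  return reader->parse(doc.data(), doc.data() + doc.size(), &root, &errs);
+}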
+
+// Implementation of class Reader
+// ////////////////////////////////
+
+// exact copy of Reader, renamed to OurReader
+class OurReader {
+public:
+  typedef char Char;
+  typedef const Char* Location;
+  struct StructuredError {
+    size_t offset_start;
+    size_t offset_limit;
+    std::string message;
+  };
+
+  OurReader(OurFeatures const& features);
+  bool parse(const char* beginDoc,
+             const char* endDoc,
+             Value& root,
+             bool collectComments = true);
+  std::string getFormattedErrorMessages() const;
+  std::vector<StructuredError> getStructuredErrors() const;
+  bool pushError(const Value& value, const std::string& message);
+  bool pushError(const Value& value, const std::string& message, const Value& extra);
+  bool good() const;
+
+private:
+  OurReader(OurReader const&);  // no impl
+  void operator=(OurReader const&);  // no impl
+
+  enum TokenType {
+    tokenEndOfStream = 0,
+    tokenObjectBegin,
+    tokenObjectEnd,
+    tokenArrayBegin,
+    tokenArrayEnd,
+    tokenString,
+    tokenNumber,
+    tokenTrue,
+    tokenFalse,
+    tokenNull,
+    tokenNaN,
+    tokenPosInf,
+    tokenNegInf,
+    tokenArraySeparator,
+    tokenMemberSeparator,
+    tokenComment,
+    tokenError
+  };
+
+  class Token {
+  public:
+    TokenType type_;
+    Location start_;
+    Location end_;
+  };
+
+  class ErrorInfo {
+  public:
+    Token token_;
+    std::string message_;
+    Location extra_;
+  };
+
+  typedef std::deque<ErrorInfo> Errors;
+
+  bool readToken(Token& token);
+  void skipSpaces();
+  bool match(Location pattern, int patternLength);
+  bool readComment();
+  bool readCStyleComment();
+  bool readCppStyleComment();
+  bool readString();
+  bool readStringSingleQuote();
+  bool readNumber(bool checkInf);
+  bool readValue();
+  bool readObject(Token& token);
+  bool readArray(Token& token);
+  bool decodeNumber(Token& token);
+  bool decodeNumber(Token& token, Value& decoded);
+  bool decodeString(Token& token);
+  bool decodeString(Token& token, std::string& decoded);
+  bool decodeDouble(Token& token);
+  bool decodeDouble(Token& token, Value& decoded);
+  bool decodeUnicodeCodePoint(Token& token,
+                              Location& current,
+                              Location end,
+                              unsigned int& unicode);
+  bool decodeUnicodeEscapeSequence(Token& token,
+                                   Location& current,
+                                   Location end,
+                                   unsigned int& unicode);
+  bool addError(const std::string& message, Token& token, Location extra = 0);
+  bool recoverFromError(TokenType skipUntilToken);
+  bool addErrorAndRecover(const std::string& message,
+                          Token& token,
+                          TokenType skipUntilToken);
+  void skipUntilSpace();
+  Value& currentValue();
+  Char getNextChar();
+  void
+  getLocationLineAndColumn(Location location, int& line, int& column) const;
+  std::string getLocationLineAndColumn(Location location) const;
+  void addComment(Location begin, Location end, CommentPlacement placement);
+  void skipCommentTokens(Token& token);
+
+  typedef std::stack<Value*> Nodes;
+  Nodes nodes_;
+  Errors errors_;
+  std::string document_;
+  Location begin_;
+  Location end_;
+  Location current_;
+  Location lastValueEnd_;
+  Value* lastValue_;
+  std::string commentsBefore_;
+  int stackDepth_;
+
+  OurFeatures const features_;
+  bool collectComments_;
+};  // OurReader
+
+// complete copy of Reader impl, for OurReader
+
+OurReader::OurReader(OurFeatures const& features)
+    : errors_(), document_(), begin_(), end_(), current_(), lastValueEnd_(),
+      lastValue_(), commentsBefore_(),
+      stackDepth_(0),
+      features_(features), collectComments_() {
+}
+
+bool OurReader::parse(const char* beginDoc,
+                   const char* endDoc,
+                   Value& root,
+                   bool collectComments) {
+  if (!features_.allowComments_) {
+    collectComments = false;
+  }
+
+  begin_ = beginDoc;
+  end_ = endDoc;
+  collectComments_ = collectComments;
+  current_ = begin_;
+  lastValueEnd_ = 0;
+  lastValue_ = 0;
+  commentsBefore_ = "";
+  errors_.clear();
+  while (!nodes_.empty())
+    nodes_.pop();
+  nodes_.push(&root);
+
+  stackDepth_ = 0;
+  bool successful = readValue();
+  Token token;
+  skipCommentTokens(token);
+  if (features_.failIfExtra_) {
+    if (token.type_ != tokenError && token.type_ != tokenEndOfStream) {
+      addError("Extra non-whitespace after JSON value.", token);
+      return false;
+    }
+  }
+  if (collectComments_ && !commentsBefore_.empty())
+    root.setComment(commentsBefore_, commentAfter);
+  if (features_.strictRoot_) {
+    if (!root.isArray() && !root.isObject()) {
+      // Set error location to start of doc, ideally should be first token found
+      // in doc
+      token.type_ = tokenError;
+      token.start_ = beginDoc;
+      token.end_ = endDoc;
+      addError(
+          "A valid JSON document must be either an array or an object value.",
+          token);
+      return false;
+    }
+  }
+  return successful;
+}
+
+bool OurReader::readValue() {
+  if (stackDepth_ >= features_.stackLimit_) throwRuntimeError("Exceeded stackLimit in readValue().");
+  ++stackDepth_;
+  Token token;
+  skipCommentTokens(token);
+  bool successful = true;
+
+  if (collectComments_ && !commentsBefore_.empty()) {
+    currentValue().setComment(commentsBefore_, commentBefore);
+    commentsBefore_ = "";
+  }
+
+  switch (token.type_) {
+  case tokenObjectBegin:
+    successful = readObject(token);
+    currentValue().setOffsetLimit(current_ - begin_);
+    break;
+  case tokenArrayBegin:
+    successful = readArray(token);
+    currentValue().setOffsetLimit(current_ - begin_);
+    break;
+  case tokenNumber:
+    successful = decodeNumber(token);
+    break;
+  case tokenString:
+    successful = decodeString(token);
+    break;
+  case tokenTrue:
+    {
+    Value v(true);
+    currentValue().swapPayload(v);
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    }
+    break;
+  case tokenFalse:
+    {
+    Value v(false);
+    currentValue().swapPayload(v);
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    }
+    break;
+  case tokenNull:
+    {
+    Value v;
+    currentValue().swapPayload(v);
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    }
+    break;
+  case tokenNaN:
+    {
+    Value v(std::numeric_limits<double>::quiet_NaN());
+    currentValue().swapPayload(v);
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    }
+    break;
+  case tokenPosInf:
+    {
+    Value v(std::numeric_limits<double>::infinity());
+    currentValue().swapPayload(v);
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    }
+    break;
+  case tokenNegInf:
+    {
+    Value v(-std::numeric_limits<double>::infinity());
+    currentValue().swapPayload(v);
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    }
+    break;
+  case tokenArraySeparator:
+  case tokenObjectEnd:
+  case tokenArrayEnd:
+    if (features_.allowDroppedNullPlaceholders_) {
+      // "Un-read" the current token and mark the current value as a null
+      // token.
+      current_--;
+      Value v;
+      currentValue().swapPayload(v);
+      currentValue().setOffsetStart(current_ - begin_ - 1);
+      currentValue().setOffsetLimit(current_ - begin_);
+      break;
+    } // else, fall through ...
+  default:
+    currentValue().setOffsetStart(token.start_ - begin_);
+    currentValue().setOffsetLimit(token.end_ - begin_);
+    return addError("Syntax error: value, object or array expected.", token);
+  }
+
+  if (collectComments_) {
+    lastValueEnd_ = current_;
+    lastValue_ = &currentValue();
+  }
+
+  --stackDepth_;
+  return successful;
+}
+
+void OurReader::skipCommentTokens(Token& token) {
+  if (features_.allowComments_) {
+    do {
+      readToken(token);
+    } while (token.type_ == tokenComment);
+  } else {
+    readToken(token);
+  }
+}
+
+bool OurReader::readToken(Token& token) {
+  skipSpaces();
+  token.start_ = current_;
+  Char c = getNextChar();
+  bool ok = true;
+  switch (c) {
+  case '{':
+    token.type_ = tokenObjectBegin;
+    break;
+  case '}':
+    token.type_ = tokenObjectEnd;
+    break;
+  case '[':
+    token.type_ = tokenArrayBegin;
+    break;
+  case ']':
+    token.type_ = tokenArrayEnd;
+    break;
+  case '"':
+    token.type_ = tokenString;
+    ok = readString();
+    break;
+  case '\'':
+    if (features_.allowSingleQuotes_) {
+    token.type_ = tokenString;
+    ok = readStringSingleQuote();
+    break;
+    } // else continue
+  case '/':
+    token.type_ = tokenComment;
+    ok = readComment();
+    break;
+  case '0':
+  case '1':
+  case '2':
+  case '3':
+  case '4':
+  case '5':
+  case '6':
+  case '7':
+  case '8':
+  case '9':
+    token.type_ = tokenNumber;
+    readNumber(false);
+    break;
+  case '-':
+    if (readNumber(true)) {
+      token.type_ = tokenNumber;
+    } else {
+      token.type_ = tokenNegInf;
+      ok = features_.allowSpecialFloats_ && match("nfinity", 7);
+    }
+    break;
+  case 't':
+    token.type_ = tokenTrue;
+    ok = match("rue", 3);
+    break;
+  case 'f':
+    token.type_ = tokenFalse;
+    ok = match("alse", 4);
+    break;
+  case 'n':
+    token.type_ = tokenNull;
+    ok = match("ull", 3);
+    break;
+  case 'N':
+    if (features_.allowSpecialFloats_) {
+      token.type_ = tokenNaN;
+      ok = match("aN", 2);
+    } else {
+      ok = false;
+    }
+    break;
+  case 'I':
+    if (features_.allowSpecialFloats_) {
+      token.type_ = tokenPosInf;
+      ok = match("nfinity", 7);
+    } else {
+      ok = false;
+    }
+    break;
+  case ',':
+    token.type_ = tokenArraySeparator;
+    break;
+  case ':':
+    token.type_ = tokenMemberSeparator;
+    break;
+  case 0:
+    token.type_ = tokenEndOfStream;
+    break;
+  default:
+    ok = false;
+    break;
+  }
+  if (!ok)
+    token.type_ = tokenError;
+  token.end_ = current_;
+  return true;
+}
+
+void OurReader::skipSpaces() {
+  while (current_ != end_) {
+    Char c = *current_;
+    if (c == ' ' || c == '\t' || c == '\r' || c == '\n')
+      ++current_;
+    else
+      break;
+  }
+}
+
+bool OurReader::match(Location pattern, int patternLength) {
+  if (end_ - current_ < patternLength)
+    return false;
+  int index = patternLength;
+  while (index--)
+    if (current_[index] != pattern[index])
+      return false;
+  current_ += patternLength;
+  return true;
+}
+
+bool OurReader::readComment() {
+  Location commentBegin = current_ - 1;
+  Char c = getNextChar();
+  bool successful = false;
+  if (c == '*')
+    successful = readCStyleComment();
+  else if (c == '/')
+    successful = readCppStyleComment();
+  if (!successful)
+    return false;
+
+  if (collectComments_) {
+    CommentPlacement placement = commentBefore;
+    if (lastValueEnd_ && !containsNewLine(lastValueEnd_, commentBegin)) {
+      if (c != '*' || !containsNewLine(commentBegin, current_))
+        placement = commentAfterOnSameLine;
+    }
+
+    addComment(commentBegin, current_, placement);
+  }
+  return true;
+}
+
+void
+OurReader::addComment(Location begin, Location end, CommentPlacement placement) {
+  assert(collectComments_);
+  const std::string& normalized = normalizeEOL(begin, end);
+  if (placement == commentAfterOnSameLine) {
+    assert(lastValue_ != 0);
+    lastValue_->setComment(normalized, placement);
+  } else {
+    commentsBefore_ += normalized;
+  }
+}
+
+bool OurReader::readCStyleComment() {
+  while (current_ != end_) {
+    Char c = getNextChar();
+    if (c == '*' && *current_ == '/')
+      break;
+  }
+  return getNextChar() == '/';
+}
+
+bool OurReader::readCppStyleComment() {
+  while (current_ != end_) {
+    Char c = getNextChar();
+    if (c == '\n')
+      break;
+    if (c == '\r') {
+      // Consume DOS EOL. It will be normalized in addComment.
+      if (current_ != end_ && *current_ == '\n')
+        getNextChar();
+      // Break on Mac OS 9 EOL.
+      break;
+    }
+  }
+  return true;
+}
+
+bool OurReader::readNumber(bool checkInf) {
+  const char *p = current_;
+  if (checkInf && p != end_ && *p == 'I') {
+    current_ = ++p;
+    return false;
+  }
+  char c = '0'; // stopgap for already consumed character
+  // integral part
+  while (c >= '0' && c <= '9')
+    c = (current_ = p) < end_ ? *p++ : 0;
+  // fractional part
+  if (c == '.') {
+    c = (current_ = p) < end_ ? *p++ : 0;
+    while (c >= '0' && c <= '9')
+      c = (current_ = p) < end_ ? *p++ : 0;
+  }
+  // exponential part
+  if (c == 'e' || c == 'E') {
+    c = (current_ = p) < end_ ? *p++ : 0;
+    if (c == '+' || c == '-')
+      c = (current_ = p) < end_ ? *p++ : 0;
+    while (c >= '0' && c <= '9')
+      c = (current_ = p) < end_ ? *p++ : 0;
+  }
+  return true;
+}
+bool OurReader::readString() {
+  Char c = 0;
+  while (current_ != end_) {
+    c = getNextChar();
+    if (c == '\\')
+      getNextChar();
+    else if (c == '"')
+      break;
+  }
+  return c == '"';
+}
+
+
+bool OurReader::readStringSingleQuote() {
+  Char c = 0;
+  while (current_ != end_) {
+    c = getNextChar();
+    if (c == '\\')
+      getNextChar();
+    else if (c == '\'')
+      break;
+  }
+  return c == '\'';
+}
+
+bool OurReader::readObject(Token& tokenStart) {
+  Token tokenName;
+  std::string name;
+  Value init(objectValue);
+  currentValue().swapPayload(init);
+  currentValue().setOffsetStart(tokenStart.start_ - begin_);
+  while (readToken(tokenName)) {
+    bool initialTokenOk = true;
+    while (tokenName.type_ == tokenComment && initialTokenOk)
+      initialTokenOk = readToken(tokenName);
+    if (!initialTokenOk)
+      break;
+    if (tokenName.type_ == tokenObjectEnd && name.empty()) // empty object
+      return true;
+    name = "";
+    if (tokenName.type_ == tokenString) {
+      if (!decodeString(tokenName, name))
+        return recoverFromError(tokenObjectEnd);
+    } else if (tokenName.type_ == tokenNumber && features_.allowNumericKeys_) {
+      Value numberName;
+      if (!decodeNumber(tokenName, numberName))
+        return recoverFromError(tokenObjectEnd);
+      name = numberName.asString();
+    } else {
+      break;
+    }
+
+    Token colon;
+    if (!readToken(colon) || colon.type_ != tokenMemberSeparator) {
+      return addErrorAndRecover(
+          "Missing ':' after object member name", colon, tokenObjectEnd);
+    }
+    if (name.length() >= (1U<<30)) throwRuntimeError("keylength >= 2^30");
+    if (features_.rejectDupKeys_ && currentValue().isMember(name)) {
+      std::string msg = "Duplicate key: '" + name + "'";
+      return addErrorAndRecover(
+          msg, tokenName, tokenObjectEnd);
+    }
+    Value& value = currentValue()[name];
+    nodes_.push(&value);
+    bool ok = readValue();
+    nodes_.pop();
+    if (!ok) // error already set
+      return recoverFromError(tokenObjectEnd);
+
+    Token comma;
+    if (!readToken(comma) ||
+        (comma.type_ != tokenObjectEnd && comma.type_ != tokenArraySeparator &&
+         comma.type_ != tokenComment)) {
+      return addErrorAndRecover(
+          "Missing ',' or '}' in object declaration", comma, tokenObjectEnd);
+    }
+    bool finalizeTokenOk = true;
+    while (comma.type_ == tokenComment && finalizeTokenOk)
+      finalizeTokenOk = readToken(comma);
+    if (comma.type_ == tokenObjectEnd)
+      return true;
+  }
+  return addErrorAndRecover(
+      "Missing '}' or object member name", tokenName, tokenObjectEnd);
+}
+
+bool OurReader::readArray(Token& tokenStart) {
+  Value init(arrayValue);
+  currentValue().swapPayload(init);
+  currentValue().setOffsetStart(tokenStart.start_ - begin_);
+  skipSpaces();
+  if (*current_ == ']') // empty array
+  {
+    Token endArray;
+    readToken(endArray);
+    return true;
+  }
+  int index = 0;
+  for (;;) {
+    Value& value = currentValue()[index++];
+    nodes_.push(&value);
+    bool ok = readValue();
+    nodes_.pop();
+    if (!ok) // error already set
+      return recoverFromError(tokenArrayEnd);
+
+    Token token;
+    // Accept Comment after last item in the array.
+    ok = readToken(token);
+    while (token.type_ == tokenComment && ok) {
+      ok = readToken(token);
+    }
+    bool badTokenType =
+        (token.type_ != tokenArraySeparator && token.type_ != tokenArrayEnd);
+    if (!ok || badTokenType) {
+      return addErrorAndRecover(
+          "Missing ',' or ']' in array declaration", token, tokenArrayEnd);
+    }
+    if (token.type_ == tokenArrayEnd)
+      break;
+  }
+  return true;
+}
+
+bool OurReader::decodeNumber(Token& token) {
+  Value decoded;
+  if (!decodeNumber(token, decoded))
+    return false;
+  currentValue().swapPayload(decoded);
+  currentValue().setOffsetStart(token.start_ - begin_);
+  currentValue().setOffsetLimit(token.end_ - begin_);
+  return true;
+}
+
+bool OurReader::decodeNumber(Token& token, Value& decoded) {
+  // Attempts to parse the number as an integer. If the number is
+  // larger than the maximum supported value of an integer then
+  // we decode the number as a double.
+  Location current = token.start_;
+  bool isNegative = *current == '-';
+  if (isNegative)
+    ++current;
+  // TODO: Help the compiler do the div and mod at compile time or get rid of them.
+  Value::LargestUInt maxIntegerValue =
+      isNegative ? Value::LargestUInt(-Value::minLargestInt)
+                 : Value::maxLargestUInt;
+  Value::LargestUInt threshold = maxIntegerValue / 10;
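+  // Worked example for the unsigned 64-bit case: maxIntegerValue is
+  // 18446744073709551615, so threshold is 1844674407370955161 and, once the
+  // accumulated value equals the threshold, the final digit may be at most 5
+  // (maxIntegerValue % 10) without overflowing.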
+  Value::LargestUInt value = 0;
+  while (current < token.end_) {
+    Char c = *current++;
+    if (c < '0' || c > '9')
+      return decodeDouble(token, decoded);
+    Value::UInt digit(c - '0');
+    if (value >= threshold) {
+      // We've hit or exceeded the max value divided by 10 (rounded down). If
+      // a) we've only just touched the limit, b) this is the last digit, and
+      // c) it's small enough to fit in that rounding delta, we're okay.
+      // Otherwise treat this number as a double to avoid overflow.
+      if (value > threshold || current != token.end_ ||
+          digit > maxIntegerValue % 10) {
+        return decodeDouble(token, decoded);
+      }
+    }
+    value = value * 10 + digit;
+  }
+  if (isNegative)
+    decoded = -Value::LargestInt(value);
+  else if (value <= Value::LargestUInt(Value::maxInt))
+    decoded = Value::LargestInt(value);
+  else
+    decoded = value;
+  return true;
+}
+
+bool OurReader::decodeDouble(Token& token) {
+  Value decoded;
+  if (!decodeDouble(token, decoded))
+    return false;
+  currentValue().swapPayload(decoded);
+  currentValue().setOffsetStart(token.start_ - begin_);
+  currentValue().setOffsetLimit(token.end_ - begin_);
+  return true;
+}
+
+bool OurReader::decodeDouble(Token& token, Value& decoded) {
+  double value = 0;
+  const int bufferSize = 32;
+  int count;
+  int length = int(token.end_ - token.start_);
+
+  // Sanity check to avoid buffer overflow exploits.
+  if (length < 0) {
+    return addError("Unable to parse token length", token);
+  }
+
+  // Avoid using a string constant for the format control string given to
+  // sscanf, as this can cause hard to debug crashes on OS X. See here for more
+  // info:
+  //
+  //     http://developer.apple.com/library/mac/#DOCUMENTATION/DeveloperTools/gcc-4.0.1/gcc/Incompatibilities.html
+  char format[] = "%lf";
+
+  if (length <= bufferSize) {
+    Char buffer[bufferSize + 1];
+    memcpy(buffer, token.start_, length);
+    buffer[length] = 0;
+    count = sscanf(buffer, format, &value);
+  } else {
+    std::string buffer(token.start_, token.end_);
+    count = sscanf(buffer.c_str(), format, &value);
+  }
+
+  if (count != 1)
+    return addError("'" + std::string(token.start_, token.end_) +
+                        "' is not a number.",
+                    token);
+  decoded = value;
+  return true;
+}
+
+bool OurReader::decodeString(Token& token) {
+  std::string decoded_string;
+  if (!decodeString(token, decoded_string))
+    return false;
+  Value decoded(decoded_string);
+  currentValue().swapPayload(decoded);
+  currentValue().setOffsetStart(token.start_ - begin_);
+  currentValue().setOffsetLimit(token.end_ - begin_);
+  return true;
+}
+
+bool OurReader::decodeString(Token& token, std::string& decoded) {
+  decoded.reserve(token.end_ - token.start_ - 2);
+  Location current = token.start_ + 1; // skip '"'
+  Location end = token.end_ - 1;       // do not include '"'
+  while (current != end) {
+    Char c = *current++;
+    if (c == '"')
+      break;
+    else if (c == '\\') {
+      if (current == end)
+        return addError("Empty escape sequence in string", token, current);
+      Char escape = *current++;
+      switch (escape) {
+      case '"':
+        decoded += '"';
+        break;
+      case '/':
+        decoded += '/';
+        break;
+      case '\\':
+        decoded += '\\';
+        break;
+      case 'b':
+        decoded += '\b';
+        break;
+      case 'f':
+        decoded += '\f';
+        break;
+      case 'n':
+        decoded += '\n';
+        break;
+      case 'r':
+        decoded += '\r';
+        break;
+      case 't':
+        decoded += '\t';
+        break;
+      case 'u': {
+        unsigned int unicode;
+        if (!decodeUnicodeCodePoint(token, current, end, unicode))
+          return false;
+        decoded += codePointToUTF8(unicode);
+      } break;
+      default:
+        return addError("Bad escape sequence in string", token, current);
+      }
+    } else {
+      decoded += c;
+    }
+  }
+  return true;
+}
+
+bool OurReader::decodeUnicodeCodePoint(Token& token,
+                                    Location& current,
+                                    Location end,
+                                    unsigned int& unicode) {
+
+  if (!decodeUnicodeEscapeSequence(token, current, end, unicode))
+    return false;
+  if (unicode >= 0xD800 && unicode <= 0xDBFF) {
+    // surrogate pairs
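+    // e.g. "\uD834\uDD1E" decodes to U+1D11E:
+    //   0x10000 + ((0xD834 & 0x3FF) << 10) + (0xDD1E & 0x3FF) == 0x1D11E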
+    if (end - current < 6)
+      return addError(
+          "additional six characters expected to parse unicode surrogate pair.",
+          token,
+          current);
+    unsigned int surrogatePair;
+    if (*(current++) == '\\' && *(current++) == 'u') {
+      if (decodeUnicodeEscapeSequence(token, current, end, surrogatePair)) {
+        unicode = 0x10000 + ((unicode & 0x3FF) << 10) + (surrogatePair & 0x3FF);
+      } else
+        return false;
+    } else
+      return addError("expecting another \\u token to begin the second half of "
+                      "a unicode surrogate pair",
+                      token,
+                      current);
+  }
+  return true;
+}
+
+bool OurReader::decodeUnicodeEscapeSequence(Token& token,
+                                         Location& current,
+                                         Location end,
+                                         unsigned int& unicode) {
+  if (end - current < 4)
+    return addError(
+        "Bad unicode escape sequence in string: four digits expected.",
+        token,
+        current);
+  unicode = 0;
+  for (int index = 0; index < 4; ++index) {
+    Char c = *current++;
+    unicode *= 16;
+    if (c >= '0' && c <= '9')
+      unicode += c - '0';
+    else if (c >= 'a' && c <= 'f')
+      unicode += c - 'a' + 10;
+    else if (c >= 'A' && c <= 'F')
+      unicode += c - 'A' + 10;
+    else
+      return addError(
+          "Bad unicode escape sequence in string: hexadecimal digit expected.",
+          token,
+          current);
+  }
+  return true;
+}
+
+bool
+OurReader::addError(const std::string& message, Token& token, Location extra) {
+  ErrorInfo info;
+  info.token_ = token;
+  info.message_ = message;
+  info.extra_ = extra;
+  errors_.push_back(info);
+  return false;
+}
+
+bool OurReader::recoverFromError(TokenType skipUntilToken) {
+  int errorCount = int(errors_.size());
+  Token skip;
+  for (;;) {
+    if (!readToken(skip))
+      errors_.resize(errorCount); // discard errors caused by recovery
+    if (skip.type_ == skipUntilToken || skip.type_ == tokenEndOfStream)
+      break;
+  }
+  errors_.resize(errorCount);
+  return false;
+}
+
+bool OurReader::addErrorAndRecover(const std::string& message,
+                                Token& token,
+                                TokenType skipUntilToken) {
+  addError(message, token);
+  return recoverFromError(skipUntilToken);
+}
+
+Value& OurReader::currentValue() { return *(nodes_.top()); }
+
+OurReader::Char OurReader::getNextChar() {
+  if (current_ == end_)
+    return 0;
+  return *current_++;
+}
+
+void OurReader::getLocationLineAndColumn(Location location,
+                                      int& line,
+                                      int& column) const {
+  Location current = begin_;
+  Location lastLineStart = current;
+  line = 0;
+  while (current < location && current != end_) {
+    Char c = *current++;
+    if (c == '\r') {
+      if (*current == '\n')
+        ++current;
+      lastLineStart = current;
+      ++line;
+    } else if (c == '\n') {
+      lastLineStart = current;
+      ++line;
+    }
+  }
+  // column & line start at 1
+  column = int(location - lastLineStart) + 1;
+  ++line;
+}
+
+std::string OurReader::getLocationLineAndColumn(Location location) const {
+  int line, column;
+  getLocationLineAndColumn(location, line, column);
+  char buffer[18 + 16 + 16 + 1];
+  snprintf(buffer, sizeof(buffer), "Line %d, Column %d", line, column);
+  return buffer;
+}
+
+std::string OurReader::getFormattedErrorMessages() const {
+  std::string formattedMessage;
+  for (Errors::const_iterator itError = errors_.begin();
+       itError != errors_.end();
+       ++itError) {
+    const ErrorInfo& error = *itError;
+    formattedMessage +=
+        "* " + getLocationLineAndColumn(error.token_.start_) + "\n";
+    formattedMessage += "  " + error.message_ + "\n";
+    if (error.extra_)
+      formattedMessage +=
+          "See " + getLocationLineAndColumn(error.extra_) + " for detail.\n";
+  }
+  return formattedMessage;
+}
+
+std::vector<OurReader::StructuredError> OurReader::getStructuredErrors() const {
+  std::vector<OurReader::StructuredError> allErrors;
+  for (Errors::const_iterator itError = errors_.begin();
+       itError != errors_.end();
+       ++itError) {
+    const ErrorInfo& error = *itError;
+    OurReader::StructuredError structured;
+    structured.offset_start = error.token_.start_ - begin_;
+    structured.offset_limit = error.token_.end_ - begin_;
+    structured.message = error.message_;
+    allErrors.push_back(structured);
+  }
+  return allErrors;
+}
+
+bool OurReader::pushError(const Value& value, const std::string& message) {
+  size_t length = end_ - begin_;
+  if(value.getOffsetStart() > length
+    || value.getOffsetLimit() > length)
+    return false;
+  Token token;
+  token.type_ = tokenError;
+  token.start_ = begin_ + value.getOffsetStart();
+  token.end_ = begin_ + value.getOffsetLimit(); // offsets are relative to begin_
+  ErrorInfo info;
+  info.token_ = token;
+  info.message_ = message;
+  info.extra_ = 0;
+  errors_.push_back(info);
+  return true;
+}
+
+bool OurReader::pushError(const Value& value, const std::string& message, const Value& extra) {
+  size_t length = end_ - begin_;
+  if(value.getOffsetStart() > length
+    || value.getOffsetLimit() > length
+    || extra.getOffsetLimit() > length)
+    return false;
+  Token token;
+  token.type_ = tokenError;
+  token.start_ = begin_ + value.getOffsetStart();
+  token.end_ = begin_ + value.getOffsetLimit();
+  ErrorInfo info;
+  info.token_ = token;
+  info.message_ = message;
+  info.extra_ = begin_ + extra.getOffsetStart();
+  errors_.push_back(info);
+  return true;
+}
+
+bool OurReader::good() const {
+  return !errors_.size();
+}
+
+
+class OurCharReader : public CharReader {
+  bool const collectComments_;
+  OurReader reader_;
+public:
+  OurCharReader(
+    bool collectComments,
+    OurFeatures const& features)
+  : collectComments_(collectComments)
+  , reader_(features)
+  {}
+  bool parse(
+      char const* beginDoc, char const* endDoc,
+      Value* root, std::string* errs) override {
+    bool ok = reader_.parse(beginDoc, endDoc, *root, collectComments_);
+    if (errs) {
+      *errs = reader_.getFormattedErrorMessages();
+    }
+    return ok;
+  }
+};
+
+CharReaderBuilder::CharReaderBuilder()
+{
+  setDefaults(&settings_);
+}
+CharReaderBuilder::~CharReaderBuilder()
+{}
+CharReader* CharReaderBuilder::newCharReader() const
+{
+  bool collectComments = settings_["collectComments"].asBool();
+  OurFeatures features = OurFeatures::all();
+  features.allowComments_ = settings_["allowComments"].asBool();
+  features.strictRoot_ = settings_["strictRoot"].asBool();
+  features.allowDroppedNullPlaceholders_ = settings_["allowDroppedNullPlaceholders"].asBool();
+  features.allowNumericKeys_ = settings_["allowNumericKeys"].asBool();
+  features.allowSingleQuotes_ = settings_["allowSingleQuotes"].asBool();
+  features.stackLimit_ = settings_["stackLimit"].asInt();
+  features.failIfExtra_ = settings_["failIfExtra"].asBool();
+  features.rejectDupKeys_ = settings_["rejectDupKeys"].asBool();
+  features.allowSpecialFloats_ = settings_["allowSpecialFloats"].asBool();
+  return new OurCharReader(collectComments, features);
+}
+static void getValidReaderKeys(std::set<std::string>* valid_keys)
+{
+  valid_keys->clear();
+  valid_keys->insert("collectComments");
+  valid_keys->insert("allowComments");
+  valid_keys->insert("strictRoot");
+  valid_keys->insert("allowDroppedNullPlaceholders");
+  valid_keys->insert("allowNumericKeys");
+  valid_keys->insert("allowSingleQuotes");
+  valid_keys->insert("stackLimit");
+  valid_keys->insert("failIfExtra");
+  valid_keys->insert("rejectDupKeys");
+  valid_keys->insert("allowSpecialFloats");
+}
+bool CharReaderBuilder::validate(Json::Value* invalid) const
+{
+  Json::Value my_invalid;
+  if (!invalid) invalid = &my_invalid;  // so we do not need to test for NULL
+  Json::Value& inv = *invalid;
+  std::set<std::string> valid_keys;
+  getValidReaderKeys(&valid_keys);
+  Value::Members keys = settings_.getMemberNames();
+  size_t n = keys.size();
+  for (size_t i = 0; i < n; ++i) {
+    std::string const& key = keys[i];
+    if (valid_keys.find(key) == valid_keys.end()) {
+      inv[key] = settings_[key];
+    }
+  }
+  return 0u == inv.size();
+}
+Value& CharReaderBuilder::operator[](std::string key)
+{
+  return settings_[key];
+}
+// static
+void CharReaderBuilder::strictMode(Json::Value* settings)
+{
+//! [CharReaderBuilderStrictMode]
+  (*settings)["allowComments"] = false;
+  (*settings)["strictRoot"] = true;
+  (*settings)["allowDroppedNullPlaceholders"] = false;
+  (*settings)["allowNumericKeys"] = false;
+  (*settings)["allowSingleQuotes"] = false;
+  (*settings)["stackLimit"] = 1000;
+  (*settings)["failIfExtra"] = true;
+  (*settings)["rejectDupKeys"] = true;
+  (*settings)["allowSpecialFloats"] = false;
+//! [CharReaderBuilderStrictMode]
+}
+// static
+void CharReaderBuilder::setDefaults(Json::Value* settings)
+{
+//! [CharReaderBuilderDefaults]
+  (*settings)["collectComments"] = true;
+  (*settings)["allowComments"] = true;
+  (*settings)["strictRoot"] = false;
+  (*settings)["allowDroppedNullPlaceholders"] = false;
+  (*settings)["allowNumericKeys"] = false;
+  (*settings)["allowSingleQuotes"] = false;
+  (*settings)["stackLimit"] = 1000;
+  (*settings)["failIfExtra"] = false;
+  (*settings)["rejectDupKeys"] = false;
+  (*settings)["allowSpecialFloats"] = false;
+//! [CharReaderBuilderDefaults]
+}
+
+//////////////////////////////////
+// global functions
+
+bool parseFromStream(
+    CharReader::Factory const& fact, std::istream& sin,
+    Value* root, std::string* errs)
+{
+  std::ostringstream ssin;
+  ssin << sin.rdbuf();
+  std::string doc = ssin.str();
+  char const* begin = doc.data();
+  char const* end = begin + doc.size();
+  // Note that we do not actually need a null-terminator.
+  CharReaderPtr const reader(fact.newCharReader());
+  return reader->parse(begin, end, root, errs);
+}
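+// Typical usage of the builder-based API defined above (a minimal sketch; the
+// stream and file name are placeholders):
+//
+//   Json::CharReaderBuilder builder;
+//   Json::Value root;
+//   std::string errs;
+//   std::ifstream config("config.json");
+//   if (!Json::parseFromStream(builder, config, &root, &errs)) {
+//     std::cerr << errs << std::endl;
+//   }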
+
+std::istream& operator>>(std::istream& sin, Value& root) {
+  CharReaderBuilder b;
+  std::string errs;
+  bool ok = parseFromStream(b, sin, &root, &errs);
+  if (!ok) {
+    fprintf(stderr,
+            "Error from reader: %s",
+            errs.c_str());
+
+    throwRuntimeError(errs);
+  }
+  return sin;
+}
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_reader.cpp
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_valueiterator.inl
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+// included by json_value.cpp
+
+namespace Json {
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueIteratorBase
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueIteratorBase::ValueIteratorBase()
+    : current_(), isNull_(true) {
+}
+
+ValueIteratorBase::ValueIteratorBase(
+    const Value::ObjectValues::iterator& current)
+    : current_(current), isNull_(false) {}
+
+Value& ValueIteratorBase::deref() const {
+  return current_->second;
+}
+
+void ValueIteratorBase::increment() {
+  ++current_;
+}
+
+void ValueIteratorBase::decrement() {
+  --current_;
+}
+
+ValueIteratorBase::difference_type
+ValueIteratorBase::computeDistance(const SelfType& other) const {
+#ifdef JSON_USE_CPPTL_SMALLMAP
+  return other.current_ - current_;
+#else
+  // Iterators for null values are initialized using the default
+  // constructor, which initializes current_ to the default
+  // std::map::iterator. As begin() and end() are two instances
+  // of the default std::map::iterator, they cannot be compared.
+  // To allow this, we handle this comparison explicitly.
+  if (isNull_ && other.isNull_) {
+    return 0;
+  }
+
+  // Usage of std::distance is not portable (it does not compile with the
+  // Sun Studio 12 RogueWave STL, which is the one used by default).
+  // Use a portable hand-made version for non-random-access iterators instead:
+  //   return difference_type( std::distance( current_, other.current_ ) );
+  difference_type myDistance = 0;
+  for (Value::ObjectValues::iterator it = current_; it != other.current_;
+       ++it) {
+    ++myDistance;
+  }
+  return myDistance;
+#endif
+}
+
+bool ValueIteratorBase::isEqual(const SelfType& other) const {
+  if (isNull_) {
+    return other.isNull_;
+  }
+  return current_ == other.current_;
+}
+
+void ValueIteratorBase::copy(const SelfType& other) {
+  current_ = other.current_;
+  isNull_ = other.isNull_;
+}
+
+Value ValueIteratorBase::key() const {
+  const Value::CZString czstring = (*current_).first;
+  if (czstring.data()) {
+    if (czstring.isStaticString())
+      return Value(StaticString(czstring.data()));
+    return Value(czstring.data(), czstring.data() + czstring.length());
+  }
+  return Value(czstring.index());
+}
+
+UInt ValueIteratorBase::index() const {
+  const Value::CZString czstring = (*current_).first;
+  if (!czstring.data())
+    return czstring.index();
+  return Value::UInt(-1);
+}
+
+std::string ValueIteratorBase::name() const {
+  char const* keey;
+  char const* end;
+  keey = memberName(&end);
+  if (!keey) return std::string();
+  return std::string(keey, end);
+}
+
+char const* ValueIteratorBase::memberName() const {
+  const char* cname = (*current_).first.data();
+  return cname ? cname : "";
+}
+
+char const* ValueIteratorBase::memberName(char const** end) const {
+  const char* cname = (*current_).first.data();
+  if (!cname) {
+    *end = NULL;
+    return NULL;
+  }
+  *end = cname + (*current_).first.length();
+  return cname;
+}
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueConstIterator
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueConstIterator::ValueConstIterator() {}
+
+ValueConstIterator::ValueConstIterator(
+    const Value::ObjectValues::iterator& current)
+    : ValueIteratorBase(current) {}
+
+ValueConstIterator::ValueConstIterator(ValueIterator const& other)
+    : ValueIteratorBase(other) {}
+
+ValueConstIterator& ValueConstIterator::
+operator=(const ValueIteratorBase& other) {
+  copy(other);
+  return *this;
+}
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class ValueIterator
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+ValueIterator::ValueIterator() {}
+
+ValueIterator::ValueIterator(const Value::ObjectValues::iterator& current)
+    : ValueIteratorBase(current) {}
+
+ValueIterator::ValueIterator(const ValueConstIterator& other)
+    : ValueIteratorBase(other) {
+  throwRuntimeError("ConstIterator to Iterator should never be allowed.");
+}
+
+ValueIterator::ValueIterator(const ValueIterator& other)
+    : ValueIteratorBase(other) {}
+
+ValueIterator& ValueIterator::operator=(const SelfType& other) {
+  copy(other);
+  return *this;
+}
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_valueiterator.inl
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_value.cpp
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2011 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include <json/assertions.h>
+#include <json/value.h>
+#include <json/writer.h>
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <math.h>
+#include <sstream>
+#include <utility>
+#include <cstring>
+#include <cassert>
+#ifdef JSON_USE_CPPTL
+#include <cpptl/conststring.h>
+#endif
+#include <cstddef> // size_t
+#include <algorithm> // min()
+
+#define JSON_ASSERT_UNREACHABLE assert(false)
+
+namespace Json {
+
+// This is a workaround to avoid the static initialization of Value::null.
+// kNull must be word-aligned to avoid crashing on ARM.  We use an alignment of
+// 8 (instead of 4) as a bit of future-proofing.
+#if defined(__ARMEL__)
+#define ALIGNAS(byte_alignment) __attribute__((aligned(byte_alignment)))
+#else
+#define ALIGNAS(byte_alignment)
+#endif
+static const unsigned char ALIGNAS(8) kNull[sizeof(Value)] = { 0 };
+const unsigned char& kNullRef = kNull[0];
+const Value& Value::null = reinterpret_cast<const Value&>(kNullRef);
+const Value& Value::nullRef = null;
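+// The all-zero bytes above are equivalent to a default-constructed nullValue
+// (see Value::Value(ValueType) below), so reading through these references is
+// safe; they must never be written to.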
+
+const Int Value::minInt = Int(~(UInt(-1) / 2));
+const Int Value::maxInt = Int(UInt(-1) / 2);
+const UInt Value::maxUInt = UInt(-1);
+#if defined(JSON_HAS_INT64)
+const Int64 Value::minInt64 = Int64(~(UInt64(-1) / 2));
+const Int64 Value::maxInt64 = Int64(UInt64(-1) / 2);
+const UInt64 Value::maxUInt64 = UInt64(-1);
+// The constant is hard-coded because some compilers have trouble
+// converting Value::maxUInt64 to a double correctly (AIX/xlC).
+// Assumes that UInt64 is a 64-bit integer.
+static const double maxUInt64AsDouble = 18446744073709551615.0;
+#endif // defined(JSON_HAS_INT64)
+const LargestInt Value::minLargestInt = LargestInt(~(LargestUInt(-1) / 2));
+const LargestInt Value::maxLargestInt = LargestInt(LargestUInt(-1) / 2);
+const LargestUInt Value::maxLargestUInt = LargestUInt(-1);
+
+#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+template <typename T, typename U>
+static inline bool InRange(double d, T min, U max) {
+  return d >= min && d <= max;
+}
+#else  // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
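+// Fallback used when the compiler cannot convert UInt64 to double directly
+// (see JSON_USE_INT64_DOUBLE_CONVERSION): halve the value so it fits in Int64,
+// convert, double it, then add back the low bit.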
+static inline double integerToDouble(Json::UInt64 value) {
+  return static_cast<double>(Int64(value / 2)) * 2.0 + Int64(value & 1);
+}
+
+template <typename T> static inline double integerToDouble(T value) {
+  return static_cast<double>(value);
+}
+
+template <typename T, typename U>
+static inline bool InRange(double d, T min, U max) {
+  return d >= integerToDouble(min) && d <= integerToDouble(max);
+}
+#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+
+/** Duplicates the specified string value.
+ * @param value Pointer to the string to duplicate. Must be zero-terminated if
+ *              length is "unknown".
+ * @param length Length of the value. If equal to "unknown", it is
+ *               computed using strlen(value).
+ * @return Pointer to the duplicated string.
+ */
+static inline char* duplicateStringValue(const char* value,
+                                         size_t length) {
+  // Avoid an integer overflow in the call to malloc below by limiting length
+  // to a sane value.
+  if (length >= (size_t)Value::maxInt)
+    length = Value::maxInt - 1;
+
+  char* newString = static_cast<char*>(malloc(length + 1));
+  if (newString == NULL) {
+    throwRuntimeError(
+        "in Json::Value::duplicateStringValue(): "
+        "Failed to allocate string value buffer");
+  }
+  memcpy(newString, value, length);
+  newString[length] = 0;
+  return newString;
+}
+
+/* Record the length as a prefix.
+ */
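+// Resulting layout: [unsigned length][length bytes of data]['\0'];
+// decodePrefixedString() below reads it back.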
+static inline char* duplicateAndPrefixStringValue(
+    const char* value,
+    unsigned int length)
+{
+  // Avoid an integer overflow in the call to malloc below by limiting length
+  // to a sane value.
+  JSON_ASSERT_MESSAGE(length <= (unsigned)Value::maxInt - sizeof(unsigned) - 1U,
+                      "in Json::Value::duplicateAndPrefixStringValue(): "
+                      "length too big for prefixing");
+  unsigned actualLength = length + static_cast<unsigned>(sizeof(unsigned)) + 1U;
+  char* newString = static_cast<char*>(malloc(actualLength));
+  if (newString == 0) {
+    throwRuntimeError(
+        "in Json::Value::duplicateAndPrefixStringValue(): "
+        "Failed to allocate string value buffer");
+  }
+  *reinterpret_cast<unsigned*>(newString) = length;
+  memcpy(newString + sizeof(unsigned), value, length);
+  newString[actualLength - 1U] = 0; // to avoid buffer over-run accidents by users later
+  return newString;
+}
+inline static void decodePrefixedString(
+    bool isPrefixed, char const* prefixed,
+    unsigned* length, char const** value)
+{
+  if (!isPrefixed) {
+    *length = static_cast<unsigned>(strlen(prefixed));
+    *value = prefixed;
+  } else {
+    *length = *reinterpret_cast<unsigned const*>(prefixed);
+    *value = prefixed + sizeof(unsigned);
+  }
+}
+/** Free the string duplicated by duplicateStringValue()/duplicateAndPrefixStringValue().
+ */
+static inline void releaseStringValue(char* value) { free(value); }
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// ValueInternals...
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+#if !defined(JSON_IS_AMALGAMATION)
+
+#include "json_valueiterator.inl"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json {
+
+Exception::Exception(std::string const& msg)
+  : msg_(msg)
+{}
+Exception::~Exception() throw()
+{}
+char const* Exception::what() const throw()
+{
+  return msg_.c_str();
+}
+RuntimeError::RuntimeError(std::string const& msg)
+  : Exception(msg)
+{}
+LogicError::LogicError(std::string const& msg)
+  : Exception(msg)
+{}
+void throwRuntimeError(std::string const& msg)
+{
+  throw RuntimeError(msg);
+}
+void throwLogicError(std::string const& msg)
+{
+  throw LogicError(msg);
+}
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class Value::CommentInfo
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+Value::CommentInfo::CommentInfo() : comment_(0) {}
+
+Value::CommentInfo::~CommentInfo() {
+  if (comment_)
+    releaseStringValue(comment_);
+}
+
+void Value::CommentInfo::setComment(const char* text, size_t len) {
+  if (comment_) {
+    releaseStringValue(comment_);
+    comment_ = 0;
+  }
+  JSON_ASSERT(text != 0);
+  JSON_ASSERT_MESSAGE(
+      text[0] == '\0' || text[0] == '/',
+      "in Json::Value::setComment(): Comments must start with /");
+  // It seems that /**/ style comments are acceptable as well.
+  comment_ = duplicateStringValue(text, len);
+}
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class Value::CZString
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+// Notes: policy_ records whether the stored string was dynamically allocated,
+// i.e. whether the CZString owns it and must free it in its destructor.
+
+Value::CZString::CZString(ArrayIndex aindex) : cstr_(0), index_(aindex) {}
+
+Value::CZString::CZString(char const* str, unsigned ulength, DuplicationPolicy allocate)
+    : cstr_(str) {
+  // allocate != duplicate: this constructor only stores the pointer; any
+  // duplication happens later, in the copy constructor.
+  storage_.policy_ = allocate & 0x3;
+  storage_.length_ = ulength & 0x3FFFFFFF;
+}
+
+Value::CZString::CZString(const CZString& other)
+    : cstr_(other.storage_.policy_ != noDuplication && other.cstr_ != 0
+                ? duplicateStringValue(other.cstr_, other.storage_.length_)
+                : other.cstr_) {
+  storage_.policy_ = (other.cstr_
+                 ? (static_cast<DuplicationPolicy>(other.storage_.policy_) == noDuplication
+                     ? noDuplication : duplicate)
+                 : static_cast<DuplicationPolicy>(other.storage_.policy_));
+  storage_.length_ = other.storage_.length_;
+}
+
+#if JSON_HAS_RVALUE_REFERENCES
+Value::CZString::CZString(CZString&& other)
+  : cstr_(other.cstr_), index_(other.index_) {
+  other.cstr_ = nullptr;
+}
+#endif
+
+Value::CZString::~CZString() {
+  if (cstr_ && storage_.policy_ == duplicate)
+    releaseStringValue(const_cast<char*>(cstr_));
+}
+
+void Value::CZString::swap(CZString& other) {
+  std::swap(cstr_, other.cstr_);
+  std::swap(index_, other.index_);
+}
+
+Value::CZString& Value::CZString::operator=(CZString other) {
+  swap(other);
+  return *this;
+}
+
+bool Value::CZString::operator<(const CZString& other) const {
+  if (!cstr_) return index_ < other.index_;
+  //return strcmp(cstr_, other.cstr_) < 0;
+  // Assume both are strings.
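+  // Compare byte-wise, then by length, so a string that is a prefix of
+  // another sorts first.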
+  unsigned this_len = this->storage_.length_;
+  unsigned other_len = other.storage_.length_;
+  unsigned min_len = std::min(this_len, other_len);
+  int comp = memcmp(this->cstr_, other.cstr_, min_len);
+  if (comp < 0) return true;
+  if (comp > 0) return false;
+  return (this_len < other_len);
+}
+
+bool Value::CZString::operator==(const CZString& other) const {
+  if (!cstr_) return index_ == other.index_;
+  //return strcmp(cstr_, other.cstr_) == 0;
+  // Assume both are strings.
+  unsigned this_len = this->storage_.length_;
+  unsigned other_len = other.storage_.length_;
+  if (this_len != other_len) return false;
+  int comp = memcmp(this->cstr_, other.cstr_, this_len);
+  return comp == 0;
+}
+
+ArrayIndex Value::CZString::index() const { return index_; }
+
+//const char* Value::CZString::c_str() const { return cstr_; }
+const char* Value::CZString::data() const { return cstr_; }
+unsigned Value::CZString::length() const { return storage_.length_; }
+bool Value::CZString::isStaticString() const { return storage_.policy_ == noDuplication; }
+
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// class Value::Value
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+// //////////////////////////////////////////////////////////////////
+
+/*! \internal Default constructor initialization must be equivalent to:
+ * memset( this, 0, sizeof(Value) )
+ * This optimization is used in ValueInternalMap fast allocator.
+ */
+Value::Value(ValueType vtype) {
+  initBasic(vtype);
+  switch (vtype) {
+  case nullValue:
+    break;
+  case intValue:
+  case uintValue:
+    value_.int_ = 0;
+    break;
+  case realValue:
+    value_.real_ = 0.0;
+    break;
+  case stringValue:
+    value_.string_ = 0;
+    break;
+  case arrayValue:
+  case objectValue:
+    value_.map_ = new ObjectValues();
+    break;
+  case booleanValue:
+    value_.bool_ = false;
+    break;
+  default:
+    JSON_ASSERT_UNREACHABLE;
+  }
+}
+
+Value::Value(Int value) {
+  initBasic(intValue);
+  value_.int_ = value;
+}
+
+Value::Value(UInt value) {
+  initBasic(uintValue);
+  value_.uint_ = value;
+}
+#if defined(JSON_HAS_INT64)
+Value::Value(Int64 value) {
+  initBasic(intValue);
+  value_.int_ = value;
+}
+Value::Value(UInt64 value) {
+  initBasic(uintValue);
+  value_.uint_ = value;
+}
+#endif // defined(JSON_HAS_INT64)
+
+Value::Value(double value) {
+  initBasic(realValue);
+  value_.real_ = value;
+}
+
+Value::Value(const char* value) {
+  initBasic(stringValue, true);
+  value_.string_ = duplicateAndPrefixStringValue(value, static_cast<unsigned>(strlen(value)));
+}
+
+Value::Value(const char* beginValue, const char* endValue) {
+  initBasic(stringValue, true);
+  value_.string_ =
+      duplicateAndPrefixStringValue(beginValue, static_cast<unsigned>(endValue - beginValue));
+}
+
+Value::Value(const std::string& value) {
+  initBasic(stringValue, true);
+  value_.string_ =
+      duplicateAndPrefixStringValue(value.data(), static_cast<unsigned>(value.length()));
+}
+
+Value::Value(const StaticString& value) {
+  initBasic(stringValue);
+  value_.string_ = const_cast<char*>(value.c_str());
+}
+
+#ifdef JSON_USE_CPPTL
+Value::Value(const CppTL::ConstString& value) {
+  initBasic(stringValue, true);
+  value_.string_ = duplicateAndPrefixStringValue(value, static_cast<unsigned>(value.length()));
+}
+#endif
+
+Value::Value(bool value) {
+  initBasic(booleanValue);
+  value_.bool_ = value;
+}
+
+Value::Value(Value const& other)
+    : type_(other.type_), allocated_(false)
+      ,
+      comments_(0), start_(other.start_), limit_(other.limit_)
+{
+  switch (type_) {
+  case nullValue:
+  case intValue:
+  case uintValue:
+  case realValue:
+  case booleanValue:
+    value_ = other.value_;
+    break;
+  case stringValue:
+    if (other.value_.string_ && other.allocated_) {
+      unsigned len;
+      char const* str;
+      decodePrefixedString(other.allocated_, other.value_.string_,
+          &len, &str);
+      value_.string_ = duplicateAndPrefixStringValue(str, len);
+      allocated_ = true;
+    } else {
+      value_.string_ = other.value_.string_;
+      allocated_ = false;
+    }
+    break;
+  case arrayValue:
+  case objectValue:
+    value_.map_ = new ObjectValues(*other.value_.map_);
+    break;
+  default:
+    JSON_ASSERT_UNREACHABLE;
+  }
+  if (other.comments_) {
+    comments_ = new CommentInfo[numberOfCommentPlacement];
+    for (int comment = 0; comment < numberOfCommentPlacement; ++comment) {
+      const CommentInfo& otherComment = other.comments_[comment];
+      if (otherComment.comment_)
+        comments_[comment].setComment(
+            otherComment.comment_, strlen(otherComment.comment_));
+    }
+  }
+}
+
+#if JSON_HAS_RVALUE_REFERENCES
+// Move constructor
+Value::Value(Value&& other) {
+  initBasic(nullValue);
+  swap(other);
+}
+#endif
+
+Value::~Value() {
+  switch (type_) {
+  case nullValue:
+  case intValue:
+  case uintValue:
+  case realValue:
+  case booleanValue:
+    break;
+  case stringValue:
+    if (allocated_)
+      releaseStringValue(value_.string_);
+    break;
+  case arrayValue:
+  case objectValue:
+    delete value_.map_;
+    break;
+  default:
+    JSON_ASSERT_UNREACHABLE;
+  }
+
+  if (comments_)
+    delete[] comments_;
+}
+
+Value& Value::operator=(Value other) {
+  swap(other);
+  return *this;
+}
+
+void Value::swapPayload(Value& other) {
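+  // type_ and allocated_ are narrow bit-fields in Value, so std::swap()
+  // cannot be applied to them directly; swap through temporaries instead.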
+  ValueType temp = type_;
+  type_ = other.type_;
+  other.type_ = temp;
+  std::swap(value_, other.value_);
+  int temp2 = allocated_;
+  allocated_ = other.allocated_;
+  other.allocated_ = temp2 & 0x1;
+}
+
+void Value::swap(Value& other) {
+  swapPayload(other);
+  std::swap(comments_, other.comments_);
+  std::swap(start_, other.start_);
+  std::swap(limit_, other.limit_);
+}
+
+ValueType Value::type() const { return type_; }
+
+int Value::compare(const Value& other) const {
+  if (*this < other)
+    return -1;
+  if (*this > other)
+    return 1;
+  return 0;
+}
+
+bool Value::operator<(const Value& other) const {
+  int typeDelta = type_ - other.type_;
+  if (typeDelta)
+    return typeDelta < 0 ? true : false;
+  switch (type_) {
+  case nullValue:
+    return false;
+  case intValue:
+    return value_.int_ < other.value_.int_;
+  case uintValue:
+    return value_.uint_ < other.value_.uint_;
+  case realValue:
+    return value_.real_ < other.value_.real_;
+  case booleanValue:
+    return value_.bool_ < other.value_.bool_;
+  case stringValue:
+  {
+    if ((value_.string_ == 0) || (other.value_.string_ == 0)) {
+      if (other.value_.string_) return true;
+      else return false;
+    }
+    unsigned this_len;
+    unsigned other_len;
+    char const* this_str;
+    char const* other_str;
+    decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str);
+    decodePrefixedString(other.allocated_, other.value_.string_, &other_len, &other_str);
+    unsigned min_len = std::min(this_len, other_len);
+    int comp = memcmp(this_str, other_str, min_len);
+    if (comp < 0) return true;
+    if (comp > 0) return false;
+    return (this_len < other_len);
+  }
+  case arrayValue:
+  case objectValue: {
+    int delta = int(value_.map_->size() - other.value_.map_->size());
+    if (delta)
+      return delta < 0;
+    return (*value_.map_) < (*other.value_.map_);
+  }
+  default:
+    JSON_ASSERT_UNREACHABLE;
+  }
+  return false; // unreachable
+}
+
+bool Value::operator<=(const Value& other) const { return !(other < *this); }
+
+bool Value::operator>=(const Value& other) const { return !(*this < other); }
+
+bool Value::operator>(const Value& other) const { return other < *this; }
+
+bool Value::operator==(const Value& other) const {
+  // if ( type_ != other.type_ )
+  // GCC 2.95.3 says:
+  // attempt to take address of bit-field structure member `Json::Value::type_'
+  // Beats me, but a temp solves the problem.
+  int temp = other.type_;
+  if (type_ != temp)
+    return false;
+  switch (type_) {
+  case nullValue:
+    return true;
+  case intValue:
+    return value_.int_ == other.value_.int_;
+  case uintValue:
+    return value_.uint_ == other.value_.uint_;
+  case realValue:
+    return value_.real_ == other.value_.real_;
+  case booleanValue:
+    return value_.bool_ == other.value_.bool_;
+  case stringValue:
+  {
+    if ((value_.string_ == 0) || (other.value_.string_ == 0)) {
+      return (value_.string_ == other.value_.string_);
+    }
+    unsigned this_len;
+    unsigned other_len;
+    char const* this_str;
+    char const* other_str;
+    decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str);
+    decodePrefixedString(other.allocated_, other.value_.string_, &other_len, &other_str);
+    if (this_len != other_len) return false;
+    int comp = memcmp(this_str, other_str, this_len);
+    return comp == 0;
+  }
+  case arrayValue:
+  case objectValue:
+    return value_.map_->size() == other.value_.map_->size() &&
+           (*value_.map_) == (*other.value_.map_);
+  default:
+    JSON_ASSERT_UNREACHABLE;
+  }
+  return false; // unreachable
+}
+
+bool Value::operator!=(const Value& other) const { return !(*this == other); }
+
+const char* Value::asCString() const {
+  JSON_ASSERT_MESSAGE(type_ == stringValue,
+                      "in Json::Value::asCString(): requires stringValue");
+  if (value_.string_ == 0) return 0;
+  unsigned this_len;
+  char const* this_str;
+  decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str);
+  return this_str;
+}
+
+bool Value::getString(char const** str, char const** cend) const {
+  if (type_ != stringValue) return false;
+  if (value_.string_ == 0) return false;
+  unsigned length;
+  decodePrefixedString(this->allocated_, this->value_.string_, &length, str);
+  *cend = *str + length;
+  return true;
+}
+
+std::string Value::asString() const {
+  switch (type_) {
+  case nullValue:
+    return "";
+  case stringValue:
+  {
+    if (value_.string_ == 0) return "";
+    unsigned this_len;
+    char const* this_str;
+    decodePrefixedString(this->allocated_, this->value_.string_, &this_len, &this_str);
+    return std::string(this_str, this_len);
+  }
+  case booleanValue:
+    return value_.bool_ ? "true" : "false";
+  case intValue:
+    return valueToString(value_.int_);
+  case uintValue:
+    return valueToString(value_.uint_);
+  case realValue:
+    return valueToString(value_.real_);
+  default:
+    JSON_FAIL_MESSAGE("Type is not convertible to string");
+  }
+}
+
+#ifdef JSON_USE_CPPTL
+CppTL::ConstString Value::asConstString() const {
+  unsigned len;
+  char const* str;
+  decodePrefixedString(allocated_, value_.string_,
+      &len, &str);
+  return CppTL::ConstString(str, len);
+}
+#endif
+
+Value::Int Value::asInt() const {
+  switch (type_) {
+  case intValue:
+    JSON_ASSERT_MESSAGE(isInt(), "LargestInt out of Int range");
+    return Int(value_.int_);
+  case uintValue:
+    JSON_ASSERT_MESSAGE(isInt(), "LargestUInt out of Int range");
+    return Int(value_.uint_);
+  case realValue:
+    JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt, maxInt),
+                        "double out of Int range");
+    return Int(value_.real_);
+  case nullValue:
+    return 0;
+  case booleanValue:
+    return value_.bool_ ? 1 : 0;
+  default:
+    break;
+  }
+  JSON_FAIL_MESSAGE("Value is not convertible to Int.");
+}
+
+Value::UInt Value::asUInt() const {
+  switch (type_) {
+  case intValue:
+    JSON_ASSERT_MESSAGE(isUInt(), "LargestInt out of UInt range");
+    return UInt(value_.int_);
+  case uintValue:
+    JSON_ASSERT_MESSAGE(isUInt(), "LargestUInt out of UInt range");
+    return UInt(value_.uint_);
+  case realValue:
+    JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt),
+                        "double out of UInt range");
+    return UInt(value_.real_);
+  case nullValue:
+    return 0;
+  case booleanValue:
+    return value_.bool_ ? 1 : 0;
+  default:
+    break;
+  }
+  JSON_FAIL_MESSAGE("Value is not convertible to UInt.");
+}
+
+#if defined(JSON_HAS_INT64)
+
+Value::Int64 Value::asInt64() const {
+  switch (type_) {
+  case intValue:
+    return Int64(value_.int_);
+  case uintValue:
+    JSON_ASSERT_MESSAGE(isInt64(), "LargestUInt out of Int64 range");
+    return Int64(value_.uint_);
+  case realValue:
+    JSON_ASSERT_MESSAGE(InRange(value_.real_, minInt64, maxInt64),
+                        "double out of Int64 range");
+    return Int64(value_.real_);
+  case nullValue:
+    return 0;
+  case booleanValue:
+    return value_.bool_ ? 1 : 0;
+  default:
+    break;
+  }
+  JSON_FAIL_MESSAGE("Value is not convertible to Int64.");
+}
+
+Value::UInt64 Value::asUInt64() const {
+  switch (type_) {
+  case intValue:
+    JSON_ASSERT_MESSAGE(isUInt64(), "LargestInt out of UInt64 range");
+    return UInt64(value_.int_);
+  case uintValue:
+    return UInt64(value_.uint_);
+  case realValue:
+    JSON_ASSERT_MESSAGE(InRange(value_.real_, 0, maxUInt64),
+                        "double out of UInt64 range");
+    return UInt64(value_.real_);
+  case nullValue:
+    return 0;
+  case booleanValue:
+    return value_.bool_ ? 1 : 0;
+  default:
+    break;
+  }
+  JSON_FAIL_MESSAGE("Value is not convertible to UInt64.");
+}
+#endif // if defined(JSON_HAS_INT64)
+
+LargestInt Value::asLargestInt() const {
+#if defined(JSON_NO_INT64)
+  return asInt();
+#else
+  return asInt64();
+#endif
+}
+
+LargestUInt Value::asLargestUInt() const {
+#if defined(JSON_NO_INT64)
+  return asUInt();
+#else
+  return asUInt64();
+#endif
+}
+
+double Value::asDouble() const {
+  switch (type_) {
+  case intValue:
+    return static_cast<double>(value_.int_);
+  case uintValue:
+#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+    return static_cast<double>(value_.uint_);
+#else  // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+    return integerToDouble(value_.uint_);
+#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+  case realValue:
+    return value_.real_;
+  case nullValue:
+    return 0.0;
+  case booleanValue:
+    return value_.bool_ ? 1.0 : 0.0;
+  default:
+    break;
+  }
+  JSON_FAIL_MESSAGE("Value is not convertible to double.");
+}
+
+float Value::asFloat() const {
+  switch (type_) {
+  case intValue:
+    return static_cast<float>(value_.int_);
+  case uintValue:
+#if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+    return static_cast<float>(value_.uint_);
+#else  // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+    return integerToDouble(value_.uint_);
+#endif // if !defined(JSON_USE_INT64_DOUBLE_CONVERSION)
+  case realValue:
+    return static_cast<float>(value_.real_);
+  case nullValue:
+    return 0.0;
+  case booleanValue:
+    return value_.bool_ ? 1.0f : 0.0f;
+  default:
+    break;
+  }
+  JSON_FAIL_MESSAGE("Value is not convertible to float.");
+}
+
+bool Value::asBool() const {
+  switch (type_) {
+  case booleanValue:
+    return value_.bool_;
+  case nullValue:
+    return false;
+  case intValue:
+    return value_.int_ ? true : false;
+  case uintValue:
+    return value_.uint_ ? true : false;
+  case realValue:
+    // This is kind of strange. Not recommended.
+    return (value_.real_ != 0.0) ? true : false;
+  default:
+    break;
+  }
+  JSON_FAIL_MESSAGE("Value is not convertible to bool.");
+}
+
+bool Value::isConvertibleTo(ValueType other) const {
+  switch (other) {
+  case nullValue:
+    return (isNumeric() && asDouble() == 0.0) ||
+           (type_ == booleanValue && value_.bool_ == false) ||
+           (type_ == stringValue && asString() == "") ||
+           (type_ == arrayValue && value_.map_->size() == 0) ||
+           (type_ == objectValue && value_.map_->size() == 0) ||
+           type_ == nullValue;
+  case intValue:
+    return isInt() ||
+           (type_ == realValue && InRange(value_.real_, minInt, maxInt)) ||
+           type_ == booleanValue || type_ == nullValue;
+  case uintValue:
+    return isUInt() ||
+           (type_ == realValue && InRange(value_.real_, 0, maxUInt)) ||
+           type_ == booleanValue || type_ == nullValue;
+  case realValue:
+    return isNumeric() || type_ == booleanValue || type_ == nullValue;
+  case booleanValue:
+    return isNumeric() || type_ == booleanValue || type_ == nullValue;
+  case stringValue:
+    return isNumeric() || type_ == booleanValue || type_ == stringValue ||
+           type_ == nullValue;
+  case arrayValue:
+    return type_ == arrayValue || type_ == nullValue;
+  case objectValue:
+    return type_ == objectValue || type_ == nullValue;
+  }
+  JSON_ASSERT_UNREACHABLE;
+  return false;
+}
+
+/// Number of values in array or object
+ArrayIndex Value::size() const {
+  switch (type_) {
+  case nullValue:
+  case intValue:
+  case uintValue:
+  case realValue:
+  case booleanValue:
+  case stringValue:
+    return 0;
+  case arrayValue: // size of the array is highest index + 1
+    if (!value_.map_->empty()) {
+      ObjectValues::const_iterator itLast = value_.map_->end();
+      --itLast;
+      return (*itLast).first.index() + 1;
+    }
+    return 0;
+  case objectValue:
+    return ArrayIndex(value_.map_->size());
+  }
+  JSON_ASSERT_UNREACHABLE;
+  return 0; // unreachable;
+}
+
+bool Value::empty() const {
+  if (isNull() || isArray() || isObject())
+    return size() == 0u;
+  else
+    return false;
+}
+
+bool Value::operator!() const { return isNull(); }
+
+void Value::clear() {
+  JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue ||
+                          type_ == objectValue,
+                      "in Json::Value::clear(): requires complex value");
+  start_ = 0;
+  limit_ = 0;
+  switch (type_) {
+  case arrayValue:
+  case objectValue:
+    value_.map_->clear();
+    break;
+  default:
+    break;
+  }
+}
+
+void Value::resize(ArrayIndex newSize) {
+  JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == arrayValue,
+                      "in Json::Value::resize(): requires arrayValue");
+  if (type_ == nullValue)
+    *this = Value(arrayValue);
+  ArrayIndex oldSize = size();
+  if (newSize == 0)
+    clear();
+  else if (newSize > oldSize)
+    (*this)[newSize - 1]; // operator[] creates the missing member, growing size() to newSize
+  else {
+    for (ArrayIndex index = newSize; index < oldSize; ++index) {
+      value_.map_->erase(index);
+    }
+    assert(size() == newSize);
+  }
+}
+
+Value& Value::operator[](ArrayIndex index) {
+  JSON_ASSERT_MESSAGE(
+      type_ == nullValue || type_ == arrayValue,
+      "in Json::Value::operator[](ArrayIndex): requires arrayValue");
+  if (type_ == nullValue)
+    *this = Value(arrayValue);
+  CZString key(index);
+  ObjectValues::iterator it = value_.map_->lower_bound(key);
+  if (it != value_.map_->end() && (*it).first == key)
+    return (*it).second;
+
+  ObjectValues::value_type defaultValue(key, nullRef);
+  it = value_.map_->insert(it, defaultValue);
+  return (*it).second;
+}
+
+Value& Value::operator[](int index) {
+  JSON_ASSERT_MESSAGE(
+      index >= 0,
+      "in Json::Value::operator[](int index): index cannot be negative");
+  return (*this)[ArrayIndex(index)];
+}
+
+const Value& Value::operator[](ArrayIndex index) const {
+  JSON_ASSERT_MESSAGE(
+      type_ == nullValue || type_ == arrayValue,
+      "in Json::Value::operator[](ArrayIndex)const: requires arrayValue");
+  if (type_ == nullValue)
+    return nullRef;
+  CZString key(index);
+  ObjectValues::const_iterator it = value_.map_->find(key);
+  if (it == value_.map_->end())
+    return nullRef;
+  return (*it).second;
+}
+
+const Value& Value::operator[](int index) const {
+  JSON_ASSERT_MESSAGE(
+      index >= 0,
+      "in Json::Value::operator[](int index) const: index cannot be negative");
+  return (*this)[ArrayIndex(index)];
+}
+
+void Value::initBasic(ValueType vtype, bool allocated) {
+  type_ = vtype;
+  allocated_ = allocated;
+  comments_ = 0;
+  start_ = 0;
+  limit_ = 0;
+}
+
+// Access an object value by name, create a null member if it does not exist.
+// @pre Type of '*this' is object or null.
+// @param key is null-terminated.
+Value& Value::resolveReference(const char* key) {
+  JSON_ASSERT_MESSAGE(
+      type_ == nullValue || type_ == objectValue,
+      "in Json::Value::resolveReference(): requires objectValue");
+  if (type_ == nullValue)
+    *this = Value(objectValue);
+  CZString actualKey(
+      key, static_cast<unsigned>(strlen(key)), CZString::noDuplication); // NOTE!
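+  // noDuplication: this overload is only reached via operator[](const
+  // StaticString&), so the key points to static storage and the map can keep
+  // the pointer without copying it.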
+  ObjectValues::iterator it = value_.map_->lower_bound(actualKey);
+  if (it != value_.map_->end() && (*it).first == actualKey)
+    return (*it).second;
+
+  ObjectValues::value_type defaultValue(actualKey, nullRef);
+  it = value_.map_->insert(it, defaultValue);
+  Value& value = (*it).second;
+  return value;
+}
+
+// @param key is not null-terminated.
+Value& Value::resolveReference(char const* key, char const* cend)
+{
+  JSON_ASSERT_MESSAGE(
+      type_ == nullValue || type_ == objectValue,
+      "in Json::Value::resolveReference(key, end): requires objectValue");
+  if (type_ == nullValue)
+    *this = Value(objectValue);
+  CZString actualKey(
+      key, static_cast<unsigned>(cend-key), CZString::duplicateOnCopy);
+  ObjectValues::iterator it = value_.map_->lower_bound(actualKey);
+  if (it != value_.map_->end() && (*it).first == actualKey)
+    return (*it).second;
+
+  ObjectValues::value_type defaultValue(actualKey, nullRef);
+  it = value_.map_->insert(it, defaultValue);
+  Value& value = (*it).second;
+  return value;
+}
+
+Value Value::get(ArrayIndex index, const Value& defaultValue) const {
+  const Value* value = &((*this)[index]);
+  return value == &nullRef ? defaultValue : *value;
+}
+
+bool Value::isValidIndex(ArrayIndex index) const { return index < size(); }
+
+Value const* Value::find(char const* key, char const* cend) const
+{
+  JSON_ASSERT_MESSAGE(
+      type_ == nullValue || type_ == objectValue,
+      "in Json::Value::find(key, end, found): requires objectValue or nullValue");
+  if (type_ == nullValue) return NULL;
+  CZString actualKey(key, static_cast<unsigned>(cend-key), CZString::noDuplication);
+  ObjectValues::const_iterator it = value_.map_->find(actualKey);
+  if (it == value_.map_->end()) return NULL;
+  return &(*it).second;
+}
+const Value& Value::operator[](const char* key) const
+{
+  Value const* found = find(key, key + strlen(key));
+  if (!found) return nullRef;
+  return *found;
+}
+Value const& Value::operator[](std::string const& key) const
+{
+  Value const* found = find(key.data(), key.data() + key.length());
+  if (!found) return nullRef;
+  return *found;
+}
+
+Value& Value::operator[](const char* key) {
+  return resolveReference(key, key + strlen(key));
+}
+
+Value& Value::operator[](const std::string& key) {
+  return resolveReference(key.data(), key.data() + key.length());
+}
+
+Value& Value::operator[](const StaticString& key) {
+  return resolveReference(key.c_str());
+}
+
+#ifdef JSON_USE_CPPTL
+Value& Value::operator[](const CppTL::ConstString& key) {
+  return resolveReference(key.c_str(), key.end_c_str());
+}
+Value const& Value::operator[](CppTL::ConstString const& key) const
+{
+  Value const* found = find(key.c_str(), key.end_c_str());
+  if (!found) return nullRef;
+  return *found;
+}
+#endif
+
+Value& Value::append(const Value& value) { return (*this)[size()] = value; }
+
+Value Value::get(char const* key, char const* cend, Value const& defaultValue) const
+{
+  Value const* found = find(key, cend);
+  return !found ? defaultValue : *found;
+}
+Value Value::get(char const* key, Value const& defaultValue) const
+{
+  return get(key, key + strlen(key), defaultValue);
+}
+Value Value::get(std::string const& key, Value const& defaultValue) const
+{
+  return get(key.data(), key.data() + key.length(), defaultValue);
+}
+
+
+bool Value::removeMember(const char* key, const char* cend, Value* removed)
+{
+  if (type_ != objectValue) {
+    return false;
+  }
+  CZString actualKey(key, static_cast<unsigned>(cend-key), CZString::noDuplication);
+  ObjectValues::iterator it = value_.map_->find(actualKey);
+  if (it == value_.map_->end())
+    return false;
+  if (removed)
+    *removed = it->second;
+  value_.map_->erase(it);
+  return true;
+}
+bool Value::removeMember(const char* key, Value* removed)
+{
+  return removeMember(key, key + strlen(key), removed);
+}
+bool Value::removeMember(std::string const& key, Value* removed)
+{
+  return removeMember(key.data(), key.data() + key.length(), removed);
+}
+Value Value::removeMember(const char* key)
+{
+  JSON_ASSERT_MESSAGE(type_ == nullValue || type_ == objectValue,
+                      "in Json::Value::removeMember(): requires objectValue");
+  if (type_ == nullValue)
+    return nullRef;
+
+  Value removed;  // null
+  removeMember(key, key + strlen(key), &removed);
+  return removed; // still null if removeMember() did nothing
+}
+Value Value::removeMember(const std::string& key)
+{
+  return removeMember(key.c_str());
+}
+
+bool Value::removeIndex(ArrayIndex index, Value* removed) {
+  if (type_ != arrayValue) {
+    return false;
+  }
+  CZString key(index);
+  ObjectValues::iterator it = value_.map_->find(key);
+  if (it == value_.map_->end()) {
+    return false;
+  }
+  *removed = it->second;
+  ArrayIndex oldSize = size();
+  // shift all following items left, into the place of the "removed"
+  for (ArrayIndex i = index; i < (oldSize - 1); ++i){
+    CZString keey(i);
+    (*value_.map_)[keey] = (*this)[i + 1];
+  }
+  // erase the last one ("leftover")
+  CZString keyLast(oldSize - 1);
+  ObjectValues::iterator itLast = value_.map_->find(keyLast);
+  value_.map_->erase(itLast);
+  return true;
+}
+
+#ifdef JSON_USE_CPPTL
+Value Value::get(const CppTL::ConstString& key,
+                 const Value& defaultValue) const {
+  return get(key.c_str(), key.end_c_str(), defaultValue);
+}
+#endif
+
+bool Value::isMember(char const* key, char const* cend) const
+{
+  Value const* value = find(key, cend);
+  return NULL != value;
+}
+bool Value::isMember(char const* key) const
+{
+  return isMember(key, key + strlen(key));
+}
+bool Value::isMember(std::string const& key) const
+{
+  return isMember(key.data(), key.data() + key.length());
+}
+
+#ifdef JSON_USE_CPPTL
+bool Value::isMember(const CppTL::ConstString& key) const {
+  return isMember(key.c_str(), key.end_c_str());
+}
+#endif
+
+Value::Members Value::getMemberNames() const {
+  JSON_ASSERT_MESSAGE(
+      type_ == nullValue || type_ == objectValue,
+      "in Json::Value::getMemberNames(), value must be objectValue");
+  if (type_ == nullValue)
+    return Value::Members();
+  Members members;
+  members.reserve(value_.map_->size());
+  ObjectValues::const_iterator it = value_.map_->begin();
+  ObjectValues::const_iterator itEnd = value_.map_->end();
+  for (; it != itEnd; ++it) {
+    members.push_back(std::string((*it).first.data(),
+                                  (*it).first.length()));
+  }
+  return members;
+}
+//
+//# ifdef JSON_USE_CPPTL
+// EnumMemberNames
+// Value::enumMemberNames() const
+//{
+//   if ( type_ == objectValue )
+//   {
+//      return CppTL::Enum::any(  CppTL::Enum::transform(
+//         CppTL::Enum::keys( *(value_.map_), CppTL::Type<const CZString &>() ),
+//         MemberNamesTransform() ) );
+//   }
+//   return EnumMemberNames();
+//}
+//
+//
+// EnumValues
+// Value::enumValues() const
+//{
+//   if ( type_ == objectValue  ||  type_ == arrayValue )
+//      return CppTL::Enum::anyValues( *(value_.map_),
+//                                     CppTL::Type<const Value &>() );
+//   return EnumValues();
+//}
+//
+//# endif
+
+static bool IsIntegral(double d) {
+  double integral_part;
+  return modf(d, &integral_part) == 0.0;
+}
+
+bool Value::isNull() const { return type_ == nullValue; }
+
+bool Value::isBool() const { return type_ == booleanValue; }
+
+bool Value::isInt() const {
+  switch (type_) {
+  case intValue:
+    return value_.int_ >= minInt && value_.int_ <= maxInt;
+  case uintValue:
+    return value_.uint_ <= UInt(maxInt);
+  case realValue:
+    return value_.real_ >= minInt && value_.real_ <= maxInt &&
+           IsIntegral(value_.real_);
+  default:
+    break;
+  }
+  return false;
+}
+
+bool Value::isUInt() const {
+  switch (type_) {
+  case intValue:
+    return value_.int_ >= 0 && LargestUInt(value_.int_) <= LargestUInt(maxUInt);
+  case uintValue:
+    return value_.uint_ <= maxUInt;
+  case realValue:
+    return value_.real_ >= 0 && value_.real_ <= maxUInt &&
+           IsIntegral(value_.real_);
+  default:
+    break;
+  }
+  return false;
+}
+
+bool Value::isInt64() const {
+#if defined(JSON_HAS_INT64)
+  switch (type_) {
+  case intValue:
+    return true;
+  case uintValue:
+    return value_.uint_ <= UInt64(maxInt64);
+  case realValue:
+    // Note that maxInt64 (= 2^63 - 1) is not exactly representable as a
+    // double, so double(maxInt64) will be rounded up to 2^63. Therefore we
+    // require the value to be strictly less than the limit.
+    return value_.real_ >= double(minInt64) &&
+           value_.real_ < double(maxInt64) && IsIntegral(value_.real_);
+  default:
+    break;
+  }
+#endif // JSON_HAS_INT64
+  return false;
+}
+
+bool Value::isUInt64() const {
+#if defined(JSON_HAS_INT64)
+  switch (type_) {
+  case intValue:
+    return value_.int_ >= 0;
+  case uintValue:
+    return true;
+  case realValue:
+    // Note that maxUInt64 (= 2^64 - 1) is not exactly representable as a
+    // double, so double(maxUInt64) will be rounded up to 2^64. Therefore we
+    // require the value to be strictly less than the limit.
+    return value_.real_ >= 0 && value_.real_ < maxUInt64AsDouble &&
+           IsIntegral(value_.real_);
+  default:
+    break;
+  }
+#endif // JSON_HAS_INT64
+  return false;
+}
+
+bool Value::isIntegral() const {
+#if defined(JSON_HAS_INT64)
+  return isInt64() || isUInt64();
+#else
+  return isInt() || isUInt();
+#endif
+}
+
+bool Value::isDouble() const { return type_ == realValue || isIntegral(); }
+
+bool Value::isNumeric() const { return isIntegral() || isDouble(); }
+
+bool Value::isString() const { return type_ == stringValue; }
+
+bool Value::isArray() const { return type_ == arrayValue; }
+
+bool Value::isObject() const { return type_ == objectValue; }
+
+void Value::setComment(const char* comment, size_t len, CommentPlacement placement) {
+  if (!comments_)
+    comments_ = new CommentInfo[numberOfCommentPlacement];
+  if ((len > 0) && (comment[len-1] == '\n')) {
+    // Always discard trailing newline, to aid indentation.
+    len -= 1;
+  }
+  comments_[placement].setComment(comment, len);
+}
+
+void Value::setComment(const char* comment, CommentPlacement placement) {
+  setComment(comment, strlen(comment), placement);
+}
+
+void Value::setComment(const std::string& comment, CommentPlacement placement) {
+  setComment(comment.c_str(), comment.length(), placement);
+}
+
+bool Value::hasComment(CommentPlacement placement) const {
+  return comments_ != 0 && comments_[placement].comment_ != 0;
+}
+
+std::string Value::getComment(CommentPlacement placement) const {
+  if (hasComment(placement))
+    return comments_[placement].comment_;
+  return "";
+}
+
+void Value::setOffsetStart(size_t start) { start_ = start; }
+
+void Value::setOffsetLimit(size_t limit) { limit_ = limit; }
+
+size_t Value::getOffsetStart() const { return start_; }
+
+size_t Value::getOffsetLimit() const { return limit_; }
+
+std::string Value::toStyledString() const {
+  StyledWriter writer;
+  return writer.write(*this);
+}
+
+Value::const_iterator Value::begin() const {
+  switch (type_) {
+  case arrayValue:
+  case objectValue:
+    if (value_.map_)
+      return const_iterator(value_.map_->begin());
+    break;
+  default:
+    break;
+  }
+  return const_iterator();
+}
+
+Value::const_iterator Value::end() const {
+  switch (type_) {
+  case arrayValue:
+  case objectValue:
+    if (value_.map_)
+      return const_iterator(value_.map_->end());
+    break;
+  default:
+    break;
+  }
+  return const_iterator();
+}
+
+Value::iterator Value::begin() {
+  switch (type_) {
+  case arrayValue:
+  case objectValue:
+    if (value_.map_)
+      return iterator(value_.map_->begin());
+    break;
+  default:
+    break;
+  }
+  return iterator();
+}
+
+Value::iterator Value::end() {
+  switch (type_) {
+  case arrayValue:
+  case objectValue:
+    if (value_.map_)
+      return iterator(value_.map_->end());
+    break;
+  default:
+    break;
+  }
+  return iterator();
+}
+
+// class PathArgument
+// //////////////////////////////////////////////////////////////////
+
+PathArgument::PathArgument() : key_(), index_(), kind_(kindNone) {}
+
+PathArgument::PathArgument(ArrayIndex index)
+    : key_(), index_(index), kind_(kindIndex) {}
+
+PathArgument::PathArgument(const char* key)
+    : key_(key), index_(), kind_(kindKey) {}
+
+PathArgument::PathArgument(const std::string& key)
+    : key_(key.c_str()), index_(), kind_(kindKey) {}
+
+// class Path
+// //////////////////////////////////////////////////////////////////
+
+Path::Path(const std::string& path,
+           const PathArgument& a1,
+           const PathArgument& a2,
+           const PathArgument& a3,
+           const PathArgument& a4,
+           const PathArgument& a5) {
+  InArgs in;
+  in.push_back(&a1);
+  in.push_back(&a2);
+  in.push_back(&a3);
+  in.push_back(&a4);
+  in.push_back(&a5);
+  makePath(path, in);
+}
+
+void Path::makePath(const std::string& path, const InArgs& in) {
+  const char* current = path.c_str();
+  const char* end = current + path.length();
+  InArgs::const_iterator itInArg = in.begin();
+  while (current != end) {
+    if (*current == '[') {
+      ++current;
+      if (*current == '%')
+        addPathInArg(path, in, itInArg, PathArgument::kindIndex);
+      else {
+        ArrayIndex index = 0;
+        for (; current != end && *current >= '0' && *current <= '9'; ++current)
+          index = index * 10 + ArrayIndex(*current - '0');
+        args_.push_back(index);
+      }
+      if (current == end || *current++ != ']')
+        invalidPath(path, int(current - path.c_str()));
+    } else if (*current == '%') {
+      addPathInArg(path, in, itInArg, PathArgument::kindKey);
+      ++current;
+    } else if (*current == '.') {
+      ++current;
+    } else {
+      const char* beginName = current;
+      while (current != end && !strchr("[.", *current))
+        ++current;
+      args_.push_back(std::string(beginName, current));
+    }
+  }
+}
+
+void Path::addPathInArg(const std::string& /*path*/,
+                        const InArgs& in,
+                        InArgs::const_iterator& itInArg,
+                        PathArgument::Kind kind) {
+  if (itInArg == in.end()) {
+    // Error: missing argument %d
+  } else if ((*itInArg)->kind_ != kind) {
+    // Error: bad argument type
+  } else {
+    args_.push_back(**itInArg);
+  }
+}
+
+void Path::invalidPath(const std::string& /*path*/, int /*location*/) {
+  // Error: invalid path.
+}
+
+const Value& Path::resolve(const Value& root) const {
+  const Value* node = &root;
+  for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it) {
+    const PathArgument& arg = *it;
+    if (arg.kind_ == PathArgument::kindIndex) {
+      if (!node->isArray() || !node->isValidIndex(arg.index_)) {
+        // Error: unable to resolve path (array value expected at position...)
+      }
+      node = &((*node)[arg.index_]);
+    } else if (arg.kind_ == PathArgument::kindKey) {
+      if (!node->isObject()) {
+        // Error: unable to resolve path (object value expected at position...)
+      }
+      node = &((*node)[arg.key_]);
+      if (node == &Value::nullRef) {
+        // Error: unable to resolve path (object has no member named '' at
+        // position...)
+      }
+    }
+  }
+  return *node;
+}
+
+Value Path::resolve(const Value& root, const Value& defaultValue) const {
+  const Value* node = &root;
+  for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it) {
+    const PathArgument& arg = *it;
+    if (arg.kind_ == PathArgument::kindIndex) {
+      if (!node->isArray() || !node->isValidIndex(arg.index_))
+        return defaultValue;
+      node = &((*node)[arg.index_]);
+    } else if (arg.kind_ == PathArgument::kindKey) {
+      if (!node->isObject())
+        return defaultValue;
+      node = &((*node)[arg.key_]);
+      if (node == &Value::nullRef)
+        return defaultValue;
+    }
+  }
+  return *node;
+}
+
+Value& Path::make(Value& root) const {
+  Value* node = &root;
+  for (Args::const_iterator it = args_.begin(); it != args_.end(); ++it) {
+    const PathArgument& arg = *it;
+    if (arg.kind_ == PathArgument::kindIndex) {
+      if (!node->isArray()) {
+        // Error: node is not an array at position ...
+      }
+      node = &((*node)[arg.index_]);
+    } else if (arg.kind_ == PathArgument::kindKey) {
+      if (!node->isObject()) {
+        // Error: node is not an object at position...
+      }
+      node = &((*node)[arg.key_]);
+    }
+  }
+  return *node;
+}
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_value.cpp
+// //////////////////////////////////////////////////////////////////////
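
For orientation, here is a minimal sketch of how the `Json::Value` accessors and `Json::Path` resolution implemented in the file above are typically used. The include path, `main`, and all keys and literals are illustrative assumptions, not part of this change.

```cpp
// Illustrative only: the include path assumes the amalgamated header added
// under conformance/third_party/jsoncpp; adjust to the local include layout.
#include "json.h"
#include <iostream>

int main() {
  Json::Value root(Json::objectValue);
  root["settings"]["precision"] = 17;   // operator[] creates missing members on demand
  root["tags"].append("alpha");         // append() is (*this)[size()] = value

  // get() with a default never creates the member when it is absent.
  Json::Value prec = root["settings"].get("precision", Json::Value(10));

  // Path parses ".member" and "[index]" segments (see Path::makePath above).
  Json::Path path(".settings.precision");
  std::cout << path.resolve(root, Json::Value(0)).asInt() << "\n";

  // removeMember() reports whether the key existed and hands back the value.
  Json::Value removed;
  bool had = root["settings"].removeMember("precision", &removed);
  return had ? 0 : 1;
}
```
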
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: src/lib_json/json_writer.cpp
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2011 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#if !defined(JSON_IS_AMALGAMATION)
+#include <json/writer.h>
+#include "json_tool.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+#include <iomanip>
+#include <memory>
+#include <sstream>
+#include <utility>
+#include <set>
+#include <cassert>
+#include <cstring>
+#include <cstdio>
+
+#if defined(_MSC_VER) && _MSC_VER >= 1200 && _MSC_VER < 1800 // Between VC++ 6.0 and VC++ 11.0
+#include <float.h>
+#define isfinite _finite
+#elif defined(__sun) && defined(__SVR4) //Solaris
+#if !defined(isfinite)
+#include <ieeefp.h>
+#define isfinite finite
+#endif
+#elif defined(_AIX)
+#if !defined(isfinite)
+#include <math.h>
+#define isfinite finite
+#endif
+#elif defined(__hpux)
+#if !defined(isfinite)
+#if defined(__ia64) && !defined(finite)
+#define isfinite(x) ((sizeof(x) == sizeof(float) ? \
+                     _Isfinitef(x) : _IsFinite(x)))
+#else
+#include <math.h>
+#define isfinite finite
+#endif
+#endif
+#else
+#include <cmath>
+#if !(defined(__QNXNTO__)) // QNX already defines isfinite
+#define isfinite std::isfinite
+#endif
+#endif
+
+#if defined(_MSC_VER)
+#if !defined(WINCE) && defined(__STDC_SECURE_LIB__) && _MSC_VER >= 1500 // VC++ 9.0 and above
+#define snprintf sprintf_s
+#elif _MSC_VER >= 1900 // VC++ 14.0 and above
+#define snprintf std::snprintf
+#else
+#define snprintf _snprintf
+#endif
+#elif defined(__ANDROID__) || defined(__QNXNTO__)
+#define snprintf snprintf
+#elif __cplusplus >= 201103L
+#define snprintf std::snprintf
+#endif
+
+#if defined(__BORLANDC__)  
+#include <float.h>
+#define isfinite _finite
+#define snprintf _snprintf
+#endif
+
+#if defined(_MSC_VER) && _MSC_VER >= 1400 // VC++ 8.0
+// Disable warning about strdup being deprecated.
+#pragma warning(disable : 4996)
+#endif
+
+namespace Json {
+
+#if __cplusplus >= 201103L || (defined(_CPPLIB_VER) && _CPPLIB_VER >= 520)
+typedef std::unique_ptr<StreamWriter> StreamWriterPtr;
+#else
+typedef std::auto_ptr<StreamWriter>   StreamWriterPtr;
+#endif
+
+static bool containsControlCharacter(const char* str) {
+  while (*str) {
+    if (isControlCharacter(*(str++)))
+      return true;
+  }
+  return false;
+}
+
+static bool containsControlCharacter0(const char* str, unsigned len) {
+  char const* end = str + len;
+  while (end != str) {
+    if (isControlCharacter(*str) || 0==*str)
+      return true;
+    ++str;
+  }
+  return false;
+}
+
+std::string valueToString(LargestInt value) {
+  UIntToStringBuffer buffer;
+  char* current = buffer + sizeof(buffer);
+  if (value == Value::minLargestInt) {
+    uintToString(LargestUInt(Value::maxLargestInt) + 1, current);
+    *--current = '-';
+  } else if (value < 0) {
+    uintToString(LargestUInt(-value), current);
+    *--current = '-';
+  } else {
+    uintToString(LargestUInt(value), current);
+  }
+  assert(current >= buffer);
+  return current;
+}
+
+std::string valueToString(LargestUInt value) {
+  UIntToStringBuffer buffer;
+  char* current = buffer + sizeof(buffer);
+  uintToString(value, current);
+  assert(current >= buffer);
+  return current;
+}
+
+#if defined(JSON_HAS_INT64)
+
+std::string valueToString(Int value) {
+  return valueToString(LargestInt(value));
+}
+
+std::string valueToString(UInt value) {
+  return valueToString(LargestUInt(value));
+}
+
+#endif // # if defined(JSON_HAS_INT64)
+
+std::string valueToString(double value, bool useSpecialFloats, unsigned int precision) {
+  // Allocate a buffer that is more than large enough to store the 16 digits of
+  // precision requested below.
+  char buffer[32];
+  int len = -1;
+
+  char formatString[6];
+  sprintf(formatString, "%%.%dg", precision);
+
+  // Print into the buffer. We need not request the alternative representation
+  // that always has a decimal point because JSON doesn't distinguish the
+  // concepts of reals and integers.
+  if (isfinite(value)) {
+    len = snprintf(buffer, sizeof(buffer), formatString, value);
+  } else {
+    // IEEE standard states that NaN values will not compare to themselves
+    if (value != value) {
+      len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "NaN" : "null");
+    } else if (value < 0) {
+      len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "-Infinity" : "-1e+9999");
+    } else {
+      len = snprintf(buffer, sizeof(buffer), useSpecialFloats ? "Infinity" : "1e+9999");
+    }
+    // For these special values we do not need fixNumericLocale below, but it is fast.
+  }
+  assert(len >= 0);
+  fixNumericLocale(buffer, buffer + len);
+  return buffer;
+}
+
+std::string valueToString(double value) { return valueToString(value, false, 17); }
+
+std::string valueToString(bool value) { return value ? "true" : "false"; }
+
+std::string valueToQuotedString(const char* value) {
+  if (value == NULL)
+    return "";
+  // Not sure how to handle unicode...
+  if (strpbrk(value, "\"\\\b\f\n\r\t") == NULL &&
+      !containsControlCharacter(value))
+    return std::string("\"") + value + "\"";
+  // We have to walk value and escape any special characters.
+  // Appending to std::string is not efficient, but this should be rare.
+  // (Note: forward slashes are *not* rare, but I am not escaping them.)
+  std::string::size_type maxsize =
+      strlen(value) * 2 + 3; // allescaped+quotes+NULL
+  std::string result;
+  result.reserve(maxsize); // to avoid lots of mallocs
+  result += "\"";
+  for (const char* c = value; *c != 0; ++c) {
+    switch (*c) {
+    case '\"':
+      result += "\\\"";
+      break;
+    case '\\':
+      result += "\\\\";
+      break;
+    case '\b':
+      result += "\\b";
+      break;
+    case '\f':
+      result += "\\f";
+      break;
+    case '\n':
+      result += "\\n";
+      break;
+    case '\r':
+      result += "\\r";
+      break;
+    case '\t':
+      result += "\\t";
+      break;
+    // case '/':
+    // Even though \/ is considered a legal escape in JSON, a bare
+    // slash is also legal, so I see no reason to escape it.
+    // (I hope I am not misunderstanding something.)
+    // blep notes: actually escaping \/ may be useful in javascript to avoid </
+    // sequence.
+    // Should add a flag to allow this compatibility mode and prevent this
+    // sequence from occurring.
+    default:
+      if (isControlCharacter(*c)) {
+        std::ostringstream oss;
+        oss << "\\u" << std::hex << std::uppercase << std::setfill('0')
+            << std::setw(4) << static_cast<int>(*c);
+        result += oss.str();
+      } else {
+        result += *c;
+      }
+      break;
+    }
+  }
+  result += "\"";
+  return result;
+}
+
+// https://github.com/upcaste/upcaste/blob/master/src/upcore/src/cstring/strnpbrk.cpp
+static char const* strnpbrk(char const* s, char const* accept, size_t n) {
+  assert((s || !n) && accept);
+
+  char const* const end = s + n;
+  for (char const* cur = s; cur < end; ++cur) {
+    int const c = *cur;
+    for (char const* a = accept; *a; ++a) {
+      if (*a == c) {
+        return cur;
+      }
+    }
+  }
+  return NULL;
+}
+static std::string valueToQuotedStringN(const char* value, unsigned length) {
+  if (value == NULL)
+    return "";
+  // Not sure how to handle unicode...
+  if (strnpbrk(value, "\"\\\b\f\n\r\t", length) == NULL &&
+      !containsControlCharacter0(value, length))
+    return std::string("\"") + value + "\"";
+  // We have to walk value and escape any special characters.
+  // Appending to std::string is not efficient, but this should be rare.
+  // (Note: forward slashes are *not* rare, but I am not escaping them.)
+  std::string::size_type maxsize =
+      length * 2 + 3; // allescaped+quotes+NULL
+  std::string result;
+  result.reserve(maxsize); // to avoid lots of mallocs
+  result += "\"";
+  char const* end = value + length;
+  for (const char* c = value; c != end; ++c) {
+    switch (*c) {
+    case '\"':
+      result += "\\\"";
+      break;
+    case '\\':
+      result += "\\\\";
+      break;
+    case '\b':
+      result += "\\b";
+      break;
+    case '\f':
+      result += "\\f";
+      break;
+    case '\n':
+      result += "\\n";
+      break;
+    case '\r':
+      result += "\\r";
+      break;
+    case '\t':
+      result += "\\t";
+      break;
+    // case '/':
+    // Even though \/ is considered a legal escape in JSON, a bare
+    // slash is also legal, so I see no reason to escape it.
+    // (I hope I am not misunderstanding something.)
+    // blep notes: actually escaping \/ may be useful in javascript to avoid </
+    // sequence.
+    // Should add a flag to allow this compatibility mode and prevent this
+    // sequence from occurring.
+    default:
+      if ((isControlCharacter(*c)) || (*c == 0)) {
+        std::ostringstream oss;
+        oss << "\\u" << std::hex << std::uppercase << std::setfill('0')
+            << std::setw(4) << static_cast<int>(*c);
+        result += oss.str();
+      } else {
+        result += *c;
+      }
+      break;
+    }
+  }
+  result += "\"";
+  return result;
+}
+
+// Class Writer
+// //////////////////////////////////////////////////////////////////
+Writer::~Writer() {}
+
+// Class FastWriter
+// //////////////////////////////////////////////////////////////////
+
+FastWriter::FastWriter()
+    : yamlCompatiblityEnabled_(false), dropNullPlaceholders_(false),
+      omitEndingLineFeed_(false) {}
+
+void FastWriter::enableYAMLCompatibility() { yamlCompatiblityEnabled_ = true; }
+
+void FastWriter::dropNullPlaceholders() { dropNullPlaceholders_ = true; }
+
+void FastWriter::omitEndingLineFeed() { omitEndingLineFeed_ = true; }
+
+std::string FastWriter::write(const Value& root) {
+  document_ = "";
+  writeValue(root);
+  if (!omitEndingLineFeed_)
+    document_ += "\n";
+  return document_;
+}
+
+void FastWriter::writeValue(const Value& value) {
+  switch (value.type()) {
+  case nullValue:
+    if (!dropNullPlaceholders_)
+      document_ += "null";
+    break;
+  case intValue:
+    document_ += valueToString(value.asLargestInt());
+    break;
+  case uintValue:
+    document_ += valueToString(value.asLargestUInt());
+    break;
+  case realValue:
+    document_ += valueToString(value.asDouble());
+    break;
+  case stringValue:
+  {
+    // Is NULL possible for value.string_?
+    char const* str;
+    char const* end;
+    bool ok = value.getString(&str, &end);
+    if (ok) document_ += valueToQuotedStringN(str, static_cast<unsigned>(end-str));
+    break;
+  }
+  case booleanValue:
+    document_ += valueToString(value.asBool());
+    break;
+  case arrayValue: {
+    document_ += '[';
+    int size = value.size();
+    for (int index = 0; index < size; ++index) {
+      if (index > 0)
+        document_ += ',';
+      writeValue(value[index]);
+    }
+    document_ += ']';
+  } break;
+  case objectValue: {
+    Value::Members members(value.getMemberNames());
+    document_ += '{';
+    for (Value::Members::iterator it = members.begin(); it != members.end();
+         ++it) {
+      const std::string& name = *it;
+      if (it != members.begin())
+        document_ += ',';
+      document_ += valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length()));
+      document_ += yamlCompatiblityEnabled_ ? ": " : ":";
+      writeValue(value[name]);
+    }
+    document_ += '}';
+  } break;
+  }
+}
+
+// Class StyledWriter
+// //////////////////////////////////////////////////////////////////
+
+StyledWriter::StyledWriter()
+    : rightMargin_(74), indentSize_(3), addChildValues_() {}
+
+std::string StyledWriter::write(const Value& root) {
+  document_ = "";
+  addChildValues_ = false;
+  indentString_ = "";
+  writeCommentBeforeValue(root);
+  writeValue(root);
+  writeCommentAfterValueOnSameLine(root);
+  document_ += "\n";
+  return document_;
+}
+
+void StyledWriter::writeValue(const Value& value) {
+  switch (value.type()) {
+  case nullValue:
+    pushValue("null");
+    break;
+  case intValue:
+    pushValue(valueToString(value.asLargestInt()));
+    break;
+  case uintValue:
+    pushValue(valueToString(value.asLargestUInt()));
+    break;
+  case realValue:
+    pushValue(valueToString(value.asDouble()));
+    break;
+  case stringValue:
+  {
+    // Is NULL possible for value.string_?
+    char const* str;
+    char const* end;
+    bool ok = value.getString(&str, &end);
+    if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
+    else pushValue("");
+    break;
+  }
+  case booleanValue:
+    pushValue(valueToString(value.asBool()));
+    break;
+  case arrayValue:
+    writeArrayValue(value);
+    break;
+  case objectValue: {
+    Value::Members members(value.getMemberNames());
+    if (members.empty())
+      pushValue("{}");
+    else {
+      writeWithIndent("{");
+      indent();
+      Value::Members::iterator it = members.begin();
+      for (;;) {
+        const std::string& name = *it;
+        const Value& childValue = value[name];
+        writeCommentBeforeValue(childValue);
+        writeWithIndent(valueToQuotedString(name.c_str()));
+        document_ += " : ";
+        writeValue(childValue);
+        if (++it == members.end()) {
+          writeCommentAfterValueOnSameLine(childValue);
+          break;
+        }
+        document_ += ',';
+        writeCommentAfterValueOnSameLine(childValue);
+      }
+      unindent();
+      writeWithIndent("}");
+    }
+  } break;
+  }
+}
+
+void StyledWriter::writeArrayValue(const Value& value) {
+  unsigned size = value.size();
+  if (size == 0)
+    pushValue("[]");
+  else {
+    bool isArrayMultiLine = isMultineArray(value);
+    if (isArrayMultiLine) {
+      writeWithIndent("[");
+      indent();
+      bool hasChildValue = !childValues_.empty();
+      unsigned index = 0;
+      for (;;) {
+        const Value& childValue = value[index];
+        writeCommentBeforeValue(childValue);
+        if (hasChildValue)
+          writeWithIndent(childValues_[index]);
+        else {
+          writeIndent();
+          writeValue(childValue);
+        }
+        if (++index == size) {
+          writeCommentAfterValueOnSameLine(childValue);
+          break;
+        }
+        document_ += ',';
+        writeCommentAfterValueOnSameLine(childValue);
+      }
+      unindent();
+      writeWithIndent("]");
+    } else // output on a single line
+    {
+      assert(childValues_.size() == size);
+      document_ += "[ ";
+      for (unsigned index = 0; index < size; ++index) {
+        if (index > 0)
+          document_ += ", ";
+        document_ += childValues_[index];
+      }
+      document_ += " ]";
+    }
+  }
+}
+
+bool StyledWriter::isMultineArray(const Value& value) {
+  int size = value.size();
+  bool isMultiLine = size * 3 >= rightMargin_;
+  childValues_.clear();
+  for (int index = 0; index < size && !isMultiLine; ++index) {
+    const Value& childValue = value[index];
+    isMultiLine = ((childValue.isArray() || childValue.isObject()) &&
+                        childValue.size() > 0);
+  }
+  if (!isMultiLine) // check if line length > max line length
+  {
+    childValues_.reserve(size);
+    addChildValues_ = true;
+    int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
+    for (int index = 0; index < size; ++index) {
+      if (hasCommentForValue(value[index])) {
+        isMultiLine = true;
+      }
+      writeValue(value[index]);
+      lineLength += int(childValues_[index].length());
+    }
+    addChildValues_ = false;
+    isMultiLine = isMultiLine || lineLength >= rightMargin_;
+  }
+  return isMultiLine;
+}
+
+void StyledWriter::pushValue(const std::string& value) {
+  if (addChildValues_)
+    childValues_.push_back(value);
+  else
+    document_ += value;
+}
+
+void StyledWriter::writeIndent() {
+  if (!document_.empty()) {
+    char last = document_[document_.length() - 1];
+    if (last == ' ') // already indented
+      return;
+    if (last != '\n') // Comments may add new-line
+      document_ += '\n';
+  }
+  document_ += indentString_;
+}
+
+void StyledWriter::writeWithIndent(const std::string& value) {
+  writeIndent();
+  document_ += value;
+}
+
+void StyledWriter::indent() { indentString_ += std::string(indentSize_, ' '); }
+
+void StyledWriter::unindent() {
+  assert(int(indentString_.size()) >= indentSize_);
+  indentString_.resize(indentString_.size() - indentSize_);
+}
+
+void StyledWriter::writeCommentBeforeValue(const Value& root) {
+  if (!root.hasComment(commentBefore))
+    return;
+
+  document_ += "\n";
+  writeIndent();
+  const std::string& comment = root.getComment(commentBefore);
+  std::string::const_iterator iter = comment.begin();
+  while (iter != comment.end()) {
+    document_ += *iter;
+    if (*iter == '\n' &&
+       (iter != comment.end() && *(iter + 1) == '/'))
+      writeIndent();
+    ++iter;
+  }
+
+  // Comments are stripped of trailing newlines, so add one here
+  document_ += "\n";
+}
+
+void StyledWriter::writeCommentAfterValueOnSameLine(const Value& root) {
+  if (root.hasComment(commentAfterOnSameLine))
+    document_ += " " + root.getComment(commentAfterOnSameLine);
+
+  if (root.hasComment(commentAfter)) {
+    document_ += "\n";
+    document_ += root.getComment(commentAfter);
+    document_ += "\n";
+  }
+}
+
+bool StyledWriter::hasCommentForValue(const Value& value) {
+  return value.hasComment(commentBefore) ||
+         value.hasComment(commentAfterOnSameLine) ||
+         value.hasComment(commentAfter);
+}
+
+// Class StyledStreamWriter
+// //////////////////////////////////////////////////////////////////
+
+StyledStreamWriter::StyledStreamWriter(std::string indentation)
+    : document_(NULL), rightMargin_(74), indentation_(indentation),
+      addChildValues_() {}
+
+void StyledStreamWriter::write(std::ostream& out, const Value& root) {
+  document_ = &out;
+  addChildValues_ = false;
+  indentString_ = "";
+  indented_ = true;
+  writeCommentBeforeValue(root);
+  if (!indented_) writeIndent();
+  indented_ = true;
+  writeValue(root);
+  writeCommentAfterValueOnSameLine(root);
+  *document_ << "\n";
+  document_ = NULL; // Forget the stream, for safety.
+}
+
+void StyledStreamWriter::writeValue(const Value& value) {
+  switch (value.type()) {
+  case nullValue:
+    pushValue("null");
+    break;
+  case intValue:
+    pushValue(valueToString(value.asLargestInt()));
+    break;
+  case uintValue:
+    pushValue(valueToString(value.asLargestUInt()));
+    break;
+  case realValue:
+    pushValue(valueToString(value.asDouble()));
+    break;
+  case stringValue:
+  {
+    // Is NULL possible for value.string_?
+    char const* str;
+    char const* end;
+    bool ok = value.getString(&str, &end);
+    if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
+    else pushValue("");
+    break;
+  }
+  case booleanValue:
+    pushValue(valueToString(value.asBool()));
+    break;
+  case arrayValue:
+    writeArrayValue(value);
+    break;
+  case objectValue: {
+    Value::Members members(value.getMemberNames());
+    if (members.empty())
+      pushValue("{}");
+    else {
+      writeWithIndent("{");
+      indent();
+      Value::Members::iterator it = members.begin();
+      for (;;) {
+        const std::string& name = *it;
+        const Value& childValue = value[name];
+        writeCommentBeforeValue(childValue);
+        writeWithIndent(valueToQuotedString(name.c_str()));
+        *document_ << " : ";
+        writeValue(childValue);
+        if (++it == members.end()) {
+          writeCommentAfterValueOnSameLine(childValue);
+          break;
+        }
+        *document_ << ",";
+        writeCommentAfterValueOnSameLine(childValue);
+      }
+      unindent();
+      writeWithIndent("}");
+    }
+  } break;
+  }
+}
+
+void StyledStreamWriter::writeArrayValue(const Value& value) {
+  unsigned size = value.size();
+  if (size == 0)
+    pushValue("[]");
+  else {
+    bool isArrayMultiLine = isMultineArray(value);
+    if (isArrayMultiLine) {
+      writeWithIndent("[");
+      indent();
+      bool hasChildValue = !childValues_.empty();
+      unsigned index = 0;
+      for (;;) {
+        const Value& childValue = value[index];
+        writeCommentBeforeValue(childValue);
+        if (hasChildValue)
+          writeWithIndent(childValues_[index]);
+        else {
+          if (!indented_) writeIndent();
+          indented_ = true;
+          writeValue(childValue);
+          indented_ = false;
+        }
+        if (++index == size) {
+          writeCommentAfterValueOnSameLine(childValue);
+          break;
+        }
+        *document_ << ",";
+        writeCommentAfterValueOnSameLine(childValue);
+      }
+      unindent();
+      writeWithIndent("]");
+    } else // output on a single line
+    {
+      assert(childValues_.size() == size);
+      *document_ << "[ ";
+      for (unsigned index = 0; index < size; ++index) {
+        if (index > 0)
+          *document_ << ", ";
+        *document_ << childValues_[index];
+      }
+      *document_ << " ]";
+    }
+  }
+}
+
+bool StyledStreamWriter::isMultineArray(const Value& value) {
+  int size = value.size();
+  bool isMultiLine = size * 3 >= rightMargin_;
+  childValues_.clear();
+  for (int index = 0; index < size && !isMultiLine; ++index) {
+    const Value& childValue = value[index];
+    isMultiLine = ((childValue.isArray() || childValue.isObject()) &&
+                        childValue.size() > 0);
+  }
+  if (!isMultiLine) // check if line length > max line length
+  {
+    childValues_.reserve(size);
+    addChildValues_ = true;
+    int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
+    for (int index = 0; index < size; ++index) {
+      if (hasCommentForValue(value[index])) {
+        isMultiLine = true;
+      }
+      writeValue(value[index]);
+      lineLength += int(childValues_[index].length());
+    }
+    addChildValues_ = false;
+    isMultiLine = isMultiLine || lineLength >= rightMargin_;
+  }
+  return isMultiLine;
+}
+
+void StyledStreamWriter::pushValue(const std::string& value) {
+  if (addChildValues_)
+    childValues_.push_back(value);
+  else
+    *document_ << value;
+}
+
+void StyledStreamWriter::writeIndent() {
+  // blep intended this to look at the so-far-written string
+  // to determine whether we are already indented, but
+  // with a stream we cannot do that. So we rely on some saved state.
+  // The caller checks indented_.
+  *document_ << '\n' << indentString_;
+}
+
+void StyledStreamWriter::writeWithIndent(const std::string& value) {
+  if (!indented_) writeIndent();
+  *document_ << value;
+  indented_ = false;
+}
+
+void StyledStreamWriter::indent() { indentString_ += indentation_; }
+
+void StyledStreamWriter::unindent() {
+  assert(indentString_.size() >= indentation_.size());
+  indentString_.resize(indentString_.size() - indentation_.size());
+}
+
+void StyledStreamWriter::writeCommentBeforeValue(const Value& root) {
+  if (!root.hasComment(commentBefore))
+    return;
+
+  if (!indented_) writeIndent();
+  const std::string& comment = root.getComment(commentBefore);
+  std::string::const_iterator iter = comment.begin();
+  while (iter != comment.end()) {
+    *document_ << *iter;
+    if (*iter == '\n' &&
+       (iter != comment.end() && *(iter + 1) == '/'))
+      // writeIndent();  // would include newline
+      *document_ << indentString_;
+    ++iter;
+  }
+  indented_ = false;
+}
+
+void StyledStreamWriter::writeCommentAfterValueOnSameLine(const Value& root) {
+  if (root.hasComment(commentAfterOnSameLine))
+    *document_ << ' ' << root.getComment(commentAfterOnSameLine);
+
+  if (root.hasComment(commentAfter)) {
+    writeIndent();
+    *document_ << root.getComment(commentAfter);
+  }
+  indented_ = false;
+}
+
+bool StyledStreamWriter::hasCommentForValue(const Value& value) {
+  return value.hasComment(commentBefore) ||
+         value.hasComment(commentAfterOnSameLine) ||
+         value.hasComment(commentAfter);
+}
+
+//////////////////////////
+// BuiltStyledStreamWriter
+
+/// Scoped enums are not available until C++11.
+struct CommentStyle {
+  /// Decide whether to write comments.
+  enum Enum {
+    None,  ///< Drop all comments.
+    Most,  ///< Recover odd behavior of previous versions (not implemented yet).
+    All  ///< Keep all comments.
+  };
+};
+
+struct BuiltStyledStreamWriter : public StreamWriter
+{
+  BuiltStyledStreamWriter(
+      std::string const& indentation,
+      CommentStyle::Enum cs,
+      std::string const& colonSymbol,
+      std::string const& nullSymbol,
+      std::string const& endingLineFeedSymbol,
+      bool useSpecialFloats,
+      unsigned int precision);
+  int write(Value const& root, std::ostream* sout) override;
+private:
+  void writeValue(Value const& value);
+  void writeArrayValue(Value const& value);
+  bool isMultineArray(Value const& value);
+  void pushValue(std::string const& value);
+  void writeIndent();
+  void writeWithIndent(std::string const& value);
+  void indent();
+  void unindent();
+  void writeCommentBeforeValue(Value const& root);
+  void writeCommentAfterValueOnSameLine(Value const& root);
+  static bool hasCommentForValue(const Value& value);
+
+  typedef std::vector<std::string> ChildValues;
+
+  ChildValues childValues_;
+  std::string indentString_;
+  int rightMargin_;
+  std::string indentation_;
+  CommentStyle::Enum cs_;
+  std::string colonSymbol_;
+  std::string nullSymbol_;
+  std::string endingLineFeedSymbol_;
+  bool addChildValues_ : 1;
+  bool indented_ : 1;
+  bool useSpecialFloats_ : 1;
+  unsigned int precision_;
+};
+BuiltStyledStreamWriter::BuiltStyledStreamWriter(
+      std::string const& indentation,
+      CommentStyle::Enum cs,
+      std::string const& colonSymbol,
+      std::string const& nullSymbol,
+      std::string const& endingLineFeedSymbol,
+      bool useSpecialFloats,
+      unsigned int precision)
+  : rightMargin_(74)
+  , indentation_(indentation)
+  , cs_(cs)
+  , colonSymbol_(colonSymbol)
+  , nullSymbol_(nullSymbol)
+  , endingLineFeedSymbol_(endingLineFeedSymbol)
+  , addChildValues_(false)
+  , indented_(false)
+  , useSpecialFloats_(useSpecialFloats)
+  , precision_(precision)
+{
+}
+int BuiltStyledStreamWriter::write(Value const& root, std::ostream* sout)
+{
+  sout_ = sout;
+  addChildValues_ = false;
+  indented_ = true;
+  indentString_ = "";
+  writeCommentBeforeValue(root);
+  if (!indented_) writeIndent();
+  indented_ = true;
+  writeValue(root);
+  writeCommentAfterValueOnSameLine(root);
+  *sout_ << endingLineFeedSymbol_;
+  sout_ = NULL;
+  return 0;
+}
+void BuiltStyledStreamWriter::writeValue(Value const& value) {
+  switch (value.type()) {
+  case nullValue:
+    pushValue(nullSymbol_);
+    break;
+  case intValue:
+    pushValue(valueToString(value.asLargestInt()));
+    break;
+  case uintValue:
+    pushValue(valueToString(value.asLargestUInt()));
+    break;
+  case realValue:
+    pushValue(valueToString(value.asDouble(), useSpecialFloats_, precision_));
+    break;
+  case stringValue:
+  {
+    // Is NULL possible for value.string_?
+    char const* str;
+    char const* end;
+    bool ok = value.getString(&str, &end);
+    if (ok) pushValue(valueToQuotedStringN(str, static_cast<unsigned>(end-str)));
+    else pushValue("");
+    break;
+  }
+  case booleanValue:
+    pushValue(valueToString(value.asBool()));
+    break;
+  case arrayValue:
+    writeArrayValue(value);
+    break;
+  case objectValue: {
+    Value::Members members(value.getMemberNames());
+    if (members.empty())
+      pushValue("{}");
+    else {
+      writeWithIndent("{");
+      indent();
+      Value::Members::iterator it = members.begin();
+      for (;;) {
+        std::string const& name = *it;
+        Value const& childValue = value[name];
+        writeCommentBeforeValue(childValue);
+        writeWithIndent(valueToQuotedStringN(name.data(), static_cast<unsigned>(name.length())));
+        *sout_ << colonSymbol_;
+        writeValue(childValue);
+        if (++it == members.end()) {
+          writeCommentAfterValueOnSameLine(childValue);
+          break;
+        }
+        *sout_ << ",";
+        writeCommentAfterValueOnSameLine(childValue);
+      }
+      unindent();
+      writeWithIndent("}");
+    }
+  } break;
+  }
+}
+
+void BuiltStyledStreamWriter::writeArrayValue(Value const& value) {
+  unsigned size = value.size();
+  if (size == 0)
+    pushValue("[]");
+  else {
+    bool isMultiLine = (cs_ == CommentStyle::All) || isMultineArray(value);
+    if (isMultiLine) {
+      writeWithIndent("[");
+      indent();
+      bool hasChildValue = !childValues_.empty();
+      unsigned index = 0;
+      for (;;) {
+        Value const& childValue = value[index];
+        writeCommentBeforeValue(childValue);
+        if (hasChildValue)
+          writeWithIndent(childValues_[index]);
+        else {
+          if (!indented_) writeIndent();
+          indented_ = true;
+          writeValue(childValue);
+          indented_ = false;
+        }
+        if (++index == size) {
+          writeCommentAfterValueOnSameLine(childValue);
+          break;
+        }
+        *sout_ << ",";
+        writeCommentAfterValueOnSameLine(childValue);
+      }
+      unindent();
+      writeWithIndent("]");
+    } else // output on a single line
+    {
+      assert(childValues_.size() == size);
+      *sout_ << "[";
+      if (!indentation_.empty()) *sout_ << " ";
+      for (unsigned index = 0; index < size; ++index) {
+        if (index > 0)
+          *sout_ << ", ";
+        *sout_ << childValues_[index];
+      }
+      if (!indentation_.empty()) *sout_ << " ";
+      *sout_ << "]";
+    }
+  }
+}
+
+bool BuiltStyledStreamWriter::isMultineArray(Value const& value) {
+  int size = value.size();
+  bool isMultiLine = size * 3 >= rightMargin_;
+  childValues_.clear();
+  for (int index = 0; index < size && !isMultiLine; ++index) {
+    Value const& childValue = value[index];
+    isMultiLine = ((childValue.isArray() || childValue.isObject()) &&
+                        childValue.size() > 0);
+  }
+  if (!isMultiLine) // check if line length > max line length
+  {
+    childValues_.reserve(size);
+    addChildValues_ = true;
+    int lineLength = 4 + (size - 1) * 2; // '[ ' + ', '*n + ' ]'
+    for (int index = 0; index < size; ++index) {
+      if (hasCommentForValue(value[index])) {
+        isMultiLine = true;
+      }
+      writeValue(value[index]);
+      lineLength += int(childValues_[index].length());
+    }
+    addChildValues_ = false;
+    isMultiLine = isMultiLine || lineLength >= rightMargin_;
+  }
+  return isMultiLine;
+}
+
+void BuiltStyledStreamWriter::pushValue(std::string const& value) {
+  if (addChildValues_)
+    childValues_.push_back(value);
+  else
+    *sout_ << value;
+}
+
+void BuiltStyledStreamWriter::writeIndent() {
+  // blep intended this to look at the so-far-written string
+  // to determine whether we are already indented, but
+  // with a stream we cannot do that. So we rely on some saved state.
+  // The caller checks indented_.
+
+  if (!indentation_.empty()) {
+    // In this case, drop newlines too.
+    *sout_ << '\n' << indentString_;
+  }
+}
+
+void BuiltStyledStreamWriter::writeWithIndent(std::string const& value) {
+  if (!indented_) writeIndent();
+  *sout_ << value;
+  indented_ = false;
+}
+
+void BuiltStyledStreamWriter::indent() { indentString_ += indentation_; }
+
+void BuiltStyledStreamWriter::unindent() {
+  assert(indentString_.size() >= indentation_.size());
+  indentString_.resize(indentString_.size() - indentation_.size());
+}
+
+void BuiltStyledStreamWriter::writeCommentBeforeValue(Value const& root) {
+  if (cs_ == CommentStyle::None) return;
+  if (!root.hasComment(commentBefore))
+    return;
+
+  if (!indented_) writeIndent();
+  const std::string& comment = root.getComment(commentBefore);
+  std::string::const_iterator iter = comment.begin();
+  while (iter != comment.end()) {
+    *sout_ << *iter;
+    if (*iter == '\n' &&
+       (iter != comment.end() && *(iter + 1) == '/'))
+      // writeIndent();  // would write extra newline
+      *sout_ << indentString_;
+    ++iter;
+  }
+  indented_ = false;
+}
+
+void BuiltStyledStreamWriter::writeCommentAfterValueOnSameLine(Value const& root) {
+  if (cs_ == CommentStyle::None) return;
+  if (root.hasComment(commentAfterOnSameLine))
+    *sout_ << " " + root.getComment(commentAfterOnSameLine);
+
+  if (root.hasComment(commentAfter)) {
+    writeIndent();
+    *sout_ << root.getComment(commentAfter);
+  }
+}
+
+// static
+bool BuiltStyledStreamWriter::hasCommentForValue(const Value& value) {
+  return value.hasComment(commentBefore) ||
+         value.hasComment(commentAfterOnSameLine) ||
+         value.hasComment(commentAfter);
+}
+
+///////////////
+// StreamWriter
+
+StreamWriter::StreamWriter()
+    : sout_(NULL)
+{
+}
+StreamWriter::~StreamWriter()
+{
+}
+StreamWriter::Factory::~Factory()
+{}
+StreamWriterBuilder::StreamWriterBuilder()
+{
+  setDefaults(&settings_);
+}
+StreamWriterBuilder::~StreamWriterBuilder()
+{}
+StreamWriter* StreamWriterBuilder::newStreamWriter() const
+{
+  std::string indentation = settings_["indentation"].asString();
+  std::string cs_str = settings_["commentStyle"].asString();
+  bool eyc = settings_["enableYAMLCompatibility"].asBool();
+  bool dnp = settings_["dropNullPlaceholders"].asBool();
+  bool usf = settings_["useSpecialFloats"].asBool(); 
+  unsigned int pre = settings_["precision"].asUInt();
+  CommentStyle::Enum cs = CommentStyle::All;
+  if (cs_str == "All") {
+    cs = CommentStyle::All;
+  } else if (cs_str == "None") {
+    cs = CommentStyle::None;
+  } else {
+    throwRuntimeError("commentStyle must be 'All' or 'None'");
+  }
+  std::string colonSymbol = " : ";
+  if (eyc) {
+    colonSymbol = ": ";
+  } else if (indentation.empty()) {
+    colonSymbol = ":";
+  }
+  std::string nullSymbol = "null";
+  if (dnp) {
+    nullSymbol = "";
+  }
+  if (pre > 17) pre = 17;
+  std::string endingLineFeedSymbol = "";
+  return new BuiltStyledStreamWriter(
+      indentation, cs,
+      colonSymbol, nullSymbol, endingLineFeedSymbol, usf, pre);
+}
+static void getValidWriterKeys(std::set<std::string>* valid_keys)
+{
+  valid_keys->clear();
+  valid_keys->insert("indentation");
+  valid_keys->insert("commentStyle");
+  valid_keys->insert("enableYAMLCompatibility");
+  valid_keys->insert("dropNullPlaceholders");
+  valid_keys->insert("useSpecialFloats");
+  valid_keys->insert("precision");
+}
+bool StreamWriterBuilder::validate(Json::Value* invalid) const
+{
+  Json::Value my_invalid;
+  if (!invalid) invalid = &my_invalid;  // so we do not need to test for NULL
+  Json::Value& inv = *invalid;
+  std::set<std::string> valid_keys;
+  getValidWriterKeys(&valid_keys);
+  Value::Members keys = settings_.getMemberNames();
+  size_t n = keys.size();
+  for (size_t i = 0; i < n; ++i) {
+    std::string const& key = keys[i];
+    if (valid_keys.find(key) == valid_keys.end()) {
+      inv[key] = settings_[key];
+    }
+  }
+  return 0u == inv.size();
+}
+Value& StreamWriterBuilder::operator[](std::string key)
+{
+  return settings_[key];
+}
+// static
+void StreamWriterBuilder::setDefaults(Json::Value* settings)
+{
+  //! [StreamWriterBuilderDefaults]
+  (*settings)["commentStyle"] = "All";
+  (*settings)["indentation"] = "\t";
+  (*settings)["enableYAMLCompatibility"] = false;
+  (*settings)["dropNullPlaceholders"] = false;
+  (*settings)["useSpecialFloats"] = false;
+  (*settings)["precision"] = 17;
+  //! [StreamWriterBuilderDefaults]
+}
+
+std::string writeString(StreamWriter::Factory const& builder, Value const& root) {
+  std::ostringstream sout;
+  StreamWriterPtr const writer(builder.newStreamWriter());
+  writer->write(root, &sout);
+  return sout.str();
+}
+
+std::ostream& operator<<(std::ostream& sout, Value const& root) {
+  StreamWriterBuilder builder;
+  StreamWriterPtr const writer(builder.newStreamWriter());
+  writer->write(root, &sout);
+  return sout;
+}
+
+} // namespace Json
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: src/lib_json/json_writer.cpp
+// //////////////////////////////////////////////////////////////////////
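
Likewise, a minimal sketch of the writer API implemented above (`StreamWriterBuilder`, `writeString`, and the streaming `operator<<`). The include path and all settings values shown are illustrative assumptions, not prescribed by this change.

```cpp
// Illustrative only: assumes the amalgamated jsoncpp header added by this change.
#include "json.h"
#include <iostream>

int main() {
  Json::Value root;
  root["name"] = "example";
  root["ratio"] = 0.25;

  // Defaults come from StreamWriterBuilder::setDefaults():
  // commentStyle="All", indentation="\t", enableYAMLCompatibility=false,
  // dropNullPlaceholders=false, useSpecialFloats=false, precision=17.
  Json::StreamWriterBuilder builder;
  builder["indentation"] = "  ";     // keys are checked by validate()/getValidWriterKeys()
  builder["commentStyle"] = "None";

  // writeString() creates a StreamWriter via the factory and serializes to a string.
  std::string text = Json::writeString(builder, root);
  std::cout << text << "\n";

  // operator<< serializes with a default-constructed StreamWriterBuilder.
  std::cout << root << std::endl;
  return 0;
}
```
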
+
+
+
+
+

+ 1 - 0
csharp/.gitignore

@@ -33,3 +33,4 @@ mono/.libs
 mono/*.exe
 mono/*.dll
 lib/protoc.exe
+*.ncrunch*

+ 1 - 1
csharp/CHANGES.txt

@@ -128,7 +128,7 @@ Fixes:
 - Issue 12:	default value for enumerate fields must be filled out
 
 Other:
-- Rewrite of build using MSBbuild instead of NAnt
+- Rewrite of build using MSBuild instead of NAnt
 - Moved to NUnit Version 2.2.8.0
 - Changed to using secure .snk for releases
 

+ 37 - 0
csharp/Google.Protobuf.Tools.nuspec

@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="utf-8"?>
+<package>
+  <metadata>
+    <id>Google.Protobuf.Tools</id>
+    <title>Google Protocol Buffers tools</title>
+    <summary>Tools for Protocol Buffers - Google's data interchange format.</summary>
+    <description>See project site for more info.</description>
+    <version>3.0.0-beta3</version>
+    <authors>Google Inc.</authors>
+    <owners>protobuf-packages</owners>
+    <licenseUrl>https://github.com/google/protobuf/blob/master/LICENSE</licenseUrl>
+    <projectUrl>https://github.com/google/protobuf</projectUrl>
+    <requireLicenseAcceptance>false</requireLicenseAcceptance>
+    <releaseNotes>Tools for Protocol Buffers</releaseNotes>
+    <copyright>Copyright 2015, Google Inc.</copyright>
+    <tags>Protocol Buffers Binary Serialization Format Google proto proto3</tags>
+  </metadata>
+  <files>
+    <file src="protoc\windows_x86\protoc.exe" target="tools\windows_x86\protoc.exe" />
+    <file src="protoc\windows_x64\protoc.exe" target="tools\windows_x64\protoc.exe" />
+    <file src="protoc\linux_x86\protoc" target="tools\linux_x86\protoc" />
+    <file src="protoc\linux_x64\protoc" target="tools\linux_x64\protoc" />
+    <file src="protoc\macosx_x86\protoc" target="tools\macosx_x86\protoc" />
+    <file src="protoc\macosx_x64\protoc" target="tools\macosx_x64\protoc" />
+    <file src="..\src\google\protobuf\any.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\api.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\descriptor.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\duration.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\empty.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\field_mask.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\source_context.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\struct.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\timestamp.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\type.proto" target="tools\google\protobuf" />
+    <file src="..\src\google\protobuf\wrappers.proto" target="tools\google\protobuf" />
+  </files>
+</package>

+ 32 - 16
csharp/README.md

@@ -1,18 +1,7 @@
 This directory contains the C# Protocol Buffers runtime library.
 
-Status: Alpha - ready for early adopters
-========================================
-
-This code is still under significant churn. Unlike the original port,
-it only supports proto3 (but not *all* of proto3 yet) - there are no
-unknown fields or extensions, for example. protoc will (eventually)
-deliberately fail if it is asked to generate C# code for proto2
-messages other than descriptor.proto, which is still required for
-reflection. (It's currently exposed publicly, but won't be
-eventually.)
-
-Also unlike the original port, the new version embraces mutability -
-there are no builder types.
+Status: Beta - ready for external testing
+=========================================
 
 Usage
 =====
@@ -36,14 +25,25 @@ The runtime library is built as a portable class library, supporting:
 - Windows 8
 - Windows Phone Silverlight 8
 - Windows Phone 8.1
-- .NET Core (dnxcore)
+- .NET Core
+
+You should be able to use Protocol Buffers in Visual Studio 2012 and
+all later versions. This includes all code generated by `protoc`,
+which only uses features from C# 3 and earlier.
 
 Building
 ========
 
-Open the `src/Google.Protobuf.sln` solution in Visual Studio. Click "Build solution" to build the solution. You should be able to run the NUnit test from Test Explorer (you might need to install NUnit Visual Studio add-in).
+Open the `src/Google.Protobuf.sln` solution in Visual Studio 2015 or
+later. You should be able to run the NUnit tests from Test Explorer
+(you might need to install the NUnit Visual Studio add-in).
 
-Supported Visual Studio versions are VS2013 (update 4) and VS2015. On Linux, you can also use Monodevelop 5.9 (older versions might work fine).
+Although *users* of this project are only expected to have Visual
+Studio 2012 or later, *developers* of the library are required to
+have Visual Studio 2015 or later, as the library uses C# 6 features
+in its implementation. These features have no impact when using the
+compiled code - they're only relevant when building the
+`Google.Protobuf` assembly.
 
 History of C# protobufs
 =======================
@@ -52,3 +52,19 @@ This subtree was originally imported from https://github.com/jskeet/protobuf-csh
 and represents the latest development version of C# protobufs, that will now be developed
 and maintained by Google. All the development will be done in open, under this repository
 (https://github.com/google/protobuf).
+
+The previous project differs from this project in a number of ways:
+
+- The old code only supported proto2; the new code only supports
+proto3 (so no unknown fields, no required/optional distinction, no
+extensions)
+- The old code was based on immutable message types and builders for
+them
+- The old code did not support maps or `oneof`
+- The old code had its own JSON representation, whereas the new code
+uses the standard protobuf JSON representation
+- The old code had no notion of the "well-known types" which have
+special support in the new code
+- The old project supported some older platforms (such as older
+versions of Silverlight) which are not currently supported in the
+new project

+ 2 - 12
csharp/generate_protos.sh

@@ -3,20 +3,10 @@
 # You first need to make sure protoc has been built (see instructions on
 # building protoc in root of this repository)
 
-# This script performs a few fix-ups as part of generation. These are:
-# - descriptor.proto is renamed to descriptor_proto_file.proto before
-#   generation, to avoid the naming collision between the class for the file
-#   descriptor and its Descriptor property
-# - This change also impacts UnittestCustomOptions, which expects to
-#   use a class of Descriptor when it's actually been renamed to
-#   DescriptorProtoFile.
-# - Issue 307 (codegen for double-nested types) breaks Unittest.proto and
-#   its lite equivalents.
-
 set -ex
 
 # cd to repository root
-cd $(dirname $0)/..
+pushd $(dirname $0)/..
 
 # Protocol buffer compiler to use. If the PROTOC variable is set,
 # use that. Otherwise, probe for expected locations under both
@@ -68,5 +58,5 @@ $PROTOC -Icsharp/protos --csharp_out=csharp/src/Google.Protobuf.Test \
 $PROTOC -Iexamples --csharp_out=csharp/src/AddressBook \
     examples/addressbook.proto
 
-$PROTOC -Iconformance --csharp_out=csharp/src/Google.Protobuf.Conformance \
+$PROTOC -Iconformance -Isrc --csharp_out=csharp/src/Google.Protobuf.Conformance \
     conformance/conformance.proto

BIN
csharp/keys/Google.Protobuf.snk


+ 5 - 1
csharp/keys/README.md

@@ -2,4 +2,8 @@ Contents
 --------
 
 - Google.Protobuf.public.snk:
-  Public key to verify strong name of Google.Protobuf assemblies.
+  Public key to verify strong name of Google.Protobuf assemblies.
+- Google.Protobuf.snk:
+  Signing key used to provide the strong name of Google.Protobuf assemblies.
+  As per [Microsoft guidance](https://msdn.microsoft.com/en-us/library/wd40t7ad(v=vs.110).aspx),
+  the signing key should be checked into the repository.

+ 8 - 1
csharp/protos/unittest_issues.proto

@@ -116,4 +116,11 @@ message TestJsonFieldOrdering {
     string o2_string = 3;
   }
   
-}
+}
+
+message TestJsonName {
+  // Message for testing the effects of the json_name option
+  string name = 1;
+  string description = 2 [json_name = "desc"];
+  string guid = 3 [json_name = "exid"];
+}
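
For reference, a small sketch of what the json_name option does at runtime, mirroring the IssuesTest cases added further down in this diff; the generated TestJsonName type and its namespace are assumed from that test code.

    using Google.Protobuf;
    using UnitTest.Issues.TestProtos;

    class JsonNameSketch
    {
        static void Main()
        {
            var message = new TestJsonName { Name = "test", Description = "test2", Guid = "test3" };
            // Fields with json_name are written under their custom names:
            // { "name": "test", "desc": "test2", "exid": "test3" }
            System.Console.WriteLine(JsonFormatter.Default.Format(message));
        }
    }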

+ 3 - 3
csharp/src/AddressBook/AddPerson.cs

@@ -73,13 +73,13 @@ namespace Google.Protobuf.Examples.AddressBook
                 switch (type)
                 {
                     case "mobile":
-                        phoneNumber.Type = Person.Types.PhoneType.MOBILE;
+                        phoneNumber.Type = Person.Types.PhoneType.Mobile;
                         break;
                     case "home":
-                        phoneNumber.Type = Person.Types.PhoneType.HOME;
+                        phoneNumber.Type = Person.Types.PhoneType.Home;
                         break;
                     case "work":
-                        phoneNumber.Type = Person.Types.PhoneType.WORK;
+                        phoneNumber.Type = Person.Types.PhoneType.Work;
                         break;
                     default:
                         output.Write("Unknown phone type. Using default.");

+ 25 - 22
csharp/src/AddressBook/Addressbook.cs

@@ -11,7 +11,7 @@ namespace Google.Protobuf.Examples.AddressBook {
 
   /// <summary>Holder for reflection information generated from addressbook.proto</summary>
   [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
-  public static partial class Addressbook {
+  public static partial class AddressbookReflection {
 
     #region Descriptor
     /// <summary>File descriptor for addressbook.proto</summary>
@@ -20,7 +20,7 @@ namespace Google.Protobuf.Examples.AddressBook {
     }
     private static pbr::FileDescriptor descriptor;
 
-    static Addressbook() {
+    static AddressbookReflection() {
       byte[] descriptorData = global::System.Convert.FromBase64String(
           string.Concat(
             "ChFhZGRyZXNzYm9vay5wcm90bxIIdHV0b3JpYWwi1QEKBlBlcnNvbhIMCgRu",
@@ -32,24 +32,27 @@ namespace Google.Protobuf.Examples.AddressBook {
             "GAEgAygLMhAudHV0b3JpYWwuUGVyc29uQlAKFGNvbS5leGFtcGxlLnR1dG9y",
             "aWFsQhFBZGRyZXNzQm9va1Byb3Rvc6oCJEdvb2dsZS5Qcm90b2J1Zi5FeGFt",
             "cGxlcy5BZGRyZXNzQm9va2IGcHJvdG8z"));
-      descriptor = pbr::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
+      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
           new pbr::FileDescriptor[] { },
-          new pbr::GeneratedCodeInfo(null, new pbr::GeneratedCodeInfo[] {
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.Examples.AddressBook.Person), new[]{ "Name", "Id", "Email", "Phones" }, null, new[]{ typeof(global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType) }, new pbr::GeneratedCodeInfo[] { new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneNumber), new[]{ "Number", "Type" }, null, null, null)}),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.Examples.AddressBook.AddressBook), new[]{ "People" }, null, null, null)
+          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Examples.AddressBook.Person), global::Google.Protobuf.Examples.AddressBook.Person.Parser, new[]{ "Name", "Id", "Email", "Phones" }, null, new[]{ typeof(global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType) }, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneNumber), global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneNumber.Parser, new[]{ "Number", "Type" }, null, null, null)}),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.Examples.AddressBook.AddressBook), global::Google.Protobuf.Examples.AddressBook.AddressBook.Parser, new[]{ "People" }, null, null, null)
           }));
     }
     #endregion
 
   }
   #region Messages
+  /// <summary>
+  ///  [START messages]
+  /// </summary>
   [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
   public sealed partial class Person : pb::IMessage<Person> {
     private static readonly pb::MessageParser<Person> _parser = new pb::MessageParser<Person>(() => new Person());
     public static pb::MessageParser<Person> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.Examples.AddressBook.Addressbook.Descriptor.MessageTypes[0]; }
+      get { return global::Google.Protobuf.Examples.AddressBook.AddressbookReflection.Descriptor.MessageTypes[0]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -79,7 +82,7 @@ namespace Google.Protobuf.Examples.AddressBook {
     public string Name {
       get { return name_; }
       set {
-        name_ = pb::Preconditions.CheckNotNull(value, "value");
+        name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
       }
     }
 
@@ -102,7 +105,7 @@ namespace Google.Protobuf.Examples.AddressBook {
     public string Email {
       get { return email_; }
       set {
-        email_ = pb::Preconditions.CheckNotNull(value, "value");
+        email_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
       }
     }
 
@@ -143,7 +146,7 @@ namespace Google.Protobuf.Examples.AddressBook {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -225,9 +228,9 @@ namespace Google.Protobuf.Examples.AddressBook {
     [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
     public static partial class Types {
       public enum PhoneType {
-        MOBILE = 0,
-        HOME = 1,
-        WORK = 2,
+        [pbr::OriginalName("MOBILE")] Mobile = 0,
+        [pbr::OriginalName("HOME")] Home = 1,
+        [pbr::OriginalName("WORK")] Work = 2,
       }
 
       [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
@@ -264,13 +267,13 @@ namespace Google.Protobuf.Examples.AddressBook {
         public string Number {
           get { return number_; }
           set {
-            number_ = pb::Preconditions.CheckNotNull(value, "value");
+            number_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
           }
         }
 
         /// <summary>Field number for the "type" field.</summary>
         public const int TypeFieldNumber = 2;
-        private global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType type_ = global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType.MOBILE;
+        private global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType type_ = 0;
         public global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType Type {
           get { return type_; }
           set {
@@ -297,12 +300,12 @@ namespace Google.Protobuf.Examples.AddressBook {
         public override int GetHashCode() {
           int hash = 1;
           if (Number.Length != 0) hash ^= Number.GetHashCode();
-          if (Type != global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType.MOBILE) hash ^= Type.GetHashCode();
+          if (Type != 0) hash ^= Type.GetHashCode();
           return hash;
         }
 
         public override string ToString() {
-          return pb::JsonFormatter.Default.Format(this);
+          return pb::JsonFormatter.ToDiagnosticString(this);
         }
 
         public void WriteTo(pb::CodedOutputStream output) {
@@ -310,7 +313,7 @@ namespace Google.Protobuf.Examples.AddressBook {
             output.WriteRawTag(10);
             output.WriteString(Number);
           }
-          if (Type != global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType.MOBILE) {
+          if (Type != 0) {
             output.WriteRawTag(16);
             output.WriteEnum((int) Type);
           }
@@ -321,7 +324,7 @@ namespace Google.Protobuf.Examples.AddressBook {
           if (Number.Length != 0) {
             size += 1 + pb::CodedOutputStream.ComputeStringSize(Number);
           }
-          if (Type != global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType.MOBILE) {
+          if (Type != 0) {
             size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Type);
           }
           return size;
@@ -334,7 +337,7 @@ namespace Google.Protobuf.Examples.AddressBook {
           if (other.Number.Length != 0) {
             Number = other.Number;
           }
-          if (other.Type != global::Google.Protobuf.Examples.AddressBook.Person.Types.PhoneType.MOBILE) {
+          if (other.Type != 0) {
             Type = other.Type;
           }
         }
@@ -374,7 +377,7 @@ namespace Google.Protobuf.Examples.AddressBook {
     public static pb::MessageParser<AddressBook> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.Examples.AddressBook.Addressbook.Descriptor.MessageTypes[1]; }
+      get { return global::Google.Protobuf.Examples.AddressBook.AddressbookReflection.Descriptor.MessageTypes[1]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -426,7 +429,7 @@ namespace Google.Protobuf.Examples.AddressBook {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {

+ 3 - 3
csharp/src/AddressBook/ListPeople.cs

@@ -55,13 +55,13 @@ namespace Google.Protobuf.Examples.AddressBook
                 {
                     switch (phoneNumber.Type)
                     {
-                        case Person.Types.PhoneType.MOBILE:
+                        case Person.Types.PhoneType.Mobile:
                             Console.Write("  Mobile phone #: ");
                             break;
-                        case Person.Types.PhoneType.HOME:
+                        case Person.Types.PhoneType.Home:
                             Console.Write("  Home phone #: ");
                             break;
-                        case Person.Types.PhoneType.WORK:
+                        case Person.Types.PhoneType.Work:
                             Console.Write("  Work phone #: ");
                             break;
                     }

+ 5 - 2
csharp/src/AddressBook/SampleUsage.cs

@@ -56,8 +56,11 @@ namespace Google.Protobuf.Examples.AddressBook
             }
             Person copy = Person.Parser.ParseFrom(bytes);
 
-            // A more streamlined approach might look like this:
-            bytes = copy.ToByteArray();
+            AddressBook book = new AddressBook
+            {
+                People = { copy }
+            };
+            bytes = book.ToByteArray();
             // And read the address book back again
             AddressBook restored = AddressBook.Parser.ParseFrom(bytes);
             // The message performs a deep-comparison on equality:

File diffs are limited because there are too many of them
+ 168 - 119
csharp/src/Google.Protobuf.Conformance/Conformance.cs


+ 41 - 25
csharp/src/Google.Protobuf.Conformance/Program.cs

@@ -31,6 +31,7 @@
 #endregion
 
 using Conformance;
+using Google.Protobuf.Reflection;
 using System;
 using System.IO;
 
@@ -47,16 +48,17 @@ namespace Google.Protobuf.Conformance
             // This way we get the binary streams instead of readers/writers.
             var input = new BinaryReader(Console.OpenStandardInput());
             var output = new BinaryWriter(Console.OpenStandardOutput());
+            var typeRegistry = TypeRegistry.FromMessages(TestAllTypes.Descriptor);
 
             int count = 0;
-            while (RunTest(input, output))
+            while (RunTest(input, output, typeRegistry))
             {
                 count++;
             }
             Console.Error.WriteLine("Received EOF after {0} tests", count);
         }
 
-        private static bool RunTest(BinaryReader input, BinaryWriter output)
+        private static bool RunTest(BinaryReader input, BinaryWriter output, TypeRegistry typeRegistry)
         {
             int? size = ReadInt32(input);
             if (size == null)
@@ -69,7 +71,7 @@ namespace Google.Protobuf.Conformance
                 throw new EndOfStreamException("Read " + inputData.Length + " bytes of data when expecting " + size);
             }
             ConformanceRequest request = ConformanceRequest.Parser.ParseFrom(inputData);
-            ConformanceResponse response = PerformRequest(request);
+            ConformanceResponse response = PerformRequest(request, typeRegistry);
             byte[] outputData = response.ToByteArray();
             output.Write(outputData.Length);
             output.Write(outputData);
@@ -77,34 +79,48 @@ namespace Google.Protobuf.Conformance
             return true;
         }
 
-        private static ConformanceResponse PerformRequest(ConformanceRequest request)
+        private static ConformanceResponse PerformRequest(ConformanceRequest request, TypeRegistry typeRegistry)
         {
             TestAllTypes message;
-            switch (request.PayloadCase)
+            try
             {
-                case ConformanceRequest.PayloadOneofCase.JsonPayload:
-                    return new ConformanceResponse { Skipped = "JSON parsing not implemented in C# yet" };
-                case ConformanceRequest.PayloadOneofCase.ProtobufPayload:
-                    try
-                    {
+                switch (request.PayloadCase)
+                {
+                    case ConformanceRequest.PayloadOneofCase.JsonPayload:
+                        var parser = new JsonParser(new JsonParser.Settings(20, typeRegistry));
+                        message = parser.Parse<TestAllTypes>(request.JsonPayload);
+                        break;
+                    case ConformanceRequest.PayloadOneofCase.ProtobufPayload:
                         message = TestAllTypes.Parser.ParseFrom(request.ProtobufPayload);
-                    }
-                    catch (InvalidProtocolBufferException e)
-                    {
-                        return new ConformanceResponse { ParseError = e.Message };
-                    }
-                    break;
-                default:
-                    throw new Exception("Unsupported request payload: " + request.PayloadCase);
+                        break;
+                    default:
+                        throw new Exception("Unsupported request payload: " + request.PayloadCase);
+                }
             }
-            switch (request.RequestedOutputFormat)
+            catch (InvalidProtocolBufferException e)
             {
-                case global::Conformance.WireFormat.JSON:
-                    return new ConformanceResponse { JsonPayload = JsonFormatter.Default.Format(message) };
-                case global::Conformance.WireFormat.PROTOBUF:
-                    return new ConformanceResponse { ProtobufPayload = message.ToByteString() };
-                default:
-                    throw new Exception("Unsupported request output format: " + request.PayloadCase);
+                return new ConformanceResponse { ParseError = e.Message };
+            }
+            catch (InvalidJsonException e)
+            {
+                return new ConformanceResponse { ParseError = e.Message };
+            }
+            try
+            {
+                switch (request.RequestedOutputFormat)
+                {
+                    case global::Conformance.WireFormat.Json:
+                        var formatter = new JsonFormatter(new JsonFormatter.Settings(false, typeRegistry));
+                        return new ConformanceResponse { JsonPayload = formatter.Format(message) };
+                    case global::Conformance.WireFormat.Protobuf:
+                        return new ConformanceResponse { ProtobufPayload = message.ToByteString() };
+                    default:
+                        throw new Exception("Unsupported request output format: " + request.PayloadCase);
+                }
+            }
+            catch (InvalidOperationException e)
+            {
+                return new ConformanceResponse { SerializeError = e.Message };
             }
         }
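
As a standalone illustration of the JSON support the conformance runner now exercises, the following minimal sketch round-trips a message through the same parser and formatter settings; it uses the unit-test TestAllTypes message rather than the conformance one, so the namespace is assumed from the test code later in this diff.

    using Google.Protobuf;
    using Google.Protobuf.Reflection;
    using Google.Protobuf.TestProtos;

    class JsonRoundTripSketch
    {
        static void Main()
        {
            // The type registry is only needed to resolve message types inside Any fields.
            var registry = TypeRegistry.FromMessages(TestAllTypes.Descriptor);
            var formatter = new JsonFormatter(new JsonFormatter.Settings(false, registry));
            var parser = new JsonParser(new JsonParser.Settings(20, registry));

            var original = new TestAllTypes { SingleInt32 = 10 };
            string json = formatter.Format(original);
            TestAllTypes parsed = parser.Parse<TestAllTypes>(json);
            System.Console.WriteLine(original.Equals(parsed)); // True
        }
    }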
 

+ 72 - 4
csharp/src/Google.Protobuf.Test/CodedInputStreamTest.cs

@@ -284,7 +284,7 @@ namespace Google.Protobuf
             Assert.Throws<InvalidProtocolBufferException>(() => input.ReadBytes());
         }
 
-        private static TestRecursiveMessage MakeRecursiveMessage(int depth)
+        internal static TestRecursiveMessage MakeRecursiveMessage(int depth)
         {
             if (depth == 0)
             {
@@ -296,7 +296,7 @@ namespace Google.Protobuf
             }
         }
 
-        private static void AssertMessageDepth(TestRecursiveMessage message, int depth)
+        internal static void AssertMessageDepth(TestRecursiveMessage message, int depth)
         {
             if (depth == 0)
             {
@@ -469,6 +469,52 @@ namespace Google.Protobuf
             Assert.AreEqual("field 3", input.ReadString());
         }
 
+        [Test]
+        public void SkipGroup_WrongEndGroupTag()
+        {
+            // Create an output stream with:
+            // Field 1: string "field 1"
+            // Start group 2
+            //   Field 3: fixed int32
+            // End group 4 (should give an error)
+            var stream = new MemoryStream();
+            var output = new CodedOutputStream(stream);
+            output.WriteTag(1, WireFormat.WireType.LengthDelimited);
+            output.WriteString("field 1");
+
+            // The outer group...
+            output.WriteTag(2, WireFormat.WireType.StartGroup);
+            output.WriteTag(3, WireFormat.WireType.Fixed32);
+            output.WriteFixed32(100);
+            output.WriteTag(4, WireFormat.WireType.EndGroup);
+            output.Flush();
+            stream.Position = 0;
+
+            // Now act like a generated client
+            var input = new CodedInputStream(stream);
+            Assert.AreEqual(WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited), input.ReadTag());
+            Assert.AreEqual("field 1", input.ReadString());
+            Assert.AreEqual(WireFormat.MakeTag(2, WireFormat.WireType.StartGroup), input.ReadTag());
+            Assert.Throws<InvalidProtocolBufferException>(input.SkipLastField);
+        }
+
+        [Test]
+        public void RogueEndGroupTag()
+        {
+            // If we have an end-group tag without a leading start-group tag, generated
+            // code will just call SkipLastField... so that should fail.
+
+            var stream = new MemoryStream();
+            var output = new CodedOutputStream(stream);
+            output.WriteTag(1, WireFormat.WireType.EndGroup);
+            output.Flush();
+            stream.Position = 0;
+
+            var input = new CodedInputStream(stream);
+            Assert.AreEqual(WireFormat.MakeTag(1, WireFormat.WireType.EndGroup), input.ReadTag());
+            Assert.Throws<InvalidProtocolBufferException>(input.SkipLastField);
+        }
+
         [Test]
         public void EndOfStreamReachedWhileSkippingGroup()
         {
@@ -484,7 +530,7 @@ namespace Google.Protobuf
             // Now act like a generated client
             var input = new CodedInputStream(stream);
             input.ReadTag();
-            Assert.Throws<InvalidProtocolBufferException>(() => input.SkipLastField());
+            Assert.Throws<InvalidProtocolBufferException>(input.SkipLastField);
         }
 
         [Test]
@@ -506,7 +552,7 @@ namespace Google.Protobuf
             // Now act like a generated client
             var input = new CodedInputStream(stream);
             Assert.AreEqual(WireFormat.MakeTag(1, WireFormat.WireType.StartGroup), input.ReadTag());
-            Assert.Throws<InvalidProtocolBufferException>(() => input.SkipLastField());
+            Assert.Throws<InvalidProtocolBufferException>(input.SkipLastField);
         }
 
         [Test]
@@ -526,5 +572,27 @@ namespace Google.Protobuf
             Assert.Throws<ArgumentOutOfRangeException>(() => CodedInputStream.CreateWithLimits(stream, 0, 1));
             Assert.Throws<ArgumentOutOfRangeException>(() => CodedInputStream.CreateWithLimits(stream, 1, 0));
         }
+
+        [Test]
+        public void Dispose_DisposesUnderlyingStream()
+        {
+            var memoryStream = new MemoryStream();
+            Assert.IsTrue(memoryStream.CanRead);
+            using (var cis = new CodedInputStream(memoryStream))
+            {
+            }
+            Assert.IsFalse(memoryStream.CanRead); // Disposed
+        }
+
+        [Test]
+        public void Dispose_WithLeaveOpen()
+        {
+            var memoryStream = new MemoryStream();
+            Assert.IsTrue(memoryStream.CanRead);
+            using (var cis = new CodedInputStream(memoryStream, true))
+            {
+            }
+            Assert.IsTrue(memoryStream.CanRead); // We left the stream open
+        }
     }
 }

+ 28 - 0
csharp/src/Google.Protobuf.Test/CodedOutputStreamTest.cs

@@ -387,5 +387,33 @@ namespace Google.Protobuf
                 Assert.IsTrue(cin.IsAtEnd);
             }
         }
+
+        [Test]
+        public void Dispose_DisposesUnderlyingStream()
+        {
+            var memoryStream = new MemoryStream();
+            Assert.IsTrue(memoryStream.CanWrite);
+            using (var cos = new CodedOutputStream(memoryStream))
+            {
+                cos.WriteRawByte(0);
+                Assert.AreEqual(0, memoryStream.Position); // Not flushed yet
+            }
+            Assert.AreEqual(1, memoryStream.ToArray().Length); // Flushed data from CodedOutputStream to MemoryStream
+            Assert.IsFalse(memoryStream.CanWrite); // Disposed
+        }
+
+        [Test]
+        public void Dispose_WithLeaveOpen()
+        {
+            var memoryStream = new MemoryStream();
+            Assert.IsTrue(memoryStream.CanWrite);
+            using (var cos = new CodedOutputStream(memoryStream, true))
+            {
+                cos.WriteRawByte(0);
+                Assert.AreEqual(0, memoryStream.Position); // Not flushed yet
+            }
+            Assert.AreEqual(1, memoryStream.Position); // Flushed data from CodedOutputStream to MemoryStream
+            Assert.IsTrue(memoryStream.CanWrite); // We left the stream open
+        }
     }
 }

+ 2 - 54
csharp/src/Google.Protobuf.Test/Collections/MapFieldTest.cs

@@ -56,7 +56,7 @@ namespace Google.Protobuf.Collections
         }
 
         [Test]
-        public void NullValues()
+        public void NullValuesProhibited()
         {
             TestNullValues<int?>(0);
             TestNullValues("");
@@ -65,19 +65,12 @@ namespace Google.Protobuf.Collections
 
         private void TestNullValues<T>(T nonNullValue)
         {
-            var map = new MapField<int, T>(false);
+            var map = new MapField<int, T>();
             var nullValue = (T) (object) null;
             Assert.Throws<ArgumentNullException>(() => map.Add(0, nullValue));
             Assert.Throws<ArgumentNullException>(() => map[0] = nullValue);
             map.Add(1, nonNullValue);
             map[1] = nonNullValue;
-
-            // Doesn't throw...
-            map = new MapField<int, T>(true);
-            map.Add(0, nullValue);
-            map[0] = nullValue;
-            map.Add(1, nonNullValue);
-            map[1] = nonNullValue;
         }
 
         [Test]
@@ -160,27 +153,6 @@ namespace Google.Protobuf.Collections
             EqualityTester.AssertInequality(map1, map2);
         }
 
-        [Test]
-        public void EqualityHandlesNullValues()
-        {
-            var map1 = new MapField<string, ForeignMessage>();
-            map1.Add("a", new ForeignMessage { C = 10 });
-            map1.Add("b", null);
-
-            var map2 = new MapField<string, ForeignMessage>();
-            map2.Add("a", new ForeignMessage { C = 10 });
-            map2.Add("b", null);
-
-            EqualityTester.AssertEquality(map1, map2);
-            // Check the null value isn't ignored entirely...
-            Assert.IsTrue(map1.Remove("b"));
-            EqualityTester.AssertInequality(map1, map2);
-            map1.Add("b", new ForeignMessage());
-            EqualityTester.AssertInequality(map1, map2);
-            map1["b"] = null;
-            EqualityTester.AssertEquality(map1, map2);
-        }
-
         [Test]
         public void Add_Dictionary()
         {
@@ -453,30 +425,6 @@ namespace Google.Protobuf.Collections
             Assert.Throws<ArgumentNullException>(() => dictionary["x"] = null);
         }
 
-        [Test]
-        public void AllowNullValues_Property()
-        {
-            // Non-message reference type values are non-nullable by default, but can be overridden
-            Assert.IsFalse(new MapField<int, string>().AllowsNullValues);
-            Assert.IsFalse(new MapField<int, string>(false).AllowsNullValues);
-            Assert.IsTrue(new MapField<int, string>(true).AllowsNullValues);
-
-            // Non-nullable value type values are never nullable
-            Assert.IsFalse(new MapField<int, int>().AllowsNullValues);
-            Assert.IsFalse(new MapField<int, int>(false).AllowsNullValues);
-            Assert.Throws<ArgumentException>(() => new MapField<int, int>(true));
-
-            // Message type values are nullable by default, but can be overridden
-            Assert.IsTrue(new MapField<int, TestAllTypes>().AllowsNullValues);
-            Assert.IsFalse(new MapField<int, TestAllTypes>(false).AllowsNullValues);
-            Assert.IsTrue(new MapField<int, TestAllTypes>(true).AllowsNullValues);
-
-            // Nullable value type values are nullable by default, but can be overridden
-            Assert.IsTrue(new MapField<int, int?>().AllowsNullValues);
-            Assert.IsFalse(new MapField<int, int?>(false).AllowsNullValues);
-            Assert.IsTrue(new MapField<int, int?>(true).AllowsNullValues);
-        }
-
         [Test]
         public void KeysReturnsLiveView()
         {

+ 1 - 1
csharp/src/Google.Protobuf.Test/FieldCodecTest.cs

@@ -58,7 +58,7 @@ namespace Google.Protobuf
             new FieldCodecTestData<float>(FieldCodec.ForFloat(100), 1234.5f, "Float"),
             new FieldCodecTestData<double>(FieldCodec.ForDouble(100), 1234567890.5d, "Double"),
             new FieldCodecTestData<ForeignEnum>(
-                FieldCodec.ForEnum(100, t => (int) t, t => (ForeignEnum) t), ForeignEnum.FOREIGN_BAZ, "Enum"),
+                FieldCodec.ForEnum(100, t => (int) t, t => (ForeignEnum) t), ForeignEnum.ForeignBaz, "Enum"),
             new FieldCodecTestData<ForeignMessage>(
                 FieldCodec.ForMessage(100, ForeignMessage.Parser), new ForeignMessage { C = 10 }, "Message"),
         };

+ 89 - 21
csharp/src/Google.Protobuf.Test/GeneratedMessageTest.cs

@@ -66,13 +66,13 @@ namespace Google.Protobuf
             Assert.AreEqual(0, message.SingleFixed32);
             Assert.AreEqual(0L, message.SingleFixed64);
             Assert.AreEqual(0.0f, message.SingleFloat);
-            Assert.AreEqual(ForeignEnum.FOREIGN_UNSPECIFIED, message.SingleForeignEnum);
+            Assert.AreEqual(ForeignEnum.ForeignUnspecified, message.SingleForeignEnum);
             Assert.IsNull(message.SingleForeignMessage);
-            Assert.AreEqual(ImportEnum.IMPORT_ENUM_UNSPECIFIED, message.SingleImportEnum);
+            Assert.AreEqual(ImportEnum.Unspecified, message.SingleImportEnum);
             Assert.IsNull(message.SingleImportMessage);
             Assert.AreEqual(0, message.SingleInt32);
             Assert.AreEqual(0L, message.SingleInt64);
-            Assert.AreEqual(TestAllTypes.Types.NestedEnum.NESTED_ENUM_UNSPECIFIED, message.SingleNestedEnum);
+            Assert.AreEqual(TestAllTypes.Types.NestedEnum.Unspecified, message.SingleNestedEnum);
             Assert.IsNull(message.SingleNestedMessage);
             Assert.IsNull(message.SinglePublicImportMessage);
             Assert.AreEqual(0, message.SingleSfixed32);
@@ -145,13 +145,13 @@ namespace Google.Protobuf
                 SingleFixed32 = 23,
                 SingleFixed64 = 1234567890123,
                 SingleFloat = 12.25f,
-                SingleForeignEnum = ForeignEnum.FOREIGN_BAR,
+                SingleForeignEnum = ForeignEnum.ForeignBar,
                 SingleForeignMessage = new ForeignMessage { C = 10 },
-                SingleImportEnum = ImportEnum.IMPORT_BAZ,
+                SingleImportEnum = ImportEnum.ImportBaz,
                 SingleImportMessage = new ImportMessage { D = 20 },
                 SingleInt32 = 100,
                 SingleInt64 = 3210987654321,
-                SingleNestedEnum = TestAllTypes.Types.NestedEnum.FOO,
+                SingleNestedEnum = TestAllTypes.Types.NestedEnum.Foo,
                 SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 35 },
                 SinglePublicImportMessage = new PublicImportMessage { E = 54 },
                 SingleSfixed32 = -123,
@@ -179,13 +179,13 @@ namespace Google.Protobuf
                 RepeatedFixed32 = { uint.MaxValue, 23 },
                 RepeatedFixed64 = { ulong.MaxValue, 1234567890123 },
                 RepeatedFloat = { 100f, 12.25f },
-                RepeatedForeignEnum = { ForeignEnum.FOREIGN_FOO, ForeignEnum.FOREIGN_BAR },
+                RepeatedForeignEnum = { ForeignEnum.ForeignFoo, ForeignEnum.ForeignBar },
                 RepeatedForeignMessage = { new ForeignMessage(), new ForeignMessage { C = 10 } },
-                RepeatedImportEnum = { ImportEnum.IMPORT_BAZ, ImportEnum.IMPORT_ENUM_UNSPECIFIED },
+                RepeatedImportEnum = { ImportEnum.ImportBaz, ImportEnum.Unspecified },
                 RepeatedImportMessage = { new ImportMessage { D = 20 }, new ImportMessage { D = 25 } },
                 RepeatedInt32 = { 100, 200 },
                 RepeatedInt64 = { 3210987654321, long.MaxValue },
-                RepeatedNestedEnum = { TestAllTypes.Types.NestedEnum.FOO, TestAllTypes.Types.NestedEnum.NEG },
+                RepeatedNestedEnum = { TestAllTypes.Types.NestedEnum.Foo, TestAllTypes.Types.NestedEnum.Neg },
                 RepeatedNestedMessage = { new TestAllTypes.Types.NestedMessage { Bb = 35 }, new TestAllTypes.Types.NestedMessage { Bb = 10 } },
                 RepeatedPublicImportMessage = { new PublicImportMessage { E = 54 }, new PublicImportMessage { E = -1 } },
                 RepeatedSfixed32 = { -123, 123 },
@@ -221,11 +221,11 @@ namespace Google.Protobuf
                 },
                 MapInt32ForeignMessage = {
                     { 0, new ForeignMessage { C = 10 } },
-                    { 5, null },
+                    { 5, new ForeignMessage() },
                 },
                 MapInt32Enum = {
-                    { 1, MapEnum.MAP_ENUM_BAR },
-                    { 2000, MapEnum.MAP_ENUM_FOO }
+                    { 1, MapEnum.Bar },
+                    { 2000, MapEnum.Foo }
                 }
             };
 
@@ -249,7 +249,7 @@ namespace Google.Protobuf
             Assert.AreEqual(1, parsed.MapInt32Bytes.Count);
             Assert.AreEqual(ByteString.Empty, parsed.MapInt32Bytes[0]);
         }
-        
+
         [Test]
         public void MapWithOnlyValue()
         {
@@ -268,6 +268,40 @@ namespace Google.Protobuf
             Assert.AreEqual(nestedMessage, parsed.MapInt32ForeignMessage[0]);
         }
 
+        [Test]
+        public void MapWithOnlyKey_PrimitiveValue()
+        {
+            // Hand-craft the stream to contain a single entry with just a key.
+            var memoryStream = new MemoryStream();
+            var output = new CodedOutputStream(memoryStream);
+            output.WriteTag(TestMap.MapInt32DoubleFieldNumber, WireFormat.WireType.LengthDelimited);
+            int key = 10;
+            output.WriteLength(1 + CodedOutputStream.ComputeInt32Size(key));
+            output.WriteTag(1, WireFormat.WireType.Varint);
+            output.WriteInt32(key);
+            output.Flush();
+
+            var parsed = TestMap.Parser.ParseFrom(memoryStream.ToArray());
+            Assert.AreEqual(0.0, parsed.MapInt32Double[key]);
+        }
+
+        [Test]
+        public void MapWithOnlyKey_MessageValue()
+        {
+            // Hand-craft the stream to contain a single entry with just a key.
+            var memoryStream = new MemoryStream();
+            var output = new CodedOutputStream(memoryStream);
+            output.WriteTag(TestMap.MapInt32ForeignMessageFieldNumber, WireFormat.WireType.LengthDelimited);
+            int key = 10;
+            output.WriteLength(1 + CodedOutputStream.ComputeInt32Size(key));
+            output.WriteTag(1, WireFormat.WireType.Varint);
+            output.WriteInt32(key);
+            output.Flush();
+
+            var parsed = TestMap.Parser.ParseFrom(memoryStream.ToArray());
+            Assert.AreEqual(new ForeignMessage(), parsed.MapInt32ForeignMessage[key]);
+        }
+
         [Test]
         public void MapIgnoresExtraFieldsWithinEntryMessages()
         {
@@ -415,7 +449,7 @@ namespace Google.Protobuf
                 SingleFloat = 12.25f,
                 SingleInt32 = 100,
                 SingleInt64 = 3210987654321,
-                SingleNestedEnum = TestAllTypes.Types.NestedEnum.FOO,
+                SingleNestedEnum = TestAllTypes.Types.NestedEnum.Foo,
                 SingleSfixed32 = -123,
                 SingleSfixed64 = -12345678901234,
                 SingleSint32 = -456,
@@ -445,7 +479,7 @@ namespace Google.Protobuf
                 RepeatedFloat = { 100f, 12.25f },
                 RepeatedInt32 = { 100, 200 },
                 RepeatedInt64 = { 3210987654321, long.MaxValue },
-                RepeatedNestedEnum = { TestAllTypes.Types.NestedEnum.FOO, TestAllTypes.Types.NestedEnum.NEG },
+                RepeatedNestedEnum = { TestAllTypes.Types.NestedEnum.Foo, TestAllTypes.Types.NestedEnum.Neg },
                 RepeatedSfixed32 = { -123, 123 },
                 RepeatedSfixed64 = { -12345678901234, 12345678901234 },
                 RepeatedSint32 = { -456, 100 },
@@ -565,6 +599,16 @@ namespace Google.Protobuf
             Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.None, message.OneofFieldCase);
         }
 
+        [Test]
+        public void Oneof_DefaultValuesNotEqual()
+        {
+            var message1 = new TestAllTypes { OneofString = "" };
+            var message2 = new TestAllTypes { OneofUint32 = 0 };
+            Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofString, message1.OneofFieldCase);
+            Assert.AreEqual(TestAllTypes.OneofFieldOneofCase.OneofUint32, message2.OneofFieldCase);
+            Assert.AreNotEqual(message1, message2);
+        }
+
         [Test]
         public void OneofSerialization_NonDefaultValue()
         {
@@ -626,7 +670,7 @@ namespace Google.Protobuf
         {
             // 130, 3 is the message tag
             // 1 is the data length - but there's no data.
-            var data = new byte[] { 130, 3, 1 };            
+            var data = new byte[] { 130, 3, 1 };
             Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseFrom(data));
         }
 
@@ -635,21 +679,45 @@ namespace Google.Protobuf
         /// for details; we may want to change this.
         /// </summary>
         [Test]
-        public void ExtraEndGroupSkipped()
+        public void ExtraEndGroupThrows()
         {
             var message = SampleMessages.CreateFullTestAllTypes();
             var stream = new MemoryStream();
             var output = new CodedOutputStream(stream);
 
-            output.WriteTag(100, WireFormat.WireType.EndGroup);
             output.WriteTag(TestAllTypes.SingleFixed32FieldNumber, WireFormat.WireType.Fixed32);
             output.WriteFixed32(123);
+            output.WriteTag(100, WireFormat.WireType.EndGroup);
 
             output.Flush();
 
             stream.Position = 0;
-            var parsed = TestAllTypes.Parser.ParseFrom(stream);
-            Assert.AreEqual(new TestAllTypes { SingleFixed32 = 123 }, parsed);
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseFrom(stream));
+        }
+
+        [Test]
+        public void CustomDiagnosticMessage_DirectToStringCall()
+        {
+            var message = new ForeignMessage { C = 31 };
+            Assert.AreEqual("{ \"c\": 31, \"@cInHex\": \"1f\" }", message.ToString());
+            Assert.AreEqual("{ \"c\": 31 }", JsonFormatter.Default.Format(message));
+        }
+
+        [Test]
+        public void CustomDiagnosticMessage_Nested()
+        {
+            var message = new TestAllTypes { SingleForeignMessage = new ForeignMessage { C = 16 } };
+            Assert.AreEqual("{ \"singleForeignMessage\": { \"c\": 16, \"@cInHex\": \"10\" } }", message.ToString());
+            Assert.AreEqual("{ \"singleForeignMessage\": { \"c\": 16 } }", JsonFormatter.Default.Format(message));
+        }
+
+        [Test]
+        public void CustomDiagnosticMessage_DirectToTextWriterCall()
+        {
+            var message = new ForeignMessage { C = 31 };
+            var writer = new StringWriter();
+            JsonFormatter.Default.Format(message, writer);
+            Assert.AreEqual("{ \"c\": 31 }", writer.ToString());
         }
     }
-}
+}

+ 6 - 1
csharp/src/Google.Protobuf.Test/Google.Protobuf.Test.csproj

@@ -55,7 +55,7 @@
     <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
     <Prefer32Bit>false</Prefer32Bit>
     <SignAssembly>True</SignAssembly>
-    <AssemblyOriginatorKeyFile>C:\keys\Google.Protobuf.snk</AssemblyOriginatorKeyFile>
+    <AssemblyOriginatorKeyFile>..\..\keys\Google.Protobuf.snk</AssemblyOriginatorKeyFile>
   </PropertyGroup>
   <ItemGroup>
     <Reference Include="mscorlib" />
@@ -95,10 +95,14 @@
     <Compile Include="Collections\MapFieldTest.cs" />
     <Compile Include="Collections\RepeatedFieldTest.cs" />
     <Compile Include="JsonFormatterTest.cs" />
+    <Compile Include="JsonParserTest.cs" />
+    <Compile Include="JsonTokenizerTest.cs" />
     <Compile Include="Reflection\DescriptorsTest.cs" />
     <Compile Include="Reflection\FieldAccessTest.cs" />
+    <Compile Include="Reflection\TypeRegistryTest.cs" />
     <Compile Include="SampleEnum.cs" />
     <Compile Include="SampleMessages.cs" />
+    <Compile Include="TestProtos\ForeignMessagePartial.cs" />
     <Compile Include="TestProtos\MapUnittestProto3.cs" />
     <Compile Include="TestProtos\UnittestImportProto3.cs" />
     <Compile Include="TestProtos\UnittestImportPublicProto3.cs" />
@@ -111,6 +115,7 @@
     <Compile Include="TestProtos\UnittestWellKnownTypes.cs" />
     <Compile Include="WellKnownTypes\AnyTest.cs" />
     <Compile Include="WellKnownTypes\DurationTest.cs" />
+    <Compile Include="WellKnownTypes\FieldMaskTest.cs" />
     <Compile Include="WellKnownTypes\TimestampTest.cs" />
     <Compile Include="WellKnownTypes\WrappersTest.cs" />
   </ItemGroup>

+ 19 - 0
csharp/src/Google.Protobuf.Test/IssuesTest.cs

@@ -59,5 +59,24 @@ namespace Google.Protobuf
             // Underscores aren't reflected in the JSON.
             Assert.AreEqual("{ \"types\": 10, \"descriptor\": 20 }", message.ToString());
         }
+
+        [Test]
+        public void JsonNameParseTest()
+        {
+            var settings = new JsonParser.Settings(10, TypeRegistry.FromFiles(UnittestIssuesReflection.Descriptor));
+            var parser = new JsonParser(settings);
+
+            // It is safe to use either original field name or explicitly specified json_name
+            Assert.AreEqual(new TestJsonName { Name = "test", Description = "test2", Guid = "test3" },
+                parser.Parse<TestJsonName>("{ \"name\": \"test\", \"desc\": \"test2\", \"guid\": \"test3\" }"));
+        }
+
+        [Test]
+        public void JsonNameFormatTest()
+        {
+            var message = new TestJsonName { Name = "test", Description = "test2", Guid = "test3" };
+            Assert.AreEqual("{ \"name\": \"test\", \"desc\": \"test2\", \"exid\": \"test3\" }",
+                JsonFormatter.Default.Format(message));
+        }
     }
 }

+ 156 - 49
csharp/src/Google.Protobuf.Test/JsonFormatterTest.cs

@@ -35,6 +35,9 @@ using Google.Protobuf.TestProtos;
 using NUnit.Framework;
 using UnitTest.Issues.TestProtos;
 using Google.Protobuf.WellKnownTypes;
+using Google.Protobuf.Reflection;
+
+using static Google.Protobuf.JsonParserTest; // For WrapInQuotes
 
 namespace Google.Protobuf
 {
@@ -72,13 +75,13 @@ namespace Google.Protobuf
                 SingleFixed32 = 23,
                 SingleFixed64 = 1234567890123,
                 SingleFloat = 12.25f,
-                SingleForeignEnum = ForeignEnum.FOREIGN_BAR,
+                SingleForeignEnum = ForeignEnum.ForeignBar,
                 SingleForeignMessage = new ForeignMessage { C = 10 },
-                SingleImportEnum = ImportEnum.IMPORT_BAZ,
+                SingleImportEnum = ImportEnum.ImportBaz,
                 SingleImportMessage = new ImportMessage { D = 20 },
                 SingleInt32 = 100,
                 SingleInt64 = 3210987654321,
-                SingleNestedEnum = TestAllTypes.Types.NestedEnum.FOO,
+                SingleNestedEnum = TestAllTypes.Types.NestedEnum.Foo,
                 SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 35 },
                 SinglePublicImportMessage = new PublicImportMessage { E = 54 },
                 SingleSfixed32 = -123,
@@ -162,41 +165,31 @@ namespace Google.Protobuf
         }
 
         [Test]
-        public void UnknownEnumValueOmitted_SingleField()
+        public void UnknownEnumValueNumeric_SingleField()
         {
             var message = new TestAllTypes { SingleForeignEnum = (ForeignEnum) 100 };
-            AssertJson("{ }", JsonFormatter.Default.Format(message));
+            AssertJson("{ 'singleForeignEnum': 100 }", JsonFormatter.Default.Format(message));
         }
 
         [Test]
-        public void UnknownEnumValueOmitted_RepeatedField()
+        public void UnknownEnumValueNumeric_RepeatedField()
         {
-            var message = new TestAllTypes { RepeatedForeignEnum = { ForeignEnum.FOREIGN_BAZ, (ForeignEnum) 100, ForeignEnum.FOREIGN_FOO } };
-            AssertJson("{ 'repeatedForeignEnum': [ 'FOREIGN_BAZ', 'FOREIGN_FOO' ] }", JsonFormatter.Default.Format(message));
+            var message = new TestAllTypes { RepeatedForeignEnum = { ForeignEnum.ForeignBaz, (ForeignEnum) 100, ForeignEnum.ForeignFoo } };
+            AssertJson("{ 'repeatedForeignEnum': [ 'FOREIGN_BAZ', 100, 'FOREIGN_FOO' ] }", JsonFormatter.Default.Format(message));
         }
 
         [Test]
-        public void UnknownEnumValueOmitted_MapField()
+        public void UnknownEnumValueNumeric_MapField()
         {
-            // This matches the C++ behaviour.
-            var message = new TestMap { MapInt32Enum = { { 1, MapEnum.MAP_ENUM_FOO }, { 2, (MapEnum) 100 }, { 3, MapEnum.MAP_ENUM_BAR } } };
-            AssertJson("{ 'mapInt32Enum': { '1': 'MAP_ENUM_FOO', '3': 'MAP_ENUM_BAR' } }", JsonFormatter.Default.Format(message));
+            var message = new TestMap { MapInt32Enum = { { 1, MapEnum.Foo }, { 2, (MapEnum) 100 }, { 3, MapEnum.Bar } } };
+            AssertJson("{ 'mapInt32Enum': { '1': 'MAP_ENUM_FOO', '2': 100, '3': 'MAP_ENUM_BAR' } }", JsonFormatter.Default.Format(message));
         }
 
         [Test]
-        public void UnknownEnumValueOmitted_RepeatedField_AllEntriesUnknown()
+        public void UnknownEnumValue_RepeatedField_AllEntriesUnknown()
         {
-            // *Maybe* we should hold off on writing the "[" until we find that we've got at least one value to write...
-            // but this is what happens at the moment, and it doesn't seem too awful.
             var message = new TestAllTypes { RepeatedForeignEnum = { (ForeignEnum) 200, (ForeignEnum) 100 } };
-            AssertJson("{ 'repeatedForeignEnum': [ ] }", JsonFormatter.Default.Format(message));
-        }
-
-        [Test]
-        public void NullValueForMessage()
-        {
-            var message = new TestMap { MapInt32ForeignMessage = { { 10, null } } };
-            AssertJson("{ 'mapInt32ForeignMessage': { '10': null } }", JsonFormatter.Default.Format(message));
+            AssertJson("{ 'repeatedForeignEnum': [ 200, 100 ] }", JsonFormatter.Default.Format(message));
         }
 
         [Test]
@@ -275,6 +268,13 @@ namespace Google.Protobuf
             AssertJson(expectedJson, JsonFormatter.Default.Format(message));
         }
 
+        [Test]
+        public void WrapperFormatting_Message()
+        {
+            Assert.AreEqual("\"\"", JsonFormatter.Default.Format(new StringValue()));
+            Assert.AreEqual("0", JsonFormatter.Default.Format(new Int32Value()));
+        }
+
         [Test]
         public void WrapperFormatting_IncludeNull()
         {
@@ -311,26 +311,50 @@ namespace Google.Protobuf
         }
 
         [Test]
-        public void TimestampStandalone()
+        [TestCase("1970-01-01T00:00:00Z", 0)]
+        [TestCase("1970-01-01T00:00:00.000000001Z", 1)]
+        [TestCase("1970-01-01T00:00:00.000000010Z", 10)]
+        [TestCase("1970-01-01T00:00:00.000000100Z", 100)]
+        [TestCase("1970-01-01T00:00:00.000001Z", 1000)]
+        [TestCase("1970-01-01T00:00:00.000010Z", 10000)]
+        [TestCase("1970-01-01T00:00:00.000100Z", 100000)]
+        [TestCase("1970-01-01T00:00:00.001Z", 1000000)]
+        [TestCase("1970-01-01T00:00:00.010Z", 10000000)]
+        [TestCase("1970-01-01T00:00:00.100Z", 100000000)]
+        [TestCase("1970-01-01T00:00:00.120Z", 120000000)]
+        [TestCase("1970-01-01T00:00:00.123Z", 123000000)]
+        [TestCase("1970-01-01T00:00:00.123400Z", 123400000)]
+        [TestCase("1970-01-01T00:00:00.123450Z", 123450000)]
+        [TestCase("1970-01-01T00:00:00.123456Z", 123456000)]
+        [TestCase("1970-01-01T00:00:00.123456700Z", 123456700)]
+        [TestCase("1970-01-01T00:00:00.123456780Z", 123456780)]
+        [TestCase("1970-01-01T00:00:00.123456789Z", 123456789)]
+        public void TimestampStandalone(string expected, int nanos)
         {
-            Assert.AreEqual("1970-01-01T00:00:00Z", new Timestamp().ToString());
-            Assert.AreEqual("1970-01-01T00:00:00.100Z", new Timestamp { Nanos = 100000000 }.ToString());
-            Assert.AreEqual("1970-01-01T00:00:00.120Z", new Timestamp { Nanos = 120000000 }.ToString());
-            Assert.AreEqual("1970-01-01T00:00:00.123Z", new Timestamp { Nanos = 123000000 }.ToString());
-            Assert.AreEqual("1970-01-01T00:00:00.123400Z", new Timestamp { Nanos = 123400000 }.ToString());
-            Assert.AreEqual("1970-01-01T00:00:00.123450Z", new Timestamp { Nanos = 123450000 }.ToString());
-            Assert.AreEqual("1970-01-01T00:00:00.123456Z", new Timestamp { Nanos = 123456000 }.ToString());
-            Assert.AreEqual("1970-01-01T00:00:00.123456700Z", new Timestamp { Nanos = 123456700 }.ToString());
-            Assert.AreEqual("1970-01-01T00:00:00.123456780Z", new Timestamp { Nanos = 123456780 }.ToString());
-            Assert.AreEqual("1970-01-01T00:00:00.123456789Z", new Timestamp { Nanos = 123456789 }.ToString());
+            Assert.AreEqual(WrapInQuotes(expected), new Timestamp { Nanos = nanos }.ToString());
+        }
 
-            // One before and one after the Unix epoch
-            Assert.AreEqual("1673-06-19T12:34:56Z",
+        [Test]
+        public void TimestampStandalone_FromDateTime()
+        {
+            // One before and one after the Unix epoch, more easily represented via DateTime.
+            Assert.AreEqual("\"1673-06-19T12:34:56Z\"",
                 new DateTime(1673, 6, 19, 12, 34, 56, DateTimeKind.Utc).ToTimestamp().ToString());
-            Assert.AreEqual("2015-07-31T10:29:34Z",
+            Assert.AreEqual("\"2015-07-31T10:29:34Z\"",
                 new DateTime(2015, 7, 31, 10, 29, 34, DateTimeKind.Utc).ToTimestamp().ToString());
         }
 
+        [Test]
+        [TestCase(-1, -1)] // Would be valid as duration
+        [TestCase(1, Timestamp.MaxNanos + 1)]
+        [TestCase(Timestamp.UnixSecondsAtBclMaxValue + 1, 0)]
+        [TestCase(Timestamp.UnixSecondsAtBclMinValue - 1, 0)]
+        public void TimestampStandalone_NonNormalized(long seconds, int nanoseconds)
+        {
+            var timestamp = new Timestamp { Seconds = seconds, Nanos = nanoseconds };
+            Assert.Throws<InvalidOperationException>(() => JsonFormatter.Default.Format(timestamp));
+        }
+
         [Test]
         public void TimestampField()
         {
@@ -342,6 +366,14 @@ namespace Google.Protobuf
         [TestCase(0, 0, "0s")]
         [TestCase(1, 0, "1s")]
         [TestCase(-1, 0, "-1s")]
+        [TestCase(0, 1, "0.000000001s")]
+        [TestCase(0, 10, "0.000000010s")]
+        [TestCase(0, 100, "0.000000100s")]
+        [TestCase(0, 1000, "0.000001s")]
+        [TestCase(0, 10000, "0.000010s")]
+        [TestCase(0, 100000, "0.000100s")]
+        [TestCase(0, 1000000, "0.001s")]
+        [TestCase(0, 10000000, "0.010s")]
         [TestCase(0, 100000000, "0.100s")]
         [TestCase(0, 120000000, "0.120s")]
         [TestCase(0, 123000000, "0.123s")]
@@ -354,12 +386,19 @@ namespace Google.Protobuf
         [TestCase(0, -100000000, "-0.100s")]
         [TestCase(1, 100000000, "1.100s")]
         [TestCase(-1, -100000000, "-1.100s")]
-        // Non-normalized examples
-        [TestCase(1, 2123456789, "3.123456789s")]
-        [TestCase(1, -100000000, "0.900s")]
         public void DurationStandalone(long seconds, int nanoseconds, string expected)
         {
-            Assert.AreEqual(expected, new Duration { Seconds = seconds, Nanos = nanoseconds }.ToString());
+            var json = JsonFormatter.Default.Format(new Duration { Seconds = seconds, Nanos = nanoseconds });
+            Assert.AreEqual(WrapInQuotes(expected), json);
+        }
+
+        [Test]
+        [TestCase(1, 2123456789)]
+        [TestCase(1, -100000000)]
+        public void DurationStandalone_NonNormalized(long seconds, int nanoseconds)
+        {
+            var duration = new Duration { Seconds = seconds, Nanos = nanoseconds };
+            Assert.Throws<InvalidOperationException>(() => JsonFormatter.Default.Format(duration));
         }
 
         [Test]
@@ -376,26 +415,36 @@ namespace Google.Protobuf
             {
                 Fields =
                 {
-                    { "a", new Value { NullValue = new NullValue() } },
-                    { "b", new Value { BoolValue = false } },
-                    { "c", new Value { NumberValue = 10.5 } },
-                    { "d", new Value { StringValue = "text" } },
-                    { "e", new Value { ListValue = new ListValue { Values = { new Value { StringValue = "t1" }, new Value { NumberValue = 5 } } } } },
-                    { "f", new Value { StructValue = new Struct { Fields = { { "nested", new Value { StringValue = "value" } } } } } }
+                    { "a", Value.ForNull() },
+                    { "b", Value.ForBool(false) },
+                    { "c", Value.ForNumber(10.5) },
+                    { "d", Value.ForString("text") },
+                    { "e", Value.ForList(Value.ForString("t1"), Value.ForNumber(5)) },
+                    { "f", Value.ForStruct(new Struct { Fields = { { "nested", Value.ForString("value") } } }) }
                 }
             };
             AssertJson("{ 'a': null, 'b': false, 'c': 10.5, 'd': 'text', 'e': [ 't1', 5 ], 'f': { 'nested': 'value' } }", message.ToString());
         }
 
+        [Test]
+        [TestCase("foo__bar")]
+        [TestCase("foo_3_ar")]
+        [TestCase("fooBar")]
+        public void FieldMaskInvalid(string input)
+        {
+            var mask = new FieldMask { Paths = { input } };
+            Assert.Throws<InvalidOperationException>(() => JsonFormatter.Default.Format(mask));
+        }
+
         [Test]
         public void FieldMaskStandalone()
         {
             var fieldMask = new FieldMask { Paths = { "", "single", "with_underscore", "nested.field.name", "nested..double_dot" } };
-            Assert.AreEqual(",single,withUnderscore,nested.field.name,nested..doubleDot", fieldMask.ToString());
+            Assert.AreEqual("\",single,withUnderscore,nested.field.name,nested..doubleDot\"", fieldMask.ToString());
 
             // Invalid, but we shouldn't create broken JSON...
             fieldMask = new FieldMask { Paths = { "x\\y" } };
-            Assert.AreEqual(@"x\\y", fieldMask.ToString());
+            Assert.AreEqual(@"""x\\y""", fieldMask.ToString());
         }
 
         [Test]
@@ -405,6 +454,64 @@ namespace Google.Protobuf
             AssertJson("{ 'fieldMaskField': 'user.displayName,photo' }", JsonFormatter.Default.Format(message));
         }
 
+        // SourceContext is an example of a well-known type with no special JSON handling
+        [Test]
+        public void SourceContextStandalone()
+        {
+            var message = new SourceContext { FileName = "foo.proto" };
+            AssertJson("{ 'fileName': 'foo.proto' }", JsonFormatter.Default.Format(message));
+        }
+
+        [Test]
+        public void AnyWellKnownType()
+        {
+            var formatter = new JsonFormatter(new JsonFormatter.Settings(false, TypeRegistry.FromMessages(Timestamp.Descriptor)));
+            var timestamp = new DateTime(1673, 6, 19, 12, 34, 56, DateTimeKind.Utc).ToTimestamp();
+            var any = Any.Pack(timestamp);
+            AssertJson("{ '@type': 'type.googleapis.com/google.protobuf.Timestamp', 'value': '1673-06-19T12:34:56Z' }", formatter.Format(any));
+        }
+
+        [Test]
+        public void AnyMessageType()
+        {
+            var formatter = new JsonFormatter(new JsonFormatter.Settings(false, TypeRegistry.FromMessages(TestAllTypes.Descriptor)));
+            var message = new TestAllTypes { SingleInt32 = 10, SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 20 } };
+            var any = Any.Pack(message);
+            AssertJson("{ '@type': 'type.googleapis.com/protobuf_unittest.TestAllTypes', 'singleInt32': 10, 'singleNestedMessage': { 'bb': 20 } }", formatter.Format(any));
+        }
+
+        [Test]
+        public void AnyMessageType_CustomPrefix()
+        {
+            var formatter = new JsonFormatter(new JsonFormatter.Settings(false, TypeRegistry.FromMessages(TestAllTypes.Descriptor)));
+            var message = new TestAllTypes { SingleInt32 = 10 };
+            var any = Any.Pack(message, "foo.bar/baz");
+            AssertJson("{ '@type': 'foo.bar/baz/protobuf_unittest.TestAllTypes', 'singleInt32': 10 }", formatter.Format(any));
+        }
+
+        [Test]
+        public void AnyNested()
+        {
+            var registry = TypeRegistry.FromMessages(TestWellKnownTypes.Descriptor, TestAllTypes.Descriptor);
+            var formatter = new JsonFormatter(new JsonFormatter.Settings(false, registry));
+
+            // Nest an Any as the value of an Any.
+            var doubleNestedMessage = new TestAllTypes { SingleInt32 = 20 };
+            var nestedMessage = Any.Pack(doubleNestedMessage);
+            var message = new TestWellKnownTypes { AnyField = Any.Pack(nestedMessage) };
+            AssertJson("{ 'anyField': { '@type': 'type.googleapis.com/google.protobuf.Any', 'value': { '@type': 'type.googleapis.com/protobuf_unittest.TestAllTypes', 'singleInt32': 20 } } }",
+                formatter.Format(message));
+        }
+
+        [Test]
+        public void AnyUnknownType()
+        {
+            // The default type registry doesn't have any types in it.
+            var message = new TestAllTypes();
+            var any = Any.Pack(message);
+            Assert.Throws<InvalidOperationException>(() => JsonFormatter.Default.Format(any));
+        }
+
         /// <summary>
         /// Checks that the actual JSON is the same as the expected JSON - but after replacing
         /// all apostrophes in the expected JSON with double quotes. This basically makes the tests easier

+ 936 - 0
csharp/src/Google.Protobuf.Test/JsonParserTest.cs

@@ -0,0 +1,936 @@
+#region Copyright notice and license
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#endregion
+
+using Google.Protobuf.Reflection;
+using Google.Protobuf.TestProtos;
+using Google.Protobuf.WellKnownTypes;
+using NUnit.Framework;
+using System;
+
+namespace Google.Protobuf
+{
+    /// <summary>
+    /// Unit tests for JSON parsing.
+    /// </summary>
+    public class JsonParserTest
+    {
+        // Sanity smoke test
+        [Test]
+        public void AllTypesRoundtrip()
+        {
+            AssertRoundtrip(SampleMessages.CreateFullTestAllTypes());
+        }
+
+        [Test]
+        public void Maps()
+        {
+            AssertRoundtrip(new TestMap { MapStringString = { { "with spaces", "bar" }, { "a", "b" } } });
+            AssertRoundtrip(new TestMap { MapInt32Int32 = { { 0, 1 }, { 2, 3 } } });
+            AssertRoundtrip(new TestMap { MapBoolBool = { { false, true }, { true, false } } });
+        }
+
+        [Test]
+        [TestCase(" 1 ")]
+        [TestCase("+1")]
+        [TestCase("1,000")]
+        [TestCase("1.5")]
+        public void IntegerMapKeysAreStrict(string keyText)
+        {
+            // Test that integer parsing is strict. We assume that if this is correct for int32,
+            // it's correct for other numeric key types.
+            var json = "{ \"mapInt32Int32\": { \"" + keyText + "\" : \"1\" } }";
+            Assert.Throws<InvalidProtocolBufferException>(() => JsonParser.Default.Parse<TestMap>(json));
+        }
+
+        [Test]
+        public void OriginalFieldNameAccepted()
+        {
+            var json = "{ \"single_int32\": 10 }";
+            var expected = new TestAllTypes { SingleInt32 = 10 };
+            Assert.AreEqual(expected, TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        public void SourceContextRoundtrip()
+        {
+            AssertRoundtrip(new SourceContext { FileName = "foo.proto" });
+        }
+
+        [Test]
+        public void SingularWrappers_DefaultNonNullValues()
+        {
+            var message = new TestWellKnownTypes
+            {
+                StringField = "",
+                BytesField = ByteString.Empty,
+                BoolField = false,
+                FloatField = 0f,
+                DoubleField = 0d,
+                Int32Field = 0,
+                Int64Field = 0,
+                Uint32Field = 0,
+                Uint64Field = 0
+            };
+            AssertRoundtrip(message);
+        }
+
+        [Test]
+        public void SingularWrappers_NonDefaultValues()
+        {
+            var message = new TestWellKnownTypes
+            {
+                StringField = "x",
+                BytesField = ByteString.CopyFrom(1, 2, 3),
+                BoolField = true,
+                FloatField = 12.5f,
+                DoubleField = 12.25d,
+                Int32Field = 1,
+                Int64Field = 2,
+                Uint32Field = 3,
+                Uint64Field = 4
+            };
+            AssertRoundtrip(message);
+        }
+
+        [Test]
+        public void SingularWrappers_ExplicitNulls()
+        {
+            // When we parse the "valueField": null part, we remember it... basically, it's one case
+            // where explicit default values don't fully roundtrip.
+            var message = new TestWellKnownTypes { ValueField = Value.ForNull() };
+            var json = new JsonFormatter(new JsonFormatter.Settings(true)).Format(message);
+            var parsed = JsonParser.Default.Parse<TestWellKnownTypes>(json);
+            Assert.AreEqual(message, parsed);
+        }
+
+        [Test]
+        [TestCase(typeof(Int32Value), "32", 32)]
+        [TestCase(typeof(Int64Value), "32", 32L)]
+        [TestCase(typeof(UInt32Value), "32", 32U)]
+        [TestCase(typeof(UInt64Value), "32", 32UL)]
+        [TestCase(typeof(StringValue), "\"foo\"", "foo")]
+        [TestCase(typeof(FloatValue), "1.5", 1.5f)]
+        [TestCase(typeof(DoubleValue), "1.5", 1.5d)]
+        public void Wrappers_Standalone(System.Type wrapperType, string json, object expectedValue)
+        {
+            IMessage parsed = (IMessage)Activator.CreateInstance(wrapperType);
+            IMessage expected = (IMessage)Activator.CreateInstance(wrapperType);
+            JsonParser.Default.Merge(parsed, "null");
+            Assert.AreEqual(expected, parsed);
+
+            JsonParser.Default.Merge(parsed, json);
+            expected.Descriptor.Fields[WrappersReflection.WrapperValueFieldNumber].Accessor.SetValue(expected, expectedValue);
+            Assert.AreEqual(expected, parsed);
+        }
+
+        [Test]
+        public void ExplicitNullValue()
+        {
+            string json = "{\"valueField\": null}";
+            var message = JsonParser.Default.Parse<TestWellKnownTypes>(json);
+            Assert.AreEqual(new TestWellKnownTypes { ValueField = Value.ForNull() }, message);
+        }
+
+        [Test]
+        public void BytesWrapper_Standalone()
+        {
+            ByteString data = ByteString.CopyFrom(1, 2, 3);
+            // Can't express a ByteString as an attribute argument, so the bytes wrapper is tested separately...
+            var parsed = JsonParser.Default.Parse<BytesValue>(WrapInQuotes(data.ToBase64()));
+            var expected = new BytesValue { Value = data };
+            Assert.AreEqual(expected, parsed);
+        }
+
+        [Test]
+        public void RepeatedWrappers()
+        {
+            var message = new RepeatedWellKnownTypes
+            {
+                BoolField = { true, false },
+                BytesField = { ByteString.CopyFrom(1, 2, 3), ByteString.CopyFrom(4, 5, 6), ByteString.Empty },
+                DoubleField = { 12.5, -1.5, 0d },
+                FloatField = { 123.25f, -20f, 0f },
+                Int32Field = { int.MaxValue, int.MinValue, 0 },
+                Int64Field = { long.MaxValue, long.MinValue, 0L },
+                StringField = { "First", "Second", "" },
+                Uint32Field = { uint.MaxValue, uint.MinValue, 0U },
+                Uint64Field = { ulong.MaxValue, ulong.MinValue, 0UL },
+            };
+            AssertRoundtrip(message);
+        }
+
+        [Test]
+        public void RepeatedField_NullElementProhibited()
+        {
+            string json = "{ \"repeated_foreign_message\": [null] }";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        public void RepeatedField_NullOverallValueAllowed()
+        {
+            string json = "{ \"repeated_foreign_message\": null }";
+            Assert.AreEqual(new TestAllTypes(), TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("{ \"mapInt32Int32\": { \"10\": null }")]
+        [TestCase("{ \"mapStringString\": { \"abc\": null }")]
+        [TestCase("{ \"mapInt32ForeignMessage\": { \"10\": null }")]
+        public void MapField_NullValueProhibited(string json)
+        {
+            Assert.Throws<InvalidProtocolBufferException>(() => TestMap.Parser.ParseJson(json));
+        }
+
+        [Test]
+        public void MapField_NullOverallValueAllowed()
+        {
+            string json = "{ \"mapInt32Int32\": null }";
+            Assert.AreEqual(new TestMap(), TestMap.Parser.ParseJson(json));
+        }
+
+        [Test]
+        public void IndividualWrapperTypes()
+        {
+            Assert.AreEqual(new StringValue { Value = "foo" }, StringValue.Parser.ParseJson("\"foo\""));
+            Assert.AreEqual(new Int32Value { Value = 1 }, Int32Value.Parser.ParseJson("1"));
+            // Can parse strings directly too
+            Assert.AreEqual(new Int32Value { Value = 1 }, Int32Value.Parser.ParseJson("\"1\""));
+        }
+
+        private static void AssertRoundtrip<T>(T message) where T : IMessage<T>, new()
+        {
+            var clone = message.Clone();
+            var json = JsonFormatter.Default.Format(message);
+            var parsed = JsonParser.Default.Parse<T>(json);
+            Assert.AreEqual(clone, parsed);
+        }
+
+        [Test]
+        [TestCase("0", 0)]
+        [TestCase("-0", 0)] // Not entirely clear whether we intend to allow this...
+        [TestCase("1", 1)]
+        [TestCase("-1", -1)]
+        [TestCase("2147483647", 2147483647)]
+        [TestCase("-2147483648", -2147483648)]
+        public void StringToInt32_Valid(string jsonValue, int expectedParsedValue)
+        {
+            string json = "{ \"singleInt32\": \"" + jsonValue + "\"}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleInt32);
+        }
+
+        [Test]
+        [TestCase("+0")]
+        [TestCase(" 1")]
+        [TestCase("1 ")]
+        [TestCase("00")]
+        [TestCase("-00")]
+        [TestCase("--1")]
+        [TestCase("+1")]
+        [TestCase("1.5")]
+        [TestCase("1e10")]
+        [TestCase("2147483648")]
+        [TestCase("-2147483649")]
+        public void StringToInt32_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleInt32\": \"" + jsonValue + "\"}";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0U)]
+        [TestCase("1", 1U)]
+        [TestCase("4294967295", 4294967295U)]
+        public void StringToUInt32_Valid(string jsonValue, uint expectedParsedValue)
+        {
+            string json = "{ \"singleUint32\": \"" + jsonValue + "\"}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleUint32);
+        }
+
+        // Assume that anything non-bounds-related is covered in the Int32 case
+        [Test]
+        [TestCase("-1")]
+        [TestCase("4294967296")]
+        public void StringToUInt32_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleUint32\": \"" + jsonValue + "\"}";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0L)]
+        [TestCase("1", 1L)]
+        [TestCase("-1", -1L)]
+        [TestCase("9223372036854775807", 9223372036854775807)]
+        [TestCase("-9223372036854775808", -9223372036854775808)]
+        public void StringToInt64_Valid(string jsonValue, long expectedParsedValue)
+        {
+            string json = "{ \"singleInt64\": \"" + jsonValue + "\"}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleInt64);
+        }
+
+        // Assume that anything non-bounds-related is covered in the Int32 case
+        [Test]
+        [TestCase("-9223372036854775809")]
+        [TestCase("9223372036854775808")]
+        public void StringToInt64_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleInt64\": \"" + jsonValue + "\"}";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0UL)]
+        [TestCase("1", 1UL)]
+        [TestCase("18446744073709551615", 18446744073709551615)]
+        public void StringToUInt64_Valid(string jsonValue, ulong expectedParsedValue)
+        {
+            string json = "{ \"singleUint64\": \"" + jsonValue + "\"}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleUint64);
+        }
+
+        // Assume that anything non-bounds-related is covered in the Int32 case
+        [Test]
+        [TestCase("-1")]
+        [TestCase("18446744073709551616")]
+        public void StringToUInt64_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleUint64\": \"" + jsonValue + "\"}";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0d)]
+        [TestCase("1", 1d)]
+        [TestCase("1.000000", 1d)]
+        [TestCase("1.0000000000000000000000001", 1d)] // We don't notice that we haven't preserved the exact value
+        [TestCase("-1", -1d)]
+        [TestCase("1e1", 10d)]
+        [TestCase("1e01", 10d)] // Leading zeroes are allowed in exponents
+        [TestCase("1E1", 10d)] // Either case is fine
+        [TestCase("-1e1", -10d)]
+        [TestCase("1.5e1", 15d)]
+        [TestCase("-1.5e1", -15d)]
+        [TestCase("15e-1", 1.5d)]
+        [TestCase("-15e-1", -1.5d)]
+        [TestCase("1.79769e308", 1.79769e308)]
+        [TestCase("-1.79769e308", -1.79769e308)]
+        [TestCase("Infinity", double.PositiveInfinity)]
+        [TestCase("-Infinity", double.NegativeInfinity)]
+        [TestCase("NaN", double.NaN)]
+        public void StringToDouble_Valid(string jsonValue, double expectedParsedValue)
+        {
+            string json = "{ \"singleDouble\": \"" + jsonValue + "\"}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleDouble);
+        }
+
+        [Test]
+        [TestCase("1.7977e308")]
+        [TestCase("-1.7977e308")]
+        [TestCase("1e309")]
+        [TestCase("1,0")]
+        [TestCase("1.0.0")]
+        [TestCase("+1")]
+        [TestCase("00")]
+        [TestCase("01")]
+        [TestCase("-00")]
+        [TestCase("-01")]
+        [TestCase("--1")]
+        [TestCase(" Infinity")]
+        [TestCase(" -Infinity")]
+        [TestCase("NaN ")]
+        [TestCase("Infinity ")]
+        [TestCase("-Infinity ")]
+        [TestCase(" NaN")]
+        [TestCase("INFINITY")]
+        [TestCase("nan")]
+        [TestCase("\u00BD")] // 1/2 as a single Unicode character. Just sanity checking...
+        public void StringToDouble_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleDouble\": \"" + jsonValue + "\"}";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0f)]
+        [TestCase("1", 1f)]
+        [TestCase("1.000000", 1f)]
+        [TestCase("-1", -1f)]
+        [TestCase("3.402823e38", 3.402823e38f)]
+        [TestCase("-3.402823e38", -3.402823e38f)]
+        [TestCase("1.5e1", 15f)]
+        [TestCase("15e-1", 1.5f)]
+        public void StringToFloat_Valid(string jsonValue, float expectedParsedValue)
+        {
+            string json = "{ \"singleFloat\": \"" + jsonValue + "\"}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleFloat);
+        }
+
+        [Test]
+        [TestCase("3.402824e38")]
+        [TestCase("-3.402824e38")]
+        [TestCase("1,0")]
+        [TestCase("1.0.0")]
+        [TestCase("+1")]
+        [TestCase("00")]
+        [TestCase("--1")]
+        public void StringToFloat_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleFloat\": \"" + jsonValue + "\"}";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0)]
+        [TestCase("-0", 0)] // Not entirely clear whether we intend to allow this...
+        [TestCase("1", 1)]
+        [TestCase("-1", -1)]
+        [TestCase("2147483647", 2147483647)]
+        [TestCase("-2147483648", -2147483648)]
+        [TestCase("1e1", 10)]
+        [TestCase("-1e1", -10)]
+        [TestCase("10.00", 10)]
+        [TestCase("-10.00", -10)]
+        public void NumberToInt32_Valid(string jsonValue, int expectedParsedValue)
+        {
+            string json = "{ \"singleInt32\": " + jsonValue + "}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleInt32);
+        }
+
+        [Test]
+        [TestCase("+0", typeof(InvalidJsonException))]
+        [TestCase("00", typeof(InvalidJsonException))]
+        [TestCase("-00", typeof(InvalidJsonException))]
+        [TestCase("--1", typeof(InvalidJsonException))]
+        [TestCase("+1", typeof(InvalidJsonException))]
+        [TestCase("1.5", typeof(InvalidProtocolBufferException))]
+        // Value is out of range
+        [TestCase("1e10", typeof(InvalidProtocolBufferException))]
+        [TestCase("2147483648", typeof(InvalidProtocolBufferException))]
+        [TestCase("-2147483649", typeof(InvalidProtocolBufferException))]
+        public void NumberToInt32_Invalid(string jsonValue, System.Type expectedExceptionType)
+        {
+            string json = "{ \"singleInt32\": " + jsonValue + "}";
+            Assert.Throws(expectedExceptionType, () => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0U)]
+        [TestCase("1", 1U)]
+        [TestCase("4294967295", 4294967295U)]
+        public void NumberToUInt32_Valid(string jsonValue, uint expectedParsedValue)
+        {
+            string json = "{ \"singleUint32\": " + jsonValue + "}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleUint32);
+        }
+
+        // Assume that anything non-bounds-related is covered in the Int32 case
+        [Test]
+        [TestCase("-1")]
+        [TestCase("4294967296")]
+        public void NumberToUInt32_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleUint32\": " + jsonValue + "}";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0L)]
+        [TestCase("1", 1L)]
+        [TestCase("-1", -1L)]
+        // long.MaxValue isn't actually representable as a double. This string value is the highest
+        // representable value which isn't greater than long.MaxValue.
+        [TestCase("9223372036854774784", 9223372036854774784)]
+        [TestCase("-9223372036854775808", -9223372036854775808)]
+        public void NumberToInt64_Valid(string jsonValue, long expectedParsedValue)
+        {
+            string json = "{ \"singleInt64\": " + jsonValue + "}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleInt64);
+        }
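+
+        // Arithmetic behind the 9223372036854774784 case above: long.MaxValue is 2^63 - 1, and
+        // doubles in [2^62, 2^63) are spaced 2^10 = 1024 apart, so the largest double not
+        // exceeding long.MaxValue is 2^63 - 1024 = 9223372036854774784.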
+
+        // Assume that anything non-bounds-related is covered in the Int32 case
+        [Test]
+        [TestCase("9223372036854775808")]
+        // Theoretical bound would be -9223372036854775809, but when that is parsed to a double
+        // we end up with the exact value of long.MinValue due to lack of precision. The value here
+        // is the "next double down".
+        [TestCase("-9223372036854780000")]
+        public void NumberToInt64_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleInt64\": " + jsonValue + "}";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0UL)]
+        [TestCase("1", 1UL)]
+        // ulong.MaxValue isn't representable as a double. This value is the largest double within
+        // the range of ulong.
+        [TestCase("18446744073709549568", 18446744073709549568UL)]
+        public void NumberToUInt64_Valid(string jsonValue, ulong expectedParsedValue)
+        {
+            string json = "{ \"singleUint64\": " + jsonValue + "}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleUint64);
+        }
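+
+        // Similarly, ulong.MaxValue is 2^64 - 1; doubles in [2^63, 2^64) are spaced 2^11 = 2048
+        // apart, so the largest double within ulong's range is 2^64 - 2048 = 18446744073709549568.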
+
+        // Assume that anything non-bounds-related is covered in the Int32 case
+        [Test]
+        [TestCase("-1")]
+        [TestCase("18446744073709551616")]
+        public void NumberToUInt64_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleUint64\": " + jsonValue + "}";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0d)]
+        [TestCase("1", 1d)]
+        [TestCase("1.000000", 1d)]
+        [TestCase("1.0000000000000000000000001", 1d)] // We don't notice that we haven't preserved the exact value
+        [TestCase("-1", -1d)]
+        [TestCase("1e1", 10d)]
+        [TestCase("1e01", 10d)] // Leading zeroes are allowed in exponents
+        [TestCase("1E1", 10d)] // Either case is fine
+        [TestCase("-1e1", -10d)]
+        [TestCase("1.5e1", 15d)]
+        [TestCase("-1.5e1", -15d)]
+        [TestCase("15e-1", 1.5d)]
+        [TestCase("-15e-1", -1.5d)]
+        [TestCase("1.79769e308", 1.79769e308)]
+        [TestCase("-1.79769e308", -1.79769e308)]
+        public void NumberToDouble_Valid(string jsonValue, double expectedParsedValue)
+        {
+            string json = "{ \"singleDouble\": " + jsonValue + "}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleDouble);
+        }
+
+        [Test]
+        [TestCase("1.7977e308")]
+        [TestCase("-1.7977e308")]
+        [TestCase("1e309")]
+        [TestCase("1,0")]
+        [TestCase("1.0.0")]
+        [TestCase("+1")]
+        [TestCase("00")]
+        [TestCase("--1")]
+        [TestCase("\u00BD")] // 1/2 as a single Unicode character. Just sanity checking...
+        public void NumberToDouble_Invalid(string jsonValue)
+        {
+            string json = "{ \"singleDouble\": " + jsonValue + "}";
+            Assert.Throws<InvalidJsonException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        [TestCase("0", 0f)]
+        [TestCase("1", 1f)]
+        [TestCase("1.000000", 1f)]
+        [TestCase("-1", -1f)]
+        [TestCase("3.402823e38", 3.402823e38f)]
+        [TestCase("-3.402823e38", -3.402823e38f)]
+        [TestCase("1.5e1", 15f)]
+        [TestCase("15e-1", 1.5f)]
+        public void NumberToFloat_Valid(string jsonValue, float expectedParsedValue)
+        {
+            string json = "{ \"singleFloat\": " + jsonValue + "}";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(expectedParsedValue, parsed.SingleFloat);
+        }
+
+        [Test]
+        [TestCase("3.402824e38", typeof(InvalidProtocolBufferException))]
+        [TestCase("-3.402824e38", typeof(InvalidProtocolBufferException))]
+        [TestCase("1,0", typeof(InvalidJsonException))]
+        [TestCase("1.0.0", typeof(InvalidJsonException))]
+        [TestCase("+1", typeof(InvalidJsonException))]
+        [TestCase("00", typeof(InvalidJsonException))]
+        [TestCase("--1", typeof(InvalidJsonException))]
+        public void NumberToFloat_Invalid(string jsonValue, System.Type expectedExceptionType)
+        {
+            string json = "{ \"singleFloat\": " + jsonValue + "}";
+            Assert.Throws(expectedExceptionType, () => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        // The simplest way of testing that the value has parsed correctly is to reformat it,
+        // as we trust the formatting. In many cases that will give the same result as the input,
+        // so in those cases we accept an expectedFormatted value of null. Sometimes the results
+        // will be different though, due to a different number of digits being provided.
+        [Test]
+        // Z offset
+        [TestCase("2015-10-09T14:46:23.123456789Z", null)]
+        [TestCase("2015-10-09T14:46:23.123456Z", null)]
+        [TestCase("2015-10-09T14:46:23.123Z", null)]
+        [TestCase("2015-10-09T14:46:23Z", null)]
+        [TestCase("2015-10-09T14:46:23.123456000Z", "2015-10-09T14:46:23.123456Z")]
+        [TestCase("2015-10-09T14:46:23.1234560Z", "2015-10-09T14:46:23.123456Z")]
+        [TestCase("2015-10-09T14:46:23.123000000Z", "2015-10-09T14:46:23.123Z")]
+        [TestCase("2015-10-09T14:46:23.1230Z", "2015-10-09T14:46:23.123Z")]
+        [TestCase("2015-10-09T14:46:23.00Z", "2015-10-09T14:46:23Z")]
+
+        // +00:00 offset
+        [TestCase("2015-10-09T14:46:23.123456789+00:00", "2015-10-09T14:46:23.123456789Z")]
+        [TestCase("2015-10-09T14:46:23.123456+00:00", "2015-10-09T14:46:23.123456Z")]
+        [TestCase("2015-10-09T14:46:23.123+00:00", "2015-10-09T14:46:23.123Z")]
+        [TestCase("2015-10-09T14:46:23+00:00", "2015-10-09T14:46:23Z")]
+        [TestCase("2015-10-09T14:46:23.123456000+00:00", "2015-10-09T14:46:23.123456Z")]
+        [TestCase("2015-10-09T14:46:23.1234560+00:00", "2015-10-09T14:46:23.123456Z")]
+        [TestCase("2015-10-09T14:46:23.123000000+00:00", "2015-10-09T14:46:23.123Z")]
+        [TestCase("2015-10-09T14:46:23.1230+00:00", "2015-10-09T14:46:23.123Z")]
+        [TestCase("2015-10-09T14:46:23.00+00:00", "2015-10-09T14:46:23Z")]
+
+        // Other offsets (assume by now that the subsecond handling is okay)
+        [TestCase("2015-10-09T15:46:23.123456789+01:00", "2015-10-09T14:46:23.123456789Z")]
+        [TestCase("2015-10-09T13:46:23.123456789-01:00", "2015-10-09T14:46:23.123456789Z")]
+        [TestCase("2015-10-09T15:16:23.123456789+00:30", "2015-10-09T14:46:23.123456789Z")]
+        [TestCase("2015-10-09T14:16:23.123456789-00:30", "2015-10-09T14:46:23.123456789Z")]
+        [TestCase("2015-10-09T16:31:23.123456789+01:45", "2015-10-09T14:46:23.123456789Z")]
+        [TestCase("2015-10-09T13:01:23.123456789-01:45", "2015-10-09T14:46:23.123456789Z")]
+        [TestCase("2015-10-10T08:46:23.123456789+18:00", "2015-10-09T14:46:23.123456789Z")]
+        [TestCase("2015-10-08T20:46:23.123456789-18:00", "2015-10-09T14:46:23.123456789Z")]
+
+        // Leap years and min/max
+        [TestCase("2016-02-29T14:46:23.123456789Z", null)]
+        [TestCase("2000-02-29T14:46:23.123456789Z", null)]
+        [TestCase("0001-01-01T00:00:00Z", null)]
+        [TestCase("9999-12-31T23:59:59.999999999Z", null)]
+        public void Timestamp_Valid(string jsonValue, string expectedFormatted)
+        {
+            expectedFormatted = expectedFormatted ?? jsonValue;
+            string json = WrapInQuotes(jsonValue);
+            var parsed = Timestamp.Parser.ParseJson(json);
+            Assert.AreEqual(WrapInQuotes(expectedFormatted), parsed.ToString());
+        }
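+
+        // The expectedFormatted values above reflect two normalizations performed when
+        // reformatting: any UTC offset is converted to "Z", and the fractional part is emitted
+        // with 0, 3, 6 or 9 digits (dropping trailing zero groups), e.g. ".1234560" becomes ".123456".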
+
+        [Test]
+        [TestCase("2015-10-09 14:46:23.123456789Z", Description = "No T between date and time")]
+        [TestCase("2015/10/09T14:46:23.123456789Z", Description = "Wrong date separators")]
+        [TestCase("2015-10-09T14.46.23.123456789Z", Description = "Wrong time separators")]
+        [TestCase("2015-10-09T14:46:23,123456789Z", Description = "Wrong fractional second separators (valid ISO-8601 though)")]
+        [TestCase(" 2015-10-09T14:46:23.123456789Z", Description = "Whitespace at start")]
+        [TestCase("2015-10-09T14:46:23.123456789Z ", Description = "Whitespace at end")]
+        [TestCase("2015-10-09T14:46:23.1234567890", Description = "Too many digits")]
+        [TestCase("2015-10-09T14:46:23.123456789", Description = "No offset")]
+        [TestCase("2015-13-09T14:46:23.123456789Z", Description = "Invalid month")]
+        [TestCase("2015-10-32T14:46:23.123456789Z", Description = "Invalid day")]
+        [TestCase("2015-10-09T24:00:00.000000000Z", Description = "Invalid hour (valid ISO-8601 though)")]
+        [TestCase("2015-10-09T14:60:23.123456789Z", Description = "Invalid minutes")]
+        [TestCase("2015-10-09T14:46:60.123456789Z", Description = "Invalid seconds")]
+        [TestCase("2015-10-09T14:46:23.123456789+18:01", Description = "Offset too large (positive)")]
+        [TestCase("2015-10-09T14:46:23.123456789-18:01", Description = "Offset too large (negative)")]
+        [TestCase("2015-10-09T14:46:23.123456789-00:00", Description = "Local offset (-00:00) makes no sense here")]
+        [TestCase("0001-01-01T00:00:00+00:01", Description = "Value before earliest when offset applied")]
+        [TestCase("9999-12-31T23:59:59.999999999-00:01", Description = "Value after latest when offset applied")]
+        [TestCase("2100-02-29T14:46:23.123456789Z", Description = "Feb 29th on a non-leap-year")]
+        public void Timestamp_Invalid(string jsonValue)
+        {
+            string json = WrapInQuotes(jsonValue);
+            Assert.Throws<InvalidProtocolBufferException>(() => Timestamp.Parser.ParseJson(json));
+        }
+
+        [Test]
+        public void StructValue_Null()
+        {
+            Assert.AreEqual(new Value { NullValue = 0 }, Value.Parser.ParseJson("null"));
+        }
+
+        [Test]
+        public void StructValue_String()
+        {
+            Assert.AreEqual(new Value { StringValue = "hi" }, Value.Parser.ParseJson("\"hi\""));
+        }
+
+        [Test]
+        public void StructValue_Bool()
+        {
+            Assert.AreEqual(new Value { BoolValue = true }, Value.Parser.ParseJson("true"));
+            Assert.AreEqual(new Value { BoolValue = false }, Value.Parser.ParseJson("false"));
+        }
+
+        [Test]
+        public void StructValue_List()
+        {
+            Assert.AreEqual(Value.ForList(Value.ForNumber(1), Value.ForString("x")), Value.Parser.ParseJson("[1, \"x\"]"));
+        }
+
+        [Test]
+        public void ParseListValue()
+        {
+            Assert.AreEqual(new ListValue { Values = { Value.ForNumber(1), Value.ForString("x") } }, ListValue.Parser.ParseJson("[1, \"x\"]"));
+        }
+
+        [Test]
+        public void StructValue_Struct()
+        {
+            Assert.AreEqual(
+                Value.ForStruct(new Struct { Fields = { { "x", Value.ForNumber(1) }, { "y", Value.ForString("z") } } }),
+                Value.Parser.ParseJson("{ \"x\": 1, \"y\": \"z\" }"));
+        }
+
+        [Test]
+        public void ParseStruct()
+        {
+            Assert.AreEqual(new Struct { Fields = { { "x", Value.ForNumber(1) }, { "y", Value.ForString("z") } } },
+                Struct.Parser.ParseJson("{ \"x\": 1, \"y\": \"z\" }"));
+        }
+
+        // TODO for duration parsing: upper and lower bounds.
+        // +/- 315576000000 seconds
+
+        [Test]
+        [TestCase("1.123456789s", null)]
+        [TestCase("1.123456s", null)]
+        [TestCase("1.123s", null)]
+        [TestCase("1.12300s", "1.123s")]
+        [TestCase("1.12345s", "1.123450s")]
+        [TestCase("1s", null)]
+        [TestCase("-1.123456789s", null)]
+        [TestCase("-1.123456s", null)]
+        [TestCase("-1.123s", null)]
+        [TestCase("-1s", null)]
+        [TestCase("0.123s", null)]
+        [TestCase("-0.123s", null)]
+        [TestCase("123456.123s", null)]
+        [TestCase("-123456.123s", null)]
+        // Upper and lower bounds
+        [TestCase("315576000000s", null)]
+        [TestCase("-315576000000s", null)]
+        public void Duration_Valid(string jsonValue, string expectedFormatted)
+        {
+            expectedFormatted = expectedFormatted ?? jsonValue;
+            string json = WrapInQuotes(jsonValue);
+            var parsed = Duration.Parser.ParseJson(json);
+            Assert.AreEqual(WrapInQuotes(expectedFormatted), parsed.ToString());
+        }
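+
+        // As with Timestamp, the fractional part is emitted with 0, 3, 6 or 9 digits, which is
+        // why "1.12345s" reformats as "1.123450s"; in proto terms that value is
+        // new Duration { Seconds = 1, Nanos = 123450000 }.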
+
+        // The simplest way of testing that the value has parsed correctly is to reformat it,
+        // as we trust the formatting. In many cases that will give the same result as the input,
+        // so in those cases we accept an expectedFormatted value of null. Sometimes the results
+        // will be different though, due to a different number of digits being provided.
+        [Test]
+        [TestCase("1.1234567890s", Description = "Too many digits")]
+        [TestCase("1.123456789", Description = "No suffix")]
+        [TestCase("1.123456789ss", Description = "Too much suffix")]
+        [TestCase("1.123456789S", Description = "Upper case suffix")]
+        [TestCase("+1.123456789s", Description = "Leading +")]
+        [TestCase(".123456789s", Description = "No integer before the fraction")]
+        [TestCase("1,123456789s", Description = "Comma as decimal separator")]
+        [TestCase("1x1.123456789s", Description = "Non-digit in integer part")]
+        [TestCase("1.1x3456789s", Description = "Non-digit in fractional part")]
+        [TestCase(" 1.123456789s", Description = "Whitespace before fraction")]
+        [TestCase("1.123456789s ", Description = "Whitespace after value")]
+        [TestCase("01.123456789s", Description = "Leading zero (positive)")]
+        [TestCase("-01.123456789s", Description = "Leading zero (negative)")]
+        [TestCase("--0.123456789s", Description = "Double minus sign")]
+        // Violate upper/lower bounds in various ways
+        [TestCase("315576000001s", Description = "Integer part too large")]
+        [TestCase("3155760000000s", Description = "Integer part too long (positive)")]
+        [TestCase("-3155760000000s", Description = "Integer part too long (negative)")]
+        public void Duration_Invalid(string jsonValue)
+        {
+            string json = WrapInQuotes(jsonValue);
+            Assert.Throws<InvalidProtocolBufferException>(() => Duration.Parser.ParseJson(json));
+        }
+
+        // Not as many tests for field masks as I'd like; more to be added when we have more
+        // detailed specifications.
+
+        [Test]
+        [TestCase("")]
+        [TestCase("foo", "foo")]
+        [TestCase("foo,bar", "foo", "bar")]
+        [TestCase("foo.bar", "foo.bar")]
+        [TestCase("fooBar", "foo_bar")]
+        [TestCase("fooBar.bazQux", "foo_bar.baz_qux")]
+        public void FieldMask_Valid(string jsonValue, params string[] expectedPaths)
+        {
+            string json = WrapInQuotes(jsonValue);
+            var parsed = FieldMask.Parser.ParseJson(json);
+            CollectionAssert.AreEqual(expectedPaths, parsed.Paths);
+        }
+
+        [Test]
+        [TestCase("foo_bar")]
+        public void FieldMask_Invalid(string jsonValue)
+        {
+            string json = WrapInQuotes(jsonValue);
+            Assert.Throws<InvalidProtocolBufferException>(() => FieldMask.Parser.ParseJson(json));
+        }
+
+        [Test]
+        public void Any_RegularMessage()
+        {
+            var registry = TypeRegistry.FromMessages(TestAllTypes.Descriptor);
+            var formatter = new JsonFormatter(new JsonFormatter.Settings(false, TypeRegistry.FromMessages(TestAllTypes.Descriptor)));
+            var message = new TestAllTypes { SingleInt32 = 10, SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 20 } };
+            var original = Any.Pack(message);
+            var json = formatter.Format(original); // This is tested in JsonFormatterTest
+            var parser = new JsonParser(new JsonParser.Settings(10, registry));
+            Assert.AreEqual(original, parser.Parse<Any>(json));
+            string valueFirstJson = "{ \"singleInt32\": 10, \"singleNestedMessage\": { \"bb\": 20 }, \"@type\": \"type.googleapis.com/protobuf_unittest.TestAllTypes\" }";
+            Assert.AreEqual(original, parser.Parse<Any>(valueFirstJson));
+        }
+
+        [Test]
+        public void Any_CustomPrefix()
+        {
+            var registry = TypeRegistry.FromMessages(TestAllTypes.Descriptor);
+            var message = new TestAllTypes { SingleInt32 = 10 };
+            var original = Any.Pack(message, "custom.prefix/middle-part");
+            var parser = new JsonParser(new JsonParser.Settings(10, registry));
+            string json = "{ \"@type\": \"custom.prefix/middle-part/protobuf_unittest.TestAllTypes\", \"singleInt32\": 10 }";
+            Assert.AreEqual(original, parser.Parse<Any>(json));
+        }
+
+        [Test]
+        public void Any_UnknownType()
+        {
+            string json = "{ \"@type\": \"type.googleapis.com/bogus\" }";
+            Assert.Throws<InvalidOperationException>(() => Any.Parser.ParseJson(json));
+        }
+
+        [Test]
+        public void Any_NoTypeUrl()
+        {
+            string json = "{ \"foo\": \"bar\" }";
+            Assert.Throws<InvalidProtocolBufferException>(() => Any.Parser.ParseJson(json));
+        }
+
+        [Test]
+        public void Any_WellKnownType()
+        {
+            var registry = TypeRegistry.FromMessages(Timestamp.Descriptor);
+            var formatter = new JsonFormatter(new JsonFormatter.Settings(false, registry));
+            var timestamp = new DateTime(1673, 6, 19, 12, 34, 56, DateTimeKind.Utc).ToTimestamp();
+            var original = Any.Pack(timestamp);
+            var json = formatter.Format(original); // This is tested in JsonFormatterTest
+            var parser = new JsonParser(new JsonParser.Settings(10, registry));
+            Assert.AreEqual(original, parser.Parse<Any>(json));
+            string valueFirstJson = "{ \"value\": \"1673-06-19T12:34:56Z\", \"@type\": \"type.googleapis.com/google.protobuf.Timestamp\" }";
+            Assert.AreEqual(original, parser.Parse<Any>(valueFirstJson));
+        }
+
+        [Test]
+        public void Any_Nested()
+        {
+            var registry = TypeRegistry.FromMessages(TestWellKnownTypes.Descriptor, TestAllTypes.Descriptor);
+            var formatter = new JsonFormatter(new JsonFormatter.Settings(false, registry));
+            var parser = new JsonParser(new JsonParser.Settings(10, registry));
+            var doubleNestedMessage = new TestAllTypes { SingleInt32 = 20 };
+            var nestedMessage = Any.Pack(doubleNestedMessage);
+            var message = new TestWellKnownTypes { AnyField = Any.Pack(nestedMessage) };
+            var json = formatter.Format(message);
+            // Use the descriptor-based parser just for a change.
+            Assert.AreEqual(message, parser.Parse(json, TestWellKnownTypes.Descriptor));
+        }
+
+        [Test]
+        public void DataAfterObject()
+        {
+            string json = "{} 10";
+            Assert.Throws<InvalidJsonException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        /// <summary>
+        /// JSON equivalent to <see cref="CodedInputStreamTest.MaliciousRecursion"/>
+        /// </summary>
+        [Test]
+        public void MaliciousRecursion()
+        {
+            string data64 = CodedInputStreamTest.MakeRecursiveMessage(64).ToString();
+            string data65 = CodedInputStreamTest.MakeRecursiveMessage(65).ToString();
+
+            var parser64 = new JsonParser(new JsonParser.Settings(64));
+            CodedInputStreamTest.AssertMessageDepth(parser64.Parse<TestRecursiveMessage>(data64), 64);
+            Assert.Throws<InvalidProtocolBufferException>(() => parser64.Parse<TestRecursiveMessage>(data65));
+
+            var parser63 = new JsonParser(new JsonParser.Settings(63));
+            Assert.Throws<InvalidProtocolBufferException>(() => parser63.Parse<TestRecursiveMessage>(data64));
+        }
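+
+        // Sketch of the input (assuming MakeRecursiveMessage in CodedInputStreamTest nests
+        // messages via the "a" field): the JSON is of the form { "a": { "a": { ... } } } to the
+        // requested depth, so it is the recursion limit in JsonParser.Settings, rather than the
+        // input size, that rejects the 65-deep document.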
+
+        [Test]
+        [TestCase("AQI")]
+        [TestCase("_-==")]
+        public void Bytes_InvalidBase64(string badBase64)
+        {
+            string json = "{ \"singleBytes\": \"" + badBase64 + "\" }";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
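+
+        // "AQI" lacks the trailing "=" padding of standard base64, and "_-==" uses '-' and '_'
+        // from the URL-safe alphabet rather than '+' and '/'; the parser rejects both forms.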
+
+        [Test]
+        [TestCase("\"FOREIGN_BAR\"", ForeignEnum.ForeignBar)]
+        [TestCase("5", ForeignEnum.ForeignBar)]
+        [TestCase("100", (ForeignEnum)100)]
+        public void EnumValid(string value, ForeignEnum expectedValue)
+        {
+            string json = "{ \"singleForeignEnum\": " + value + " }";
+            var parsed = TestAllTypes.Parser.ParseJson(json);
+            Assert.AreEqual(new TestAllTypes { SingleForeignEnum = expectedValue }, parsed);
+        }
+
+        [Test]
+        [TestCase("\"NOT_A_VALID_VALUE\"")]
+        [TestCase("5.5")]
+        public void Enum_Invalid(string value)
+        {
+            string json = "{ \"singleForeignEnum\": " + value + " }";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        [Test]
+        public void OneofDuplicate_Invalid()
+        {
+            string json = "{ \"oneofString\": \"x\", \"oneofUint32\": 10 }";
+            Assert.Throws<InvalidProtocolBufferException>(() => TestAllTypes.Parser.ParseJson(json));
+        }
+
+        /// <summary>
+        /// Various tests use strings which have quotes round them for parsing or as the result
+        /// of formatting, but without those quotes being specified in the tests (for the sake of readability).
+        /// This method simply returns the input, wrapped in double quotes.
+        /// </summary>
+        internal static string WrapInQuotes(string text)
+        {
+            return '"' + text + '"';
+        }
+    }
+}

+ 408 - 0
csharp/src/Google.Protobuf.Test/JsonTokenizerTest.cs

@@ -0,0 +1,408 @@
+#region Copyright notice and license
+// Protocol Buffers - Google's data interchange format
+// Copyright 2008 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#endregion
+using NUnit.Framework;
+using System;
+using System.IO;
+
+namespace Google.Protobuf
+{
+    public class JsonTokenizerTest
+    {
+        [Test]
+        public void EmptyObjectValue()
+        {
+            AssertTokens("{}", JsonToken.StartObject, JsonToken.EndObject);
+        }
+
+        [Test]
+        public void EmptyArrayValue()
+        {
+            AssertTokens("[]", JsonToken.StartArray, JsonToken.EndArray);
+        }
+
+        [Test]
+        [TestCase("foo", "foo")]
+        [TestCase("tab\\t", "tab\t")]
+        [TestCase("line\\nfeed", "line\nfeed")]
+        [TestCase("carriage\\rreturn", "carriage\rreturn")]
+        [TestCase("back\\bspace", "back\bspace")]
+        [TestCase("form\\ffeed", "form\ffeed")]
+        [TestCase("escaped\\/slash", "escaped/slash")]
+        [TestCase("escaped\\\\backslash", "escaped\\backslash")]
+        [TestCase("escaped\\\"quote", "escaped\"quote")]
+        [TestCase("foo {}[] bar", "foo {}[] bar")]
+        [TestCase("foo\\u09aFbar", "foo\u09afbar")] // Digits, upper hex, lower hex
+        [TestCase("ab\ud800\udc00cd", "ab\ud800\udc00cd")]
+        [TestCase("ab\\ud800\\udc00cd", "ab\ud800\udc00cd")]
+        public void StringValue(string json, string expectedValue)
+        {
+            AssertTokensNoReplacement("\"" + json + "\"", JsonToken.Value(expectedValue));
+        }
+
+        // Valid surrogate pairs, with mixed escaping. These test cases can't be expressed
+        // using TestCase as they have no valid UTF-8 representation.
+        // It's unclear exactly how we should handle a mixture of escaped and unescaped
+        // surrogates: that can't come from UTF-8 text, but could come from a .NET string.
+        // For the moment, treat it as valid in the obvious way.
+        [Test]
+        public void MixedSurrogatePairs()
+        {
+            string expected = "\ud800\udc00";
+            AssertTokens("'\\ud800\udc00'", JsonToken.Value(expected));
+            AssertTokens("'\ud800\\udc00'", JsonToken.Value(expected));
+        }
+
+        [Test]
+        public void ObjectDepth()
+        {
+            string json = "{ \"foo\": { \"x\": 1, \"y\": [ 0 ] } }";
+            var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));
+            // If we had more tests like this, I'd introduce a helper method... but for one test, it's not worth it.
+            Assert.AreEqual(0, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
+            Assert.AreEqual(1, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.Name("foo"), tokenizer.Next());
+            Assert.AreEqual(1, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.StartObject, tokenizer.Next());
+            Assert.AreEqual(2, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.Name("x"), tokenizer.Next());
+            Assert.AreEqual(2, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.Value(1), tokenizer.Next());
+            Assert.AreEqual(2, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.Name("y"), tokenizer.Next());
+            Assert.AreEqual(2, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.StartArray, tokenizer.Next());
+            Assert.AreEqual(2, tokenizer.ObjectDepth); // Depth hasn't changed in array
+            Assert.AreEqual(JsonToken.Value(0), tokenizer.Next());
+            Assert.AreEqual(2, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.EndArray, tokenizer.Next());
+            Assert.AreEqual(2, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.EndObject, tokenizer.Next());
+            Assert.AreEqual(1, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.EndObject, tokenizer.Next());
+            Assert.AreEqual(0, tokenizer.ObjectDepth);
+            Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
+            Assert.AreEqual(0, tokenizer.ObjectDepth);
+        }
+
+        [Test]
+        public void ObjectDepth_WithPushBack()
+        {
+            string json = "{}";
+            var tokenizer = JsonTokenizer.FromTextReader(new StringReader(json));
+            Assert.AreEqual(0, tokenizer.ObjectDepth);
+            var token = tokenizer.Next();
+            Assert.AreEqual(1, tokenizer.ObjectDepth);
+            // When we push back a "start object", we should effectively be back to the previous depth.
+            tokenizer.PushBack(token);
+            Assert.AreEqual(0, tokenizer.ObjectDepth);
+            // Read the same token again, and get back to depth 1
+            token = tokenizer.Next();
+            Assert.AreEqual(1, tokenizer.ObjectDepth);
+
+            // Now the same in reverse, with EndObject
+            token = tokenizer.Next();
+            Assert.AreEqual(0, tokenizer.ObjectDepth);
+            tokenizer.PushBack(token);
+            Assert.AreEqual(1, tokenizer.ObjectDepth);
+            tokenizer.Next();
+            Assert.AreEqual(0, tokenizer.ObjectDepth);
+        }
+
+        [Test]
+        [TestCase("embedded tab\t")]
+        [TestCase("embedded CR\r")]
+        [TestCase("embedded LF\n")]
+        [TestCase("embedded bell\u0007")]
+        [TestCase("bad escape\\a")]
+        [TestCase("incomplete escape\\")]
+        [TestCase("incomplete Unicode escape\\u000")]
+        [TestCase("invalid Unicode escape\\u000H")]
+        // Surrogate pair handling, both in raw .NET strings and escaped. We only need
+        // to detect this in strings, as non-ASCII characters anywhere other than in strings
+        // will already lead to parsing errors.
+        [TestCase("\\ud800")]
+        [TestCase("\\udc00")]
+        [TestCase("\\ud800x")]
+        [TestCase("\\udc00x")]
+        [TestCase("\\udc00\\ud800y")]
+        public void InvalidStringValue(string json)
+        {
+            AssertThrowsAfter("\"" + json + "\"");
+        }
+
+        // Tests for invalid strings that can't be expressed in attributes,
+        // as the constants can't be expressed as UTF-8 strings.
+        [Test]
+        public void InvalidSurrogatePairs()
+        {
+            AssertThrowsAfter("\"\ud800x\"");
+            AssertThrowsAfter("\"\udc00y\"");
+            AssertThrowsAfter("\"\udc00\ud800y\"");
+        }
+
+        [Test]
+        [TestCase("0", 0)]
+        [TestCase("-0", 0)] // We don't distinguish between positive and negative 0
+        [TestCase("1", 1)]
+        [TestCase("-1", -1)]
+        // From here on, assume leading sign is okay...
+        [TestCase("1.125", 1.125)]
+        [TestCase("1.0", 1)]
+        [TestCase("1e5", 100000)]
+        [TestCase("1e000000", 1)] // Weird, but not prohibited by the spec
+        [TestCase("1E5", 100000)]
+        [TestCase("1e+5", 100000)]
+        [TestCase("1E-5", 0.00001)]
+        [TestCase("123E-2", 1.23)]
+        [TestCase("123.45E3", 123450)]
+        [TestCase("   1   ", 1)]
+        public void NumberValue(string json, double expectedValue)
+        {
+            AssertTokens(json, JsonToken.Value(expectedValue));
+        }
+
+        [Test]
+        [TestCase("00")]
+        [TestCase(".5")]
+        [TestCase("1.")]
+        [TestCase("1e")]
+        [TestCase("1e-")]
+        [TestCase("--")]
+        [TestCase("--1")]
+        [TestCase("-1.7977e308")]
+        [TestCase("1.7977e308")]
+        public void InvalidNumberValue(string json)
+        {
+            AssertThrowsAfter(json);
+        }
+
+        [Test]
+        [TestCase("nul")]
+        [TestCase("nothing")]
+        [TestCase("truth")]
+        [TestCase("fALSEhood")]
+        public void InvalidLiterals(string json)
+        {
+            AssertThrowsAfter(json);
+        }
+
+        [Test]
+        public void NullValue()
+        {
+            AssertTokens("null", JsonToken.Null);
+        }
+
+        [Test]
+        public void TrueValue()
+        {
+            AssertTokens("true", JsonToken.True);
+        }
+
+        [Test]
+        public void FalseValue()
+        {
+            AssertTokens("false", JsonToken.False);
+        }
+
+        [Test]
+        public void SimpleObject()
+        {
+            AssertTokens("{'x': 'y'}",
+                JsonToken.StartObject, JsonToken.Name("x"), JsonToken.Value("y"), JsonToken.EndObject);
+        }
+
+        [Test]
+        [TestCase("[10, 20", 3)]
+        [TestCase("[10,", 2)]
+        [TestCase("[10:20]", 2)]
+        [TestCase("[", 1)]
+        [TestCase("[,", 1)]
+        [TestCase("{", 1)]
+        [TestCase("{,", 1)]
+        [TestCase("{[", 1)]
+        [TestCase("{{", 1)]
+        [TestCase("{0", 1)]
+        [TestCase("{null", 1)]
+        [TestCase("{false", 1)]
+        [TestCase("{true", 1)]
+        [TestCase("}", 0)]
+        [TestCase("]", 0)]
+        [TestCase(",", 0)]
+        [TestCase("'foo' 'bar'", 1)]
+        [TestCase(":", 0)]
+        [TestCase("'foo", 0)] // Incomplete string
+        [TestCase("{ 'foo' }", 2)]
+        [TestCase("{ x:1", 1)] // Property names must be quoted
+        [TestCase("{]", 1)]
+        [TestCase("[}", 1)]
+        [TestCase("[1,", 2)]
+        [TestCase("{'x':0]", 3)]
+        [TestCase("{ 'foo': }", 2)]
+        [TestCase("{ 'foo':'bar', }", 3)]
+        public void InvalidStructure(string json, int expectedValidTokens)
+        {
+            // Note: we don't test that the earlier tokens are exactly as expected,
+            // partly because that's hard to parameterize.
+            var reader = new StringReader(json.Replace('\'', '"'));
+            var tokenizer = JsonTokenizer.FromTextReader(reader);
+            for (int i = 0; i < expectedValidTokens; i++)
+            {
+                Assert.IsNotNull(tokenizer.Next());
+            }
+            Assert.Throws<InvalidJsonException>(() => tokenizer.Next());
+        }
+
+        [Test]
+        public void ArrayMixedType()
+        {
+            AssertTokens("[1, 'foo', null, false, true, [2], {'x':'y' }]",
+                JsonToken.StartArray,
+                JsonToken.Value(1),
+                JsonToken.Value("foo"),
+                JsonToken.Null,
+                JsonToken.False,
+                JsonToken.True,
+                JsonToken.StartArray,
+                JsonToken.Value(2),
+                JsonToken.EndArray,
+                JsonToken.StartObject,
+                JsonToken.Name("x"),
+                JsonToken.Value("y"),
+                JsonToken.EndObject,
+                JsonToken.EndArray);
+        }
+
+        [Test]
+        public void ObjectMixedType()
+        {
+            AssertTokens(@"{'a': 1, 'b': 'bar', 'c': null, 'd': false, 'e': true, 
+                           'f': [2], 'g': {'x':'y' }}",
+                JsonToken.StartObject,
+                JsonToken.Name("a"),
+                JsonToken.Value(1),
+                JsonToken.Name("b"),
+                JsonToken.Value("bar"),
+                JsonToken.Name("c"),
+                JsonToken.Null,
+                JsonToken.Name("d"),
+                JsonToken.False,
+                JsonToken.Name("e"),
+                JsonToken.True,
+                JsonToken.Name("f"),
+                JsonToken.StartArray,
+                JsonToken.Value(2),
+                JsonToken.EndArray,
+                JsonToken.Name("g"),
+                JsonToken.StartObject,
+                JsonToken.Name("x"),
+                JsonToken.Value("y"),
+                JsonToken.EndObject,
+                JsonToken.EndObject);
+        }
+
+        [Test]
+        public void NextAfterEndDocumentThrows()
+        {
+            var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null"));
+            Assert.AreEqual(JsonToken.Null, tokenizer.Next());
+            Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
+            Assert.Throws<InvalidOperationException>(() => tokenizer.Next());
+        }
+
+        [Test]
+        public void CanPushBackEndDocument()
+        {
+            var tokenizer = JsonTokenizer.FromTextReader(new StringReader("null"));
+            Assert.AreEqual(JsonToken.Null, tokenizer.Next());
+            Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
+            tokenizer.PushBack(JsonToken.EndDocument);
+            Assert.AreEqual(JsonToken.EndDocument, tokenizer.Next());
+            Assert.Throws<InvalidOperationException>(() => tokenizer.Next());
+        }
+
+        /// <summary>
+        /// Asserts that the specified JSON is tokenized into the given sequence of tokens.
+        /// All apostrophes are first converted to double quotes, allowing any tests
+        /// that don't need to check actual apostrophe handling to use apostrophes in the JSON, avoiding
+        /// messy string literal escaping. The "end document" token is not specified in the list of 
+        /// expected tokens, but is implicit.
+        /// </summary>
+        private static void AssertTokens(string json, params JsonToken[] expectedTokens)
+        {
+            AssertTokensNoReplacement(json.Replace('\'', '"'), expectedTokens);
+        }
+
+        /// <summary>
+        /// Asserts that the specified JSON is tokenized into the given sequence of tokens.
+        /// Unlike <see cref="AssertTokens(string, JsonToken[])"/>, this does not perform any character
+        /// replacement on the specified JSON, and should be used when the text contains apostrophes which
+        /// are expected to be used *as* apostrophes. The "end document" token is not specified in the list of 
+        /// expected tokens, but is implicit.
+        /// </summary>
+        private static void AssertTokensNoReplacement(string json, params JsonToken[] expectedTokens)
+        {
+            var reader = new StringReader(json);
+            var tokenizer = JsonTokenizer.FromTextReader(reader);
+            for (int i = 0; i < expectedTokens.Length; i++)
+            {
+                var actualToken = tokenizer.Next();
+                if (actualToken == JsonToken.EndDocument)
+                {
+                    Assert.Fail("Expected {0} but reached end of token stream", expectedTokens[i]);
+                }
+                Assert.AreEqual(expectedTokens[i], actualToken);
+            }
+            var finalToken = tokenizer.Next();
+            if (finalToken != JsonToken.EndDocument)
+            {
+                Assert.Fail("Expected token stream to be exhausted; received {0}", finalToken);
+            }
+        }
+
+        private static void AssertThrowsAfter(string json, params JsonToken[] expectedTokens)
+        {
+            var reader = new StringReader(json);
+            var tokenizer = JsonTokenizer.FromTextReader(reader);
+            for (int i = 0; i < expectedTokens.Length; i++)
+            {
+                var actualToken = tokenizer.Next();
+                if (actualToken == JsonToken.EndDocument)
+                {
+                    Assert.Fail("Expected {0} but reached end of document", expectedTokens[i]);
+                }
+                Assert.AreEqual(expectedTokens[i], actualToken);
+            }
+            Assert.Throws<InvalidJsonException>(() => tokenizer.Next());
+        }
+    }
+}
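The helpers above drive the tokenizer by pulling tokens from JsonTokenizer.FromTextReader until Next() returns JsonToken.EndDocument. A minimal stand-alone sketch of that same loop is shown below; the namespace and the accessibility of JsonTokenizer are assumed to match the test file (the test assembly reaches internal types), so treat it as illustrative rather than public API.

    using System;
    using System.IO;
    using Google.Protobuf;

    internal static class TokenizerSketch
    {
        internal static void Run()
        {
            // Tokenize a small document; Next() returns EndDocument exactly once,
            // and (per NextAfterEndDocumentThrows above) calling it again throws.
            var tokenizer = JsonTokenizer.FromTextReader(new StringReader("{\"x\": [1, true]}"));
            JsonToken token;
            while ((token = tokenizer.Next()) != JsonToken.EndDocument)
            {
                Console.WriteLine(token);
            }
        }
    }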

+ 21 - 24
csharp/src/Google.Protobuf.Test/Reflection/DescriptorsTest.cs

@@ -46,7 +46,7 @@ namespace Google.Protobuf.Reflection
         [Test]
         public void FileDescriptor()
         {
-            FileDescriptor file = UnittestProto3.Descriptor;
+            FileDescriptor file = UnittestProto3Reflection.Descriptor;
 
             Assert.AreEqual("google/protobuf/unittest_proto3.proto", file.Name);
             Assert.AreEqual("protobuf_unittest", file.Package);
@@ -56,14 +56,15 @@ namespace Google.Protobuf.Reflection
 
             // unittest.proto doesn't have any public imports, but unittest_import.proto does.
             Assert.AreEqual(0, file.PublicDependencies.Count);
-            Assert.AreEqual(1, UnittestImportProto3.Descriptor.PublicDependencies.Count);
-            Assert.AreEqual(UnittestImportPublicProto3.Descriptor, UnittestImportProto3.Descriptor.PublicDependencies[0]);
+            Assert.AreEqual(1, UnittestImportProto3Reflection.Descriptor.PublicDependencies.Count);
+            Assert.AreEqual(UnittestImportPublicProto3Reflection.Descriptor, UnittestImportProto3Reflection.Descriptor.PublicDependencies[0]);
 
             Assert.AreEqual(1, file.Dependencies.Count);
-            Assert.AreEqual(UnittestImportProto3.Descriptor, file.Dependencies[0]);
+            Assert.AreEqual(UnittestImportProto3Reflection.Descriptor, file.Dependencies[0]);
 
             MessageDescriptor messageType = TestAllTypes.Descriptor;
-            Assert.AreSame(typeof(TestAllTypes), messageType.GeneratedType);
+            Assert.AreSame(typeof(TestAllTypes), messageType.ClrType);
+            Assert.AreSame(TestAllTypes.Parser, messageType.Parser);
             Assert.AreEqual(messageType, file.MessageTypes[0]);
             Assert.AreEqual(messageType, file.FindTypeByName<MessageDescriptor>("TestAllTypes"));
             Assert.Null(file.FindTypeByName<MessageDescriptor>("NoSuchType"));
@@ -76,8 +77,8 @@ namespace Google.Protobuf.Reflection
             Assert.AreEqual(file.EnumTypes[0], file.FindTypeByName<EnumDescriptor>("ForeignEnum"));
             Assert.Null(file.FindTypeByName<EnumDescriptor>("NoSuchType"));
             Assert.Null(file.FindTypeByName<EnumDescriptor>("protobuf_unittest.ForeignEnum"));
-            Assert.AreEqual(1, UnittestImportProto3.Descriptor.EnumTypes.Count);
-            Assert.AreEqual("ImportEnum", UnittestImportProto3.Descriptor.EnumTypes[0].Name);
+            Assert.AreEqual(1, UnittestImportProto3Reflection.Descriptor.EnumTypes.Count);
+            Assert.AreEqual("ImportEnum", UnittestImportProto3Reflection.Descriptor.EnumTypes[0].Name);
             for (int i = 0; i < file.EnumTypes.Count; i++)
             {
                 Assert.AreEqual(i, file.EnumTypes[i].Index);
@@ -94,7 +95,7 @@ namespace Google.Protobuf.Reflection
 
             Assert.AreEqual("TestAllTypes", messageType.Name);
             Assert.AreEqual("protobuf_unittest.TestAllTypes", messageType.FullName);
-            Assert.AreEqual(UnittestProto3.Descriptor, messageType.File);
+            Assert.AreEqual(UnittestProto3Reflection.Descriptor, messageType.File);
             Assert.IsNull(messageType.ContainingType);
             Assert.IsNull(messageType.Proto.Options);
 
@@ -102,7 +103,7 @@ namespace Google.Protobuf.Reflection
 
             Assert.AreEqual("NestedMessage", nestedType.Name);
             Assert.AreEqual("protobuf_unittest.TestAllTypes.NestedMessage", nestedType.FullName);
-            Assert.AreEqual(UnittestProto3.Descriptor, nestedType.File);
+            Assert.AreEqual(UnittestProto3Reflection.Descriptor, nestedType.File);
             Assert.AreEqual(messageType, nestedType.ContainingType);
 
             FieldDescriptor field = messageType.Fields.InDeclarationOrder()[0];
@@ -146,7 +147,7 @@ namespace Google.Protobuf.Reflection
                             primitiveField.FullName);
             Assert.AreEqual(1, primitiveField.FieldNumber);
             Assert.AreEqual(messageType, primitiveField.ContainingType);
-            Assert.AreEqual(UnittestProto3.Descriptor, primitiveField.File);
+            Assert.AreEqual(UnittestProto3Reflection.Descriptor, primitiveField.File);
             Assert.AreEqual(FieldType.Int32, primitiveField.FieldType);
             Assert.IsNull(primitiveField.Proto.Options);
             
@@ -175,26 +176,26 @@ namespace Google.Protobuf.Reflection
         public void EnumDescriptor()
         {
             // Note: this test is a bit different to the Java version because there's no static way of getting to the descriptor
-            EnumDescriptor enumType = UnittestProto3.Descriptor.FindTypeByName<EnumDescriptor>("ForeignEnum");
+            EnumDescriptor enumType = UnittestProto3Reflection.Descriptor.FindTypeByName<EnumDescriptor>("ForeignEnum");
             EnumDescriptor nestedType = TestAllTypes.Descriptor.FindDescriptor<EnumDescriptor>("NestedEnum");
 
             Assert.AreEqual("ForeignEnum", enumType.Name);
             Assert.AreEqual("protobuf_unittest.ForeignEnum", enumType.FullName);
-            Assert.AreEqual(UnittestProto3.Descriptor, enumType.File);
+            Assert.AreEqual(UnittestProto3Reflection.Descriptor, enumType.File);
             Assert.Null(enumType.ContainingType);
             Assert.Null(enumType.Proto.Options);
 
             Assert.AreEqual("NestedEnum", nestedType.Name);
             Assert.AreEqual("protobuf_unittest.TestAllTypes.NestedEnum",
                             nestedType.FullName);
-            Assert.AreEqual(UnittestProto3.Descriptor, nestedType.File);
+            Assert.AreEqual(UnittestProto3Reflection.Descriptor, nestedType.File);
             Assert.AreEqual(TestAllTypes.Descriptor, nestedType.ContainingType);
 
             EnumValueDescriptor value = enumType.FindValueByName("FOREIGN_FOO");
             Assert.AreEqual(value, enumType.Values[1]);
             Assert.AreEqual("FOREIGN_FOO", value.Name);
             Assert.AreEqual(4, value.Number);
-            Assert.AreEqual((int) ForeignEnum.FOREIGN_FOO, value.Number);
+            Assert.AreEqual((int) ForeignEnum.ForeignFoo, value.Number);
             Assert.AreEqual(value, enumType.FindValueByNumber(4));
             Assert.Null(enumType.FindValueByName("NO_SUCH_VALUE"));
             for (int i = 0; i < enumType.Values.Count; i++)
@@ -226,17 +227,12 @@ namespace Google.Protobuf.Reflection
         }
 
         [Test]
-        public void ConstructionWithoutGeneratedCodeInfo()
+        public void MapEntryMessageDescriptor()
         {
-            var data = UnittestIssues.Descriptor.Proto.ToByteArray();
-            var newDescriptor = Google.Protobuf.Reflection.FileDescriptor.InternalBuildGeneratedFileFrom(data, new Reflection.FileDescriptor[] { }, null);
-
-            // We should still be able to get at a field...
-            var messageDescriptor = newDescriptor.FindTypeByName<MessageDescriptor>("ItemField");
-            var fieldDescriptor = messageDescriptor.FindFieldByName("item");
-            // But there shouldn't be an accessor (or a generated type for the message)
-            Assert.IsNull(fieldDescriptor.Accessor);
-            Assert.IsNull(messageDescriptor.GeneratedType);
+            var descriptor = MapWellKnownTypes.Descriptor.NestedTypes[0];
+            Assert.IsNull(descriptor.Parser);
+            Assert.IsNull(descriptor.ClrType);
+            Assert.IsNull(descriptor.Fields[1].Accessor);
         }
 
         // From TestFieldOrdering:
@@ -257,6 +253,7 @@ namespace Google.Protobuf.Reflection
         public void DescriptorProtoFileDescriptor()
         {
             var descriptor = Google.Protobuf.Reflection.FileDescriptor.DescriptorProtoFileDescriptor;
+            Assert.AreEqual("google/protobuf/descriptor.proto", descriptor.Name);
         }
     }
 }
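The hunks above track two renames in the generated reflection layer: per-file holder classes gain a Reflection suffix (UnittestProto3 becomes UnittestProto3Reflection), and MessageDescriptor exposes ClrType plus a Parser property in place of the old GeneratedType. A short sketch of reaching the same objects through the renamed API, using only members visible in the diff:

    using System;
    using Google.Protobuf.Reflection;
    using Google.Protobuf.TestProtos;

    internal static class DescriptorSketch
    {
        internal static void Run()
        {
            FileDescriptor file = UnittestProto3Reflection.Descriptor;      // was UnittestProto3.Descriptor
            MessageDescriptor message = TestAllTypes.Descriptor;

            Console.WriteLine(message.ClrType);                                       // was message.GeneratedType
            Console.WriteLine(ReferenceEquals(message.Parser, TestAllTypes.Parser));  // parser now exposed on the descriptor
            Console.WriteLine(file.FindTypeByName<MessageDescriptor>("TestAllTypes") == message);
        }
    }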

+ 2 - 2
csharp/src/Google.Protobuf.Test/Reflection/FieldAccessTest.cs

@@ -128,7 +128,7 @@ namespace Google.Protobuf.Reflection
             fields[TestAllTypes.SingleInt32FieldNumber].Accessor.SetValue(message, 500);
             fields[TestAllTypes.SingleStringFieldNumber].Accessor.SetValue(message, "It's a string");
             fields[TestAllTypes.SingleBytesFieldNumber].Accessor.SetValue(message, ByteString.CopyFrom(99, 98, 97));
-            fields[TestAllTypes.SingleForeignEnumFieldNumber].Accessor.SetValue(message, ForeignEnum.FOREIGN_FOO);
+            fields[TestAllTypes.SingleForeignEnumFieldNumber].Accessor.SetValue(message, ForeignEnum.ForeignFoo);
             fields[TestAllTypes.SingleForeignMessageFieldNumber].Accessor.SetValue(message, new ForeignMessage { C = 12345 });
             fields[TestAllTypes.SingleDoubleFieldNumber].Accessor.SetValue(message, 20150701.5);
 
@@ -138,7 +138,7 @@ namespace Google.Protobuf.Reflection
                 SingleInt32 = 500,
                 SingleString = "It's a string",
                 SingleBytes = ByteString.CopyFrom(99, 98, 97),
-                SingleForeignEnum = ForeignEnum.FOREIGN_FOO,
+                SingleForeignEnum = ForeignEnum.ForeignFoo,
                 SingleForeignMessage = new ForeignMessage { C = 12345 },
                 SingleDouble = 20150701.5
             };
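The FieldAccessTest hunks only rename enum members (FOREIGN_FOO becomes ForeignFoo); the pattern they sit inside is reflective field access through FieldDescriptor.Accessor. A hedged sketch of that pattern, where the use of TestAllTypes.Descriptor.Fields as the source of the `fields` collection is an assumption drawn from the test, not shown in the hunks:

    using System;
    using Google.Protobuf.TestProtos;

    internal static class FieldAccessSketch
    {
        internal static void Run()
        {
            var message = new TestAllTypes();
            var fields = TestAllTypes.Descriptor.Fields;  // assumed source of the test's `fields` collection

            // Set the field reflectively by field number, then read it back through the generated property.
            fields[TestAllTypes.SingleForeignEnumFieldNumber].Accessor.SetValue(message, ForeignEnum.ForeignFoo);
            Console.WriteLine(message.SingleForeignEnum);  // ForeignFoo
        }
    }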

+ 94 - 0
csharp/src/Google.Protobuf.Test/Reflection/TypeRegistryTest.cs

@@ -0,0 +1,94 @@
+#region Copyright notice and license
+// Protocol Buffers - Google's data interchange format
+// Copyright 2015 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#endregion
+
+using Google.Protobuf.TestProtos;
+using Google.Protobuf.WellKnownTypes;
+using NUnit.Framework;
+
+namespace Google.Protobuf.Reflection
+{
+    public class TypeRegistryTest
+    {
+        // Most of our tests use messages. Simple test that we really can use files...
+        [Test]
+        public void CreateWithFileDescriptor()
+        {
+            var registry = TypeRegistry.FromFiles(DurationReflection.Descriptor, StructReflection.Descriptor);
+            AssertDescriptorPresent(registry, Duration.Descriptor);
+            AssertDescriptorPresent(registry, ListValue.Descriptor);
+            AssertDescriptorAbsent(registry, Timestamp.Descriptor);
+        }
+
+        [Test]
+        public void TypesFromSameFile()
+        {
+            // Just for kicks, let's start with a nested type
+            var registry = TypeRegistry.FromMessages(TestAllTypes.Types.NestedMessage.Descriptor);
+            // Top-level...
+            AssertDescriptorPresent(registry, TestFieldOrderings.Descriptor);
+            // ... and nested (not the same as the original NestedMessage!)
+            AssertDescriptorPresent(registry, TestFieldOrderings.Types.NestedMessage.Descriptor);
+        }
+
+        [Test]
+        public void DependenciesAreIncluded()
+        {
+            var registry = TypeRegistry.FromMessages(TestAllTypes.Descriptor);
+            // Direct dependencies
+            AssertDescriptorPresent(registry, ImportMessage.Descriptor);
+            // Public dependencies
+            AssertDescriptorPresent(registry, PublicImportMessage.Descriptor);
+        }
+
+        [Test]
+        public void DuplicateFiles()
+        {
+            // Duplicates via dependencies and simply via repetition
+            var registry = TypeRegistry.FromFiles(
+                UnittestProto3Reflection.Descriptor, UnittestImportProto3Reflection.Descriptor,
+                TimestampReflection.Descriptor, TimestampReflection.Descriptor);
+            AssertDescriptorPresent(registry, TestAllTypes.Descriptor);
+            AssertDescriptorPresent(registry, ImportMessage.Descriptor);
+            AssertDescriptorPresent(registry, Timestamp.Descriptor);
+        }
+
+        private static void AssertDescriptorPresent(TypeRegistry registry, MessageDescriptor descriptor)
+        {
+            Assert.AreSame(descriptor, registry.Find(descriptor.FullName));
+        }
+
+        private static void AssertDescriptorAbsent(TypeRegistry registry, MessageDescriptor descriptor)
+        {
+            Assert.IsNull(registry.Find(descriptor.FullName));
+        }
+    }
+}
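TypeRegistryTest pins down the lookup contract: a registry is built from file or message descriptors, dependencies are pulled in transitively, and Find is keyed by the fully qualified proto name, returning null for anything unregistered. A usage sketch along the same lines as CreateWithFileDescriptor above:

    using System;
    using Google.Protobuf.Reflection;
    using Google.Protobuf.WellKnownTypes;

    internal static class TypeRegistrySketch
    {
        internal static void Run()
        {
            var registry = TypeRegistry.FromFiles(DurationReflection.Descriptor, StructReflection.Descriptor);

            // Lookup is by full proto name; types from files not passed in are simply absent.
            Console.WriteLine(registry.Find("google.protobuf.Duration") != null);   // true
            Console.WriteLine(registry.Find("google.protobuf.Timestamp") == null);  // true
        }
    }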

+ 8 - 8
csharp/src/Google.Protobuf.Test/SampleMessages.cs

@@ -54,13 +54,13 @@ namespace Google.Protobuf
                 SingleFixed32 = 23,
                 SingleFixed64 = 1234567890123,
                 SingleFloat = 12.25f,
-                SingleForeignEnum = ForeignEnum.FOREIGN_BAR,
+                SingleForeignEnum = ForeignEnum.ForeignBar,
                 SingleForeignMessage = new ForeignMessage { C = 10 },
-                SingleImportEnum = ImportEnum.IMPORT_BAZ,
+                SingleImportEnum = ImportEnum.ImportBaz,
                 SingleImportMessage = new ImportMessage { D = 20 },
                 SingleInt32 = 100,
                 SingleInt64 = 3210987654321,
-                SingleNestedEnum = TestAllTypes.Types.NestedEnum.FOO,
+                SingleNestedEnum = TestAllTypes.Types.NestedEnum.Foo,
                 SingleNestedMessage = new TestAllTypes.Types.NestedMessage { Bb = 35 },
                 SinglePublicImportMessage = new PublicImportMessage { E = 54 },
                 SingleSfixed32 = -123,
@@ -76,13 +76,13 @@ namespace Google.Protobuf
                 RepeatedFixed32 = { UInt32.MaxValue, 23 },
                 RepeatedFixed64 = { UInt64.MaxValue, 1234567890123 },
                 RepeatedFloat = { 100f, 12.25f },
-                RepeatedForeignEnum = { ForeignEnum.FOREIGN_FOO, ForeignEnum.FOREIGN_BAR },
+                RepeatedForeignEnum = { ForeignEnum.ForeignFoo, ForeignEnum.ForeignBar },
                 RepeatedForeignMessage = { new ForeignMessage(), new ForeignMessage { C = 10 } },
-                RepeatedImportEnum = { ImportEnum.IMPORT_BAZ, ImportEnum.IMPORT_ENUM_UNSPECIFIED },
+                RepeatedImportEnum = { ImportEnum.ImportBaz, ImportEnum.Unspecified },
                 RepeatedImportMessage = { new ImportMessage { D = 20 }, new ImportMessage { D = 25 } },
                 RepeatedInt32 = { 100, 200 },
                 RepeatedInt64 = { 3210987654321, Int64.MaxValue },
-                RepeatedNestedEnum = { TestAllTypes.Types.NestedEnum.FOO, TestAllTypes.Types.NestedEnum.NEG },
+                RepeatedNestedEnum = { TestAllTypes.Types.NestedEnum.Foo, TestAllTypes.Types.NestedEnum.Neg },
                 RepeatedNestedMessage = { new TestAllTypes.Types.NestedMessage { Bb = 35 }, new TestAllTypes.Types.NestedMessage { Bb = 10 } },
                 RepeatedPublicImportMessage = { new PublicImportMessage { E = 54 }, new PublicImportMessage { E = -1 } },
                 RepeatedSfixed32 = { -123, 123 },
@@ -92,8 +92,8 @@ namespace Google.Protobuf
                 RepeatedString = { "foo", "bar" },
                 RepeatedUint32 = { UInt32.MaxValue, UInt32.MinValue },
                 RepeatedUint64 = { UInt64.MaxValue, UInt32.MinValue },
-                OneofString = "Oneof string"                
+                OneofString = "Oneof string"
             };
         }
     }
-}
+}

+ 2 - 2
csharp/src/Google.Protobuf.Test/TestCornerCases.cs

@@ -43,8 +43,8 @@ namespace Google.Protobuf
             NegativeEnumMessage msg = new NegativeEnumMessage
             {
                 Value = NegativeEnum.MinusOne,
-                Values = { NegativeEnum.NEGATIVE_ENUM_ZERO, NegativeEnum.MinusOne, NegativeEnum.FiveBelow },
-                PackedValues = { NegativeEnum.NEGATIVE_ENUM_ZERO, NegativeEnum.MinusOne, NegativeEnum.FiveBelow }
+                Values = { NegativeEnum.Zero, NegativeEnum.MinusOne, NegativeEnum.FiveBelow },
+                PackedValues = { NegativeEnum.Zero, NegativeEnum.MinusOne, NegativeEnum.FiveBelow }
             };
 
             Assert.AreEqual(58, msg.CalculateSize());

+ 45 - 0
csharp/src/Google.Protobuf.Test/TestProtos/ForeignMessagePartial.cs

@@ -0,0 +1,45 @@
+#region Copyright notice and license
+// Protocol Buffers - Google's data interchange format
+// Copyright 2016 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#endregion
+
+namespace Google.Protobuf.TestProtos
+{
+    /// <summary>
+    /// A message with custom diagnostics (to test that they work).
+    /// </summary>
+    public partial class ForeignMessage : ICustomDiagnosticMessage
+    {
+        public string ToDiagnosticString()
+        {
+            return $"{{ \"c\": {C}, \"@cInHex\": \"{C:x}\" }}";
+        }
+    }
+}
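This new partial class gives ForeignMessage a hand-written ToDiagnosticString. Together with the generated ToString() bodies switching to JsonFormatter.ToDiagnosticString (visible in the MapUnittestProto3 hunks below), the apparent intent is that a message can override its diagnostic rendering via ICustomDiagnosticMessage; the sketch below assumes the formatter honours that interface, which the diff itself does not show.

    using System;
    using Google.Protobuf.TestProtos;

    internal static class DiagnosticSketch
    {
        internal static void Run()
        {
            var message = new ForeignMessage { C = 31 };
            // If ToDiagnosticString is picked up by the generated ToString(), the
            // expected output is: { "c": 31, "@cInHex": "1f" }
            Console.WriteLine(message.ToString());
        }
    }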

+ 30 - 30
csharp/src/Google.Protobuf.Test/TestProtos/MapUnittestProto3.cs

@@ -11,7 +11,7 @@ namespace Google.Protobuf.TestProtos {
 
   /// <summary>Holder for reflection information generated from google/protobuf/map_unittest_proto3.proto</summary>
   [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
-  public static partial class MapUnittestProto3 {
+  public static partial class MapUnittestProto3Reflection {
 
     #region Descriptor
     /// <summary>File descriptor for google/protobuf/map_unittest_proto3.proto</summary>
@@ -20,7 +20,7 @@ namespace Google.Protobuf.TestProtos {
     }
     private static pbr::FileDescriptor descriptor;
 
-    static MapUnittestProto3() {
+    static MapUnittestProto3Reflection() {
       byte[] descriptorData = global::System.Convert.FromBase64String(
           string.Concat(
             "Cilnb29nbGUvcHJvdG9idWYvbWFwX3VuaXR0ZXN0X3Byb3RvMy5wcm90bxIR",
@@ -147,16 +147,16 @@ namespace Google.Protobuf.TestProtos {
             "dmFsdWUYAiABKAU6AjgBKj8KB01hcEVudW0SEAoMTUFQX0VOVU1fRk9PEAAS",
             "EAoMTUFQX0VOVU1fQkFSEAESEAoMTUFQX0VOVU1fQkFaEAJCIPgBAaoCGkdv",
             "b2dsZS5Qcm90b2J1Zi5UZXN0UHJvdG9zYgZwcm90bzM="));
-      descriptor = pbr::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
-          new pbr::FileDescriptor[] { global::Google.Protobuf.TestProtos.UnittestProto3.Descriptor, },
-          new pbr::GeneratedCodeInfo(new[] {typeof(global::Google.Protobuf.TestProtos.MapEnum), }, new pbr::GeneratedCodeInfo[] {
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.TestMap), new[]{ "MapInt32Int32", "MapInt64Int64", "MapUint32Uint32", "MapUint64Uint64", "MapSint32Sint32", "MapSint64Sint64", "MapFixed32Fixed32", "MapFixed64Fixed64", "MapSfixed32Sfixed32", "MapSfixed64Sfixed64", "MapInt32Float", "MapInt32Double", "MapBoolBool", "MapStringString", "MapInt32Bytes", "MapInt32Enum", "MapInt32ForeignMessage" }, null, null, new pbr::GeneratedCodeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.TestMapSubmessage), new[]{ "TestMap" }, null, null, null),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.TestMessageMap), new[]{ "MapInt32Message" }, null, null, new pbr::GeneratedCodeInfo[] { null, }),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.TestSameTypeMap), new[]{ "Map1", "Map2" }, null, null, new pbr::GeneratedCodeInfo[] { null, null, }),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.TestArenaMap), new[]{ "MapInt32Int32", "MapInt64Int64", "MapUint32Uint32", "MapUint64Uint64", "MapSint32Sint32", "MapSint64Sint64", "MapFixed32Fixed32", "MapFixed64Fixed64", "MapSfixed32Sfixed32", "MapSfixed64Sfixed64", "MapInt32Float", "MapInt32Double", "MapBoolBool", "MapInt32Enum", "MapInt32ForeignMessage" }, null, null, new pbr::GeneratedCodeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType), new[]{ "Type" }, null, new[]{ typeof(global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType.Types.Type) }, new pbr::GeneratedCodeInfo[] { null, }),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.MessageContainingMapCalledEntry), new[]{ "Entry" }, null, null, new pbr::GeneratedCodeInfo[] { null, })
+      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
+          new pbr::FileDescriptor[] { global::Google.Protobuf.TestProtos.UnittestProto3Reflection.Descriptor, },
+          new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Google.Protobuf.TestProtos.MapEnum), }, new pbr::GeneratedClrTypeInfo[] {
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMap), global::Google.Protobuf.TestProtos.TestMap.Parser, new[]{ "MapInt32Int32", "MapInt64Int64", "MapUint32Uint32", "MapUint64Uint64", "MapSint32Sint32", "MapSint64Sint64", "MapFixed32Fixed32", "MapFixed64Fixed64", "MapSfixed32Sfixed32", "MapSfixed64Sfixed64", "MapInt32Float", "MapInt32Double", "MapBoolBool", "MapStringString", "MapInt32Bytes", "MapInt32Enum", "MapInt32ForeignMessage" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMapSubmessage), global::Google.Protobuf.TestProtos.TestMapSubmessage.Parser, new[]{ "TestMap" }, null, null, null),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestMessageMap), global::Google.Protobuf.TestProtos.TestMessageMap.Parser, new[]{ "MapInt32Message" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, }),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestSameTypeMap), global::Google.Protobuf.TestProtos.TestSameTypeMap.Parser, new[]{ "Map1", "Map2" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, }),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestArenaMap), global::Google.Protobuf.TestProtos.TestArenaMap.Parser, new[]{ "MapInt32Int32", "MapInt64Int64", "MapUint32Uint32", "MapUint64Uint64", "MapSint32Sint32", "MapSint64Sint64", "MapFixed32Fixed32", "MapFixed64Fixed64", "MapSfixed32Sfixed32", "MapSfixed64Sfixed64", "MapInt32Float", "MapInt32Double", "MapBoolBool", "MapInt32Enum", "MapInt32ForeignMessage" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, }),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType), global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType.Parser, new[]{ "Type" }, null, new[]{ typeof(global::Google.Protobuf.TestProtos.MessageContainingEnumCalledType.Types.Type) }, new pbr::GeneratedClrTypeInfo[] { null, }),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MessageContainingMapCalledEntry), global::Google.Protobuf.TestProtos.MessageContainingMapCalledEntry.Parser, new[]{ "Entry" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, })
           }));
     }
     #endregion
@@ -164,9 +164,9 @@ namespace Google.Protobuf.TestProtos {
   }
   #region Enums
   public enum MapEnum {
-    MAP_ENUM_FOO = 0,
-    MAP_ENUM_BAR = 1,
-    MAP_ENUM_BAZ = 2,
+    [pbr::OriginalName("MAP_ENUM_FOO")] Foo = 0,
+    [pbr::OriginalName("MAP_ENUM_BAR")] Bar = 1,
+    [pbr::OriginalName("MAP_ENUM_BAZ")] Baz = 2,
   }
 
   #endregion
@@ -181,7 +181,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<TestMap> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3.Descriptor.MessageTypes[0]; }
+      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[0]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -425,7 +425,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -580,7 +580,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<TestMapSubmessage> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3.Descriptor.MessageTypes[1]; }
+      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[1]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -633,7 +633,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -689,7 +689,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<TestMessageMap> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3.Descriptor.MessageTypes[2]; }
+      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[2]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -741,7 +741,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -787,7 +787,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<TestSameTypeMap> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3.Descriptor.MessageTypes[3]; }
+      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[3]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -851,7 +851,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -901,7 +901,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<TestArenaMap> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3.Descriptor.MessageTypes[4]; }
+      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[4]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -1121,7 +1121,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -1266,7 +1266,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<MessageContainingEnumCalledType> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3.Descriptor.MessageTypes[5]; }
+      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[5]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -1318,7 +1318,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -1358,7 +1358,7 @@ namespace Google.Protobuf.TestProtos {
     [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
     public static partial class Types {
       public enum Type {
-        TYPE_FOO = 0,
+        [pbr::OriginalName("TYPE_FOO")] Foo = 0,
       }
 
     }
@@ -1375,7 +1375,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<MessageContainingMapCalledEntry> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3.Descriptor.MessageTypes[6]; }
+      get { return global::Google.Protobuf.TestProtos.MapUnittestProto3Reflection.Descriptor.MessageTypes[6]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -1427,7 +1427,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {

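The enum hunks in this file replace SCREAMING_CASE members with PascalCase names and record the proto name in a [pbr::OriginalName] attribute. The sketch below reads that attribute back with plain .NET reflection purely as an illustration; the attribute living in Google.Protobuf.Reflection and exposing the original name through a Name property are assumptions about its shape, not facts shown in the diff.

    using System;
    using System.Linq;
    using System.Reflection;
    using Google.Protobuf.Reflection;
    using Google.Protobuf.TestProtos;

    internal static class OriginalNameSketch
    {
        internal static void Run()
        {
            FieldInfo field = typeof(MapEnum).GetField(nameof(MapEnum.Foo));
            var attribute = field.GetCustomAttributes(typeof(OriginalNameAttribute), false)
                                 .Cast<OriginalNameAttribute>()
                                 .FirstOrDefault();
            Console.WriteLine(attribute?.Name);  // expected: MAP_ENUM_FOO
        }
    }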
+ 12 - 12
csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportProto3.cs

@@ -11,7 +11,7 @@ namespace Google.Protobuf.TestProtos {
 
   /// <summary>Holder for reflection information generated from google/protobuf/unittest_import_proto3.proto</summary>
   [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
-  public static partial class UnittestImportProto3 {
+  public static partial class UnittestImportProto3Reflection {
 
     #region Descriptor
     /// <summary>File descriptor for google/protobuf/unittest_import_proto3.proto</summary>
@@ -20,7 +20,7 @@ namespace Google.Protobuf.TestProtos {
     }
     private static pbr::FileDescriptor descriptor;
 
-    static UnittestImportProto3() {
+    static UnittestImportProto3Reflection() {
       byte[] descriptorData = global::System.Convert.FromBase64String(
           string.Concat(
             "Cixnb29nbGUvcHJvdG9idWYvdW5pdHRlc3RfaW1wb3J0X3Byb3RvMy5wcm90",
@@ -31,10 +31,10 @@ namespace Google.Protobuf.TestProtos {
             "UhAIEg4KCklNUE9SVF9CQVoQCUI8Chhjb20uZ29vZ2xlLnByb3RvYnVmLnRl",
             "c3RIAfgBAaoCGkdvb2dsZS5Qcm90b2J1Zi5UZXN0UHJvdG9zUABiBnByb3Rv",
             "Mw=="));
-      descriptor = pbr::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
-          new pbr::FileDescriptor[] { global::Google.Protobuf.TestProtos.UnittestImportPublicProto3.Descriptor, },
-          new pbr::GeneratedCodeInfo(new[] {typeof(global::Google.Protobuf.TestProtos.ImportEnum), }, new pbr::GeneratedCodeInfo[] {
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.ImportMessage), new[]{ "D" }, null, null, null)
+      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
+          new pbr::FileDescriptor[] { global::Google.Protobuf.TestProtos.UnittestImportPublicProto3Reflection.Descriptor, },
+          new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Google.Protobuf.TestProtos.ImportEnum), }, new pbr::GeneratedClrTypeInfo[] {
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.ImportMessage), global::Google.Protobuf.TestProtos.ImportMessage.Parser, new[]{ "D" }, null, null, null)
           }));
     }
     #endregion
@@ -42,10 +42,10 @@ namespace Google.Protobuf.TestProtos {
   }
   #region Enums
   public enum ImportEnum {
-    IMPORT_ENUM_UNSPECIFIED = 0,
-    IMPORT_FOO = 7,
-    IMPORT_BAR = 8,
-    IMPORT_BAZ = 9,
+    [pbr::OriginalName("IMPORT_ENUM_UNSPECIFIED")] Unspecified = 0,
+    [pbr::OriginalName("IMPORT_FOO")] ImportFoo = 7,
+    [pbr::OriginalName("IMPORT_BAR")] ImportBar = 8,
+    [pbr::OriginalName("IMPORT_BAZ")] ImportBaz = 9,
   }
 
   #endregion
@@ -57,7 +57,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<ImportMessage> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.UnittestImportProto3.Descriptor.MessageTypes[0]; }
+      get { return global::Google.Protobuf.TestProtos.UnittestImportProto3Reflection.Descriptor.MessageTypes[0]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -110,7 +110,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {

+ 7 - 7
csharp/src/Google.Protobuf.Test/TestProtos/UnittestImportPublicProto3.cs

@@ -11,7 +11,7 @@ namespace Google.Protobuf.TestProtos {
 
   /// <summary>Holder for reflection information generated from google/protobuf/unittest_import_public_proto3.proto</summary>
   [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
-  public static partial class UnittestImportPublicProto3 {
+  public static partial class UnittestImportPublicProto3Reflection {
 
     #region Descriptor
     /// <summary>File descriptor for google/protobuf/unittest_import_public_proto3.proto</summary>
@@ -20,17 +20,17 @@ namespace Google.Protobuf.TestProtos {
     }
     private static pbr::FileDescriptor descriptor;
 
-    static UnittestImportPublicProto3() {
+    static UnittestImportPublicProto3Reflection() {
       byte[] descriptorData = global::System.Convert.FromBase64String(
           string.Concat(
             "CjNnb29nbGUvcHJvdG9idWYvdW5pdHRlc3RfaW1wb3J0X3B1YmxpY19wcm90",
             "bzMucHJvdG8SGHByb3RvYnVmX3VuaXR0ZXN0X2ltcG9ydCIgChNQdWJsaWNJ",
             "bXBvcnRNZXNzYWdlEgkKAWUYASABKAVCNwoYY29tLmdvb2dsZS5wcm90b2J1",
             "Zi50ZXN0qgIaR29vZ2xlLlByb3RvYnVmLlRlc3RQcm90b3NiBnByb3RvMw=="));
-      descriptor = pbr::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
+      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
           new pbr::FileDescriptor[] { },
-          new pbr::GeneratedCodeInfo(null, new pbr::GeneratedCodeInfo[] {
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.PublicImportMessage), new[]{ "E" }, null, null, null)
+          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.PublicImportMessage), global::Google.Protobuf.TestProtos.PublicImportMessage.Parser, new[]{ "E" }, null, null, null)
           }));
     }
     #endregion
@@ -43,7 +43,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<PublicImportMessage> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.UnittestImportPublicProto3.Descriptor.MessageTypes[0]; }
+      get { return global::Google.Protobuf.TestProtos.UnittestImportPublicProto3Reflection.Descriptor.MessageTypes[0]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -96,7 +96,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {

+ 217 - 50
csharp/src/Google.Protobuf.Test/TestProtos/UnittestIssues.cs

@@ -11,7 +11,7 @@ namespace UnitTest.Issues.TestProtos {
 
   /// <summary>Holder for reflection information generated from unittest_issues.proto</summary>
   [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
-  public static partial class UnittestIssues {
+  public static partial class UnittestIssuesReflection {
 
     #region Descriptor
     /// <summary>File descriptor for unittest_issues.proto</summary>
@@ -20,7 +20,7 @@ namespace UnitTest.Issues.TestProtos {
     }
     private static pbr::FileDescriptor descriptor;
 
-    static UnittestIssues() {
+    static UnittestIssuesReflection() {
       byte[] descriptorData = global::System.Convert.FromBase64String(
           string.Concat(
             "ChV1bml0dGVzdF9pc3N1ZXMucHJvdG8SD3VuaXR0ZXN0X2lzc3VlcyInCghJ",
@@ -42,20 +42,23 @@ namespace UnitTest.Issues.TestProtos {
             "CgtwbGFpbl9pbnQzMhgEIAEoBRITCglvMV9zdHJpbmcYAiABKAlIABISCghv",
             "MV9pbnQzMhgFIAEoBUgAEhQKDHBsYWluX3N0cmluZxgBIAEoCRISCghvMl9p",
             "bnQzMhgGIAEoBUgBEhMKCW8yX3N0cmluZxgDIAEoCUgBQgQKAm8xQgQKAm8y",
-            "KlUKDE5lZ2F0aXZlRW51bRIWChJORUdBVElWRV9FTlVNX1pFUk8QABIWCglG",
-            "aXZlQmVsb3cQ+///////////ARIVCghNaW51c09uZRD///////////8BKi4K",
-            "DkRlcHJlY2F0ZWRFbnVtEhMKD0RFUFJFQ0FURURfWkVSTxAAEgcKA29uZRAB",
-            "Qh9IAaoCGlVuaXRUZXN0Lklzc3Vlcy5UZXN0UHJvdG9zYgZwcm90bzM="));
-      descriptor = pbr::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
+            "IksKDFRlc3RKc29uTmFtZRIMCgRuYW1lGAEgASgJEhkKC2Rlc2NyaXB0aW9u",
+            "GAIgASgJUgRkZXNjEhIKBGd1aWQYAyABKAlSBGV4aWQqVQoMTmVnYXRpdmVF",
+            "bnVtEhYKEk5FR0FUSVZFX0VOVU1fWkVSTxAAEhYKCUZpdmVCZWxvdxD7////",
+            "//////8BEhUKCE1pbnVzT25lEP///////////wEqLgoORGVwcmVjYXRlZEVu",
+            "dW0SEwoPREVQUkVDQVRFRF9aRVJPEAASBwoDb25lEAFCH0gBqgIaVW5pdFRl",
+            "c3QuSXNzdWVzLlRlc3RQcm90b3NiBnByb3RvMw=="));
+      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
           new pbr::FileDescriptor[] { },
-          new pbr::GeneratedCodeInfo(new[] {typeof(global::UnitTest.Issues.TestProtos.NegativeEnum), typeof(global::UnitTest.Issues.TestProtos.DeprecatedEnum), }, new pbr::GeneratedCodeInfo[] {
-            new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.Issue307), null, null, null, new pbr::GeneratedCodeInfo[] { new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce), null, null, null, new pbr::GeneratedCodeInfo[] { new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce.Types.NestedTwice), null, null, null, null)})}),
-            new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.NegativeEnumMessage), new[]{ "Value", "Values", "PackedValues" }, null, null, null),
-            new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.DeprecatedChild), null, null, null, null),
-            new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.DeprecatedFieldsMessage), new[]{ "PrimitiveValue", "PrimitiveArray", "MessageValue", "MessageArray", "EnumValue", "EnumArray" }, null, null, null),
-            new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.ItemField), new[]{ "Item" }, null, null, null),
-            new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.ReservedNames), new[]{ "Types_", "Descriptor_" }, null, null, new pbr::GeneratedCodeInfo[] { new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.ReservedNames.Types.SomeNestedType), null, null, null, null)}),
-            new pbr::GeneratedCodeInfo(typeof(global::UnitTest.Issues.TestProtos.TestJsonFieldOrdering), new[]{ "PlainInt32", "O1String", "O1Int32", "PlainString", "O2Int32", "O2String" }, new[]{ "O1", "O2" }, null, null)
+          new pbr::GeneratedClrTypeInfo(new[] {typeof(global::UnitTest.Issues.TestProtos.NegativeEnum), typeof(global::UnitTest.Issues.TestProtos.DeprecatedEnum), }, new pbr::GeneratedClrTypeInfo[] {
+            new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.Issue307), global::UnitTest.Issues.TestProtos.Issue307.Parser, null, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce), global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce.Parser, null, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce.Types.NestedTwice), global::UnitTest.Issues.TestProtos.Issue307.Types.NestedOnce.Types.NestedTwice.Parser, null, null, null, null)})}),
+            new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.NegativeEnumMessage), global::UnitTest.Issues.TestProtos.NegativeEnumMessage.Parser, new[]{ "Value", "Values", "PackedValues" }, null, null, null),
+            new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.DeprecatedChild), global::UnitTest.Issues.TestProtos.DeprecatedChild.Parser, null, null, null, null),
+            new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.DeprecatedFieldsMessage), global::UnitTest.Issues.TestProtos.DeprecatedFieldsMessage.Parser, new[]{ "PrimitiveValue", "PrimitiveArray", "MessageValue", "MessageArray", "EnumValue", "EnumArray" }, null, null, null),
+            new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.ItemField), global::UnitTest.Issues.TestProtos.ItemField.Parser, new[]{ "Item" }, null, null, null),
+            new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.ReservedNames), global::UnitTest.Issues.TestProtos.ReservedNames.Parser, new[]{ "Types_", "Descriptor_" }, null, null, new pbr::GeneratedClrTypeInfo[] { new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.ReservedNames.Types.SomeNestedType), global::UnitTest.Issues.TestProtos.ReservedNames.Types.SomeNestedType.Parser, null, null, null, null)}),
+            new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.TestJsonFieldOrdering), global::UnitTest.Issues.TestProtos.TestJsonFieldOrdering.Parser, new[]{ "PlainInt32", "O1String", "O1Int32", "PlainString", "O2Int32", "O2String" }, new[]{ "O1", "O2" }, null, null),
+            new pbr::GeneratedClrTypeInfo(typeof(global::UnitTest.Issues.TestProtos.TestJsonName), global::UnitTest.Issues.TestProtos.TestJsonName.Parser, new[]{ "Name", "Description", "Guid" }, null, null, null)
           }));
     }
     #endregion
@@ -63,14 +66,14 @@ namespace UnitTest.Issues.TestProtos {
   }
   #region Enums
   public enum NegativeEnum {
-    NEGATIVE_ENUM_ZERO = 0,
-    FiveBelow = -5,
-    MinusOne = -1,
+    [pbr::OriginalName("NEGATIVE_ENUM_ZERO")] Zero = 0,
+    [pbr::OriginalName("FiveBelow")] FiveBelow = -5,
+    [pbr::OriginalName("MinusOne")] MinusOne = -1,
   }
 
   public enum DeprecatedEnum {
-    DEPRECATED_ZERO = 0,
-    one = 1,
+    [pbr::OriginalName("DEPRECATED_ZERO")] DeprecatedZero = 0,
+    [pbr::OriginalName("one")] One = 1,
   }
 
   #endregion
@@ -86,7 +89,7 @@ namespace UnitTest.Issues.TestProtos {
     public static pb::MessageParser<Issue307> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::UnitTest.Issues.TestProtos.UnittestIssues.Descriptor.MessageTypes[0]; }
+      get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[0]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -126,7 +129,7 @@ namespace UnitTest.Issues.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -204,7 +207,7 @@ namespace UnitTest.Issues.TestProtos {
         }
 
         public override string ToString() {
-          return pb::JsonFormatter.Default.Format(this);
+          return pb::JsonFormatter.ToDiagnosticString(this);
         }
 
         public void WriteTo(pb::CodedOutputStream output) {
@@ -282,7 +285,7 @@ namespace UnitTest.Issues.TestProtos {
             }
 
             public override string ToString() {
-              return pb::JsonFormatter.Default.Format(this);
+              return pb::JsonFormatter.ToDiagnosticString(this);
             }
 
             public void WriteTo(pb::CodedOutputStream output) {
@@ -328,7 +331,7 @@ namespace UnitTest.Issues.TestProtos {
     public static pb::MessageParser<NegativeEnumMessage> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::UnitTest.Issues.TestProtos.UnittestIssues.Descriptor.MessageTypes[1]; }
+      get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[1]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -353,7 +356,7 @@ namespace UnitTest.Issues.TestProtos {
 
     /// <summary>Field number for the "value" field.</summary>
     public const int ValueFieldNumber = 1;
-    private global::UnitTest.Issues.TestProtos.NegativeEnum value_ = global::UnitTest.Issues.TestProtos.NegativeEnum.NEGATIVE_ENUM_ZERO;
+    private global::UnitTest.Issues.TestProtos.NegativeEnum value_ = 0;
     public global::UnitTest.Issues.TestProtos.NegativeEnum Value {
       get { return value_; }
       set {
@@ -398,18 +401,18 @@ namespace UnitTest.Issues.TestProtos {
 
     public override int GetHashCode() {
       int hash = 1;
-      if (Value != global::UnitTest.Issues.TestProtos.NegativeEnum.NEGATIVE_ENUM_ZERO) hash ^= Value.GetHashCode();
+      if (Value != 0) hash ^= Value.GetHashCode();
       hash ^= values_.GetHashCode();
       hash ^= packedValues_.GetHashCode();
       return hash;
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
-      if (Value != global::UnitTest.Issues.TestProtos.NegativeEnum.NEGATIVE_ENUM_ZERO) {
+      if (Value != 0) {
         output.WriteRawTag(8);
         output.WriteEnum((int) Value);
       }
@@ -419,7 +422,7 @@ namespace UnitTest.Issues.TestProtos {
 
     public int CalculateSize() {
       int size = 0;
-      if (Value != global::UnitTest.Issues.TestProtos.NegativeEnum.NEGATIVE_ENUM_ZERO) {
+      if (Value != 0) {
         size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Value);
       }
       size += values_.CalculateSize(_repeated_values_codec);
@@ -431,7 +434,7 @@ namespace UnitTest.Issues.TestProtos {
       if (other == null) {
         return;
       }
-      if (other.Value != global::UnitTest.Issues.TestProtos.NegativeEnum.NEGATIVE_ENUM_ZERO) {
+      if (other.Value != 0) {
         Value = other.Value;
       }
       values_.Add(other.values_);
@@ -471,7 +474,7 @@ namespace UnitTest.Issues.TestProtos {
     public static pb::MessageParser<DeprecatedChild> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::UnitTest.Issues.TestProtos.UnittestIssues.Descriptor.MessageTypes[2]; }
+      get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[2]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -511,7 +514,7 @@ namespace UnitTest.Issues.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -547,7 +550,7 @@ namespace UnitTest.Issues.TestProtos {
     public static pb::MessageParser<DeprecatedFieldsMessage> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::UnitTest.Issues.TestProtos.UnittestIssues.Descriptor.MessageTypes[3]; }
+      get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[3]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -617,7 +620,7 @@ namespace UnitTest.Issues.TestProtos {
 
     /// <summary>Field number for the "EnumValue" field.</summary>
     public const int EnumValueFieldNumber = 5;
-    private global::UnitTest.Issues.TestProtos.DeprecatedEnum enumValue_ = global::UnitTest.Issues.TestProtos.DeprecatedEnum.DEPRECATED_ZERO;
+    private global::UnitTest.Issues.TestProtos.DeprecatedEnum enumValue_ = 0;
     [global::System.ObsoleteAttribute()]
     public global::UnitTest.Issues.TestProtos.DeprecatedEnum EnumValue {
       get { return enumValue_; }
@@ -662,13 +665,13 @@ namespace UnitTest.Issues.TestProtos {
       hash ^= primitiveArray_.GetHashCode();
       if (messageValue_ != null) hash ^= MessageValue.GetHashCode();
       hash ^= messageArray_.GetHashCode();
-      if (EnumValue != global::UnitTest.Issues.TestProtos.DeprecatedEnum.DEPRECATED_ZERO) hash ^= EnumValue.GetHashCode();
+      if (EnumValue != 0) hash ^= EnumValue.GetHashCode();
       hash ^= enumArray_.GetHashCode();
       return hash;
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -682,7 +685,7 @@ namespace UnitTest.Issues.TestProtos {
         output.WriteMessage(MessageValue);
       }
       messageArray_.WriteTo(output, _repeated_messageArray_codec);
-      if (EnumValue != global::UnitTest.Issues.TestProtos.DeprecatedEnum.DEPRECATED_ZERO) {
+      if (EnumValue != 0) {
         output.WriteRawTag(40);
         output.WriteEnum((int) EnumValue);
       }
@@ -699,7 +702,7 @@ namespace UnitTest.Issues.TestProtos {
         size += 1 + pb::CodedOutputStream.ComputeMessageSize(MessageValue);
       }
       size += messageArray_.CalculateSize(_repeated_messageArray_codec);
-      if (EnumValue != global::UnitTest.Issues.TestProtos.DeprecatedEnum.DEPRECATED_ZERO) {
+      if (EnumValue != 0) {
         size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) EnumValue);
       }
       size += enumArray_.CalculateSize(_repeated_enumArray_codec);
@@ -721,7 +724,7 @@ namespace UnitTest.Issues.TestProtos {
         MessageValue.MergeFrom(other.MessageValue);
       }
       messageArray_.Add(other.messageArray_);
-      if (other.EnumValue != global::UnitTest.Issues.TestProtos.DeprecatedEnum.DEPRECATED_ZERO) {
+      if (other.EnumValue != 0) {
         EnumValue = other.EnumValue;
       }
       enumArray_.Add(other.enumArray_);
@@ -778,7 +781,7 @@ namespace UnitTest.Issues.TestProtos {
     public static pb::MessageParser<ItemField> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::UnitTest.Issues.TestProtos.UnittestIssues.Descriptor.MessageTypes[4]; }
+      get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[4]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -831,7 +834,7 @@ namespace UnitTest.Issues.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -881,7 +884,7 @@ namespace UnitTest.Issues.TestProtos {
     public static pb::MessageParser<ReservedNames> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::UnitTest.Issues.TestProtos.UnittestIssues.Descriptor.MessageTypes[5]; }
+      get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[5]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -947,7 +950,7 @@ namespace UnitTest.Issues.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -1056,7 +1059,7 @@ namespace UnitTest.Issues.TestProtos {
         }
 
         public override string ToString() {
-          return pb::JsonFormatter.Default.Format(this);
+          return pb::JsonFormatter.ToDiagnosticString(this);
         }
 
         public void WriteTo(pb::CodedOutputStream output) {
@@ -1109,7 +1112,7 @@ namespace UnitTest.Issues.TestProtos {
     public static pb::MessageParser<TestJsonFieldOrdering> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::UnitTest.Issues.TestProtos.UnittestIssues.Descriptor.MessageTypes[6]; }
+      get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[6]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -1164,7 +1167,7 @@ namespace UnitTest.Issues.TestProtos {
     public string O1String {
       get { return o1Case_ == O1OneofCase.O1String ? (string) o1_ : ""; }
       set {
-        o1_ = pb::Preconditions.CheckNotNull(value, "value");
+        o1_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
         o1Case_ = O1OneofCase.O1String;
       }
     }
@@ -1185,7 +1188,7 @@ namespace UnitTest.Issues.TestProtos {
     public string PlainString {
       get { return plainString_; }
       set {
-        plainString_ = pb::Preconditions.CheckNotNull(value, "value");
+        plainString_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
       }
     }
 
@@ -1204,7 +1207,7 @@ namespace UnitTest.Issues.TestProtos {
     public string O2String {
       get { return o2Case_ == O2OneofCase.O2String ? (string) o2_ : ""; }
       set {
-        o2_ = pb::Preconditions.CheckNotNull(value, "value");
+        o2_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
         o2Case_ = O2OneofCase.O2String;
       }
     }
@@ -1260,6 +1263,8 @@ namespace UnitTest.Issues.TestProtos {
       if (PlainString != other.PlainString) return false;
       if (O2Int32 != other.O2Int32) return false;
       if (O2String != other.O2String) return false;
+      if (O1Case != other.O1Case) return false;
+      if (O2Case != other.O2Case) return false;
       return true;
     }
 
@@ -1271,11 +1276,13 @@ namespace UnitTest.Issues.TestProtos {
       if (PlainString.Length != 0) hash ^= PlainString.GetHashCode();
       if (o2Case_ == O2OneofCase.O2Int32) hash ^= O2Int32.GetHashCode();
       if (o2Case_ == O2OneofCase.O2String) hash ^= O2String.GetHashCode();
+      hash ^= (int) o1Case_;
+      hash ^= (int) o2Case_;
       return hash;
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -1395,6 +1402,166 @@ namespace UnitTest.Issues.TestProtos {
 
   }
 
+  [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+  public sealed partial class TestJsonName : pb::IMessage<TestJsonName> {
+    private static readonly pb::MessageParser<TestJsonName> _parser = new pb::MessageParser<TestJsonName>(() => new TestJsonName());
+    public static pb::MessageParser<TestJsonName> Parser { get { return _parser; } }
+
+    public static pbr::MessageDescriptor Descriptor {
+      get { return global::UnitTest.Issues.TestProtos.UnittestIssuesReflection.Descriptor.MessageTypes[7]; }
+    }
+
+    pbr::MessageDescriptor pb::IMessage.Descriptor {
+      get { return Descriptor; }
+    }
+
+    public TestJsonName() {
+      OnConstruction();
+    }
+
+    partial void OnConstruction();
+
+    public TestJsonName(TestJsonName other) : this() {
+      name_ = other.name_;
+      description_ = other.description_;
+      guid_ = other.guid_;
+    }
+
+    public TestJsonName Clone() {
+      return new TestJsonName(this);
+    }
+
+    /// <summary>Field number for the "name" field.</summary>
+    public const int NameFieldNumber = 1;
+    private string name_ = "";
+    /// <summary>
+    ///  Message for testing the effects of the json_name option
+    /// </summary>
+    public string Name {
+      get { return name_; }
+      set {
+        name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
+      }
+    }
+
+    /// <summary>Field number for the "description" field.</summary>
+    public const int DescriptionFieldNumber = 2;
+    private string description_ = "";
+    public string Description {
+      get { return description_; }
+      set {
+        description_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
+      }
+    }
+
+    /// <summary>Field number for the "guid" field.</summary>
+    public const int GuidFieldNumber = 3;
+    private string guid_ = "";
+    public string Guid {
+      get { return guid_; }
+      set {
+        guid_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
+      }
+    }
+
+    public override bool Equals(object other) {
+      return Equals(other as TestJsonName);
+    }
+
+    public bool Equals(TestJsonName other) {
+      if (ReferenceEquals(other, null)) {
+        return false;
+      }
+      if (ReferenceEquals(other, this)) {
+        return true;
+      }
+      if (Name != other.Name) return false;
+      if (Description != other.Description) return false;
+      if (Guid != other.Guid) return false;
+      return true;
+    }
+
+    public override int GetHashCode() {
+      int hash = 1;
+      if (Name.Length != 0) hash ^= Name.GetHashCode();
+      if (Description.Length != 0) hash ^= Description.GetHashCode();
+      if (Guid.Length != 0) hash ^= Guid.GetHashCode();
+      return hash;
+    }
+
+    public override string ToString() {
+      return pb::JsonFormatter.ToDiagnosticString(this);
+    }
+
+    public void WriteTo(pb::CodedOutputStream output) {
+      if (Name.Length != 0) {
+        output.WriteRawTag(10);
+        output.WriteString(Name);
+      }
+      if (Description.Length != 0) {
+        output.WriteRawTag(18);
+        output.WriteString(Description);
+      }
+      if (Guid.Length != 0) {
+        output.WriteRawTag(26);
+        output.WriteString(Guid);
+      }
+    }
+
+    public int CalculateSize() {
+      int size = 0;
+      if (Name.Length != 0) {
+        size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
+      }
+      if (Description.Length != 0) {
+        size += 1 + pb::CodedOutputStream.ComputeStringSize(Description);
+      }
+      if (Guid.Length != 0) {
+        size += 1 + pb::CodedOutputStream.ComputeStringSize(Guid);
+      }
+      return size;
+    }
+
+    public void MergeFrom(TestJsonName other) {
+      if (other == null) {
+        return;
+      }
+      if (other.Name.Length != 0) {
+        Name = other.Name;
+      }
+      if (other.Description.Length != 0) {
+        Description = other.Description;
+      }
+      if (other.Guid.Length != 0) {
+        Guid = other.Guid;
+      }
+    }
+
+    public void MergeFrom(pb::CodedInputStream input) {
+      uint tag;
+      while ((tag = input.ReadTag()) != 0) {
+        switch(tag) {
+          default:
+            input.SkipLastField();
+            break;
+          case 10: {
+            Name = input.ReadString();
+            break;
+          }
+          case 18: {
+            Description = input.ReadString();
+            break;
+          }
+          case 26: {
+            Guid = input.ReadString();
+            break;
+          }
+        }
+      }
+    }
+
+  }
+
   #endregion
 
 }

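A rough sketch of exercising the newly generated TestJsonName message above (not part of the commit; field values and the output comments are illustrative assumptions):

    using Google.Protobuf;
    using UnitTest.Issues.TestProtos;

    static class TestJsonNameExample
    {
        static void Main()
        {
            var message = new TestJsonName { Name = "n", Description = "d", Guid = "g" };

            // Binary round-trip via the generated Parser property.
            byte[] bytes = message.ToByteArray();
            TestJsonName copy = TestJsonName.Parser.ParseFrom(bytes);
            System.Console.WriteLine(message.Equals(copy));   // True

            // ToString now delegates to JsonFormatter.ToDiagnosticString,
            // producing a JSON-style diagnostic rendering of the message.
            System.Console.WriteLine(copy);
        }
    }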
File diffs are limited because there are too many.
+ 159 - 155
csharp/src/Google.Protobuf.Test/TestProtos/UnittestProto3.cs


+ 67 - 28
csharp/src/Google.Protobuf.Test/TestProtos/UnittestWellKnownTypes.cs

@@ -11,7 +11,7 @@ namespace Google.Protobuf.TestProtos {
 
   /// <summary>Holder for reflection information generated from google/protobuf/unittest_well_known_types.proto</summary>
   [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
-  public static partial class UnittestWellKnownTypes {
+  public static partial class UnittestWellKnownTypesReflection {
 
     #region Descriptor
     /// <summary>File descriptor for google/protobuf/unittest_well_known_types.proto</summary>
@@ -20,7 +20,7 @@ namespace Google.Protobuf.TestProtos {
     }
     private static pbr::FileDescriptor descriptor;
 
-    static UnittestWellKnownTypes() {
+    static UnittestWellKnownTypesReflection() {
       byte[] descriptorData = global::System.Convert.FromBase64String(
           string.Concat(
             "Ci9nb29nbGUvcHJvdG9idWYvdW5pdHRlc3Rfd2VsbF9rbm93bl90eXBlcy5w",
@@ -31,7 +31,7 @@ namespace Google.Protobuf.TestProtos {
             "L3Byb3RvYnVmL3NvdXJjZV9jb250ZXh0LnByb3RvGhxnb29nbGUvcHJvdG9i",
             "dWYvc3RydWN0LnByb3RvGh9nb29nbGUvcHJvdG9idWYvdGltZXN0YW1wLnBy",
             "b3RvGhpnb29nbGUvcHJvdG9idWYvdHlwZS5wcm90bxoeZ29vZ2xlL3Byb3Rv",
-            "YnVmL3dyYXBwZXJzLnByb3RvIpEHChJUZXN0V2VsbEtub3duVHlwZXMSJwoJ",
+            "YnVmL3dyYXBwZXJzLnByb3RvIr4HChJUZXN0V2VsbEtub3duVHlwZXMSJwoJ",
             "YW55X2ZpZWxkGAEgASgLMhQuZ29vZ2xlLnByb3RvYnVmLkFueRInCglhcGlf",
             "ZmllbGQYAiABKAsyFC5nb29nbGUucHJvdG9idWYuQXBpEjEKDmR1cmF0aW9u",
             "X2ZpZWxkGAMgASgLMhkuZ29vZ2xlLnByb3RvYnVmLkR1cmF0aW9uEisKC2Vt",
@@ -51,7 +51,8 @@ namespace Google.Protobuf.TestProtos {
             "cm90b2J1Zi5VSW50MzJWYWx1ZRIuCgpib29sX2ZpZWxkGBAgASgLMhouZ29v",
             "Z2xlLnByb3RvYnVmLkJvb2xWYWx1ZRIyCgxzdHJpbmdfZmllbGQYESABKAsy",
             "HC5nb29nbGUucHJvdG9idWYuU3RyaW5nVmFsdWUSMAoLYnl0ZXNfZmllbGQY",
-            "EiABKAsyGy5nb29nbGUucHJvdG9idWYuQnl0ZXNWYWx1ZSKVBwoWUmVwZWF0",
+            "EiABKAsyGy5nb29nbGUucHJvdG9idWYuQnl0ZXNWYWx1ZRIrCgt2YWx1ZV9m",
+            "aWVsZBgTIAEoCzIWLmdvb2dsZS5wcm90b2J1Zi5WYWx1ZSKVBwoWUmVwZWF0",
             "ZWRXZWxsS25vd25UeXBlcxInCglhbnlfZmllbGQYASADKAsyFC5nb29nbGUu",
             "cHJvdG9idWYuQW55EicKCWFwaV9maWVsZBgCIAMoCzIULmdvb2dsZS5wcm90",
             "b2J1Zi5BcGkSMQoOZHVyYXRpb25fZmllbGQYAyADKAsyGS5nb29nbGUucHJv",
@@ -159,13 +160,13 @@ namespace Google.Protobuf.TestProtos {
             "AiABKAsyGy5nb29nbGUucHJvdG9idWYuQnl0ZXNWYWx1ZToCOAFCOQoYY29t",
             "Lmdvb2dsZS5wcm90b2J1Zi50ZXN0UAGqAhpHb29nbGUuUHJvdG9idWYuVGVz",
             "dFByb3Rvc2IGcHJvdG8z"));
-      descriptor = pbr::FileDescriptor.InternalBuildGeneratedFileFrom(descriptorData,
-          new pbr::FileDescriptor[] { global::Google.Protobuf.WellKnownTypes.Proto.Any.Descriptor, global::Google.Protobuf.WellKnownTypes.Proto.Api.Descriptor, global::Google.Protobuf.WellKnownTypes.Proto.Duration.Descriptor, global::Google.Protobuf.WellKnownTypes.Proto.Empty.Descriptor, global::Google.Protobuf.WellKnownTypes.Proto.FieldMask.Descriptor, global::Google.Protobuf.WellKnownTypes.Proto.SourceContext.Descriptor, global::Google.Protobuf.WellKnownTypes.Proto.Struct.Descriptor, global::Google.Protobuf.WellKnownTypes.Proto.Timestamp.Descriptor, global::Google.Protobuf.WellKnownTypes.Proto.Type.Descriptor, global::Google.Protobuf.WellKnownTypes.Wrappers.Descriptor, },
-          new pbr::GeneratedCodeInfo(null, new pbr::GeneratedCodeInfo[] {
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.TestWellKnownTypes), new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, null),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.RepeatedWellKnownTypes), new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, null),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.OneofWellKnownTypes), new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, new[]{ "OneofField" }, null, null),
-            new pbr::GeneratedCodeInfo(typeof(global::Google.Protobuf.TestProtos.MapWellKnownTypes), new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, new pbr::GeneratedCodeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, })
+      descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
+          new pbr::FileDescriptor[] { global::Google.Protobuf.WellKnownTypes.AnyReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.ApiReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.DurationReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.EmptyReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.FieldMaskReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.SourceContextReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.StructReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TimestampReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.TypeReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.WrappersReflection.Descriptor, },
+          new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.TestWellKnownTypes), global::Google.Protobuf.TestProtos.TestWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField", "ValueField" }, null, null, null),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.RepeatedWellKnownTypes), global::Google.Protobuf.TestProtos.RepeatedWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, null),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.OneofWellKnownTypes), global::Google.Protobuf.TestProtos.OneofWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, new[]{ "OneofField" }, null, null),
+            new pbr::GeneratedClrTypeInfo(typeof(global::Google.Protobuf.TestProtos.MapWellKnownTypes), global::Google.Protobuf.TestProtos.MapWellKnownTypes.Parser, new[]{ "AnyField", "ApiField", "DurationField", "EmptyField", "FieldMaskField", "SourceContextField", "StructField", "TimestampField", "TypeField", "DoubleField", "FloatField", "Int64Field", "Uint64Field", "Int32Field", "Uint32Field", "BoolField", "StringField", "BytesField" }, null, null, new pbr::GeneratedClrTypeInfo[] { null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, })
           }));
     }
     #endregion
@@ -183,7 +184,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<TestWellKnownTypes> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypes.Descriptor.MessageTypes[0]; }
+      get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypesReflection.Descriptor.MessageTypes[0]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -215,6 +216,7 @@ namespace Google.Protobuf.TestProtos {
       BoolField = other.BoolField;
       StringField = other.StringField;
       BytesField = other.BytesField;
+      ValueField = other.valueField_ != null ? other.ValueField.Clone() : null;
     }
 
     public TestWellKnownTypes Clone() {
@@ -410,6 +412,19 @@ namespace Google.Protobuf.TestProtos {
       }
     }
 
+    /// <summary>Field number for the "value_field" field.</summary>
+    public const int ValueFieldFieldNumber = 19;
+    private global::Google.Protobuf.WellKnownTypes.Value valueField_;
+    /// <summary>
+    ///  Part of struct, but useful to be able to test separately
+    /// </summary>
+    public global::Google.Protobuf.WellKnownTypes.Value ValueField {
+      get { return valueField_; }
+      set {
+        valueField_ = value;
+      }
+    }
+
     public override bool Equals(object other) {
       return Equals(other as TestWellKnownTypes);
     }
@@ -439,6 +454,7 @@ namespace Google.Protobuf.TestProtos {
       if (BoolField != other.BoolField) return false;
       if (StringField != other.StringField) return false;
       if (BytesField != other.BytesField) return false;
+      if (!object.Equals(ValueField, other.ValueField)) return false;
       return true;
     }
 
@@ -462,11 +478,12 @@ namespace Google.Protobuf.TestProtos {
       if (boolField_ != null) hash ^= BoolField.GetHashCode();
       if (stringField_ != null) hash ^= StringField.GetHashCode();
       if (bytesField_ != null) hash ^= BytesField.GetHashCode();
+      if (valueField_ != null) hash ^= ValueField.GetHashCode();
       return hash;
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -533,6 +550,10 @@ namespace Google.Protobuf.TestProtos {
       if (bytesField_ != null) {
         _single_bytesField_codec.WriteTagAndValue(output, BytesField);
       }
+      if (valueField_ != null) {
+        output.WriteRawTag(154, 1);
+        output.WriteMessage(ValueField);
+      }
     }
 
     public int CalculateSize() {
@@ -591,6 +612,9 @@ namespace Google.Protobuf.TestProtos {
       if (bytesField_ != null) {
         size += _single_bytesField_codec.CalculateSizeWithTag(BytesField);
       }
+      if (valueField_ != null) {
+        size += 2 + pb::CodedOutputStream.ComputeMessageSize(ValueField);
+      }
       return size;
     }
 
@@ -697,6 +721,12 @@ namespace Google.Protobuf.TestProtos {
           BytesField = other.BytesField;
         }
       }
+      if (other.valueField_ != null) {
+        if (valueField_ == null) {
+          valueField_ = new global::Google.Protobuf.WellKnownTypes.Value();
+        }
+        ValueField.MergeFrom(other.ValueField);
+      }
     }
 
     public void MergeFrom(pb::CodedInputStream input) {
@@ -832,6 +862,13 @@ namespace Google.Protobuf.TestProtos {
             }
             break;
           }
+          case 154: {
+            if (valueField_ == null) {
+              valueField_ = new global::Google.Protobuf.WellKnownTypes.Value();
+            }
+            input.ReadMessage(valueField_);
+            break;
+          }
         }
       }
     }
@@ -847,7 +884,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<RepeatedWellKnownTypes> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypes.Descriptor.MessageTypes[1]; }
+      get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypesReflection.Descriptor.MessageTypes[1]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -1106,7 +1143,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -1268,7 +1305,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<OneofWellKnownTypes> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypes.Descriptor.MessageTypes[2]; }
+      get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypesReflection.Descriptor.MessageTypes[2]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -1596,6 +1633,7 @@ namespace Google.Protobuf.TestProtos {
       if (BoolField != other.BoolField) return false;
       if (StringField != other.StringField) return false;
       if (BytesField != other.BytesField) return false;
+      if (OneofFieldCase != other.OneofFieldCase) return false;
       return true;
     }
 
@@ -1619,11 +1657,12 @@ namespace Google.Protobuf.TestProtos {
       if (oneofFieldCase_ == OneofFieldOneofCase.BoolField) hash ^= BoolField.GetHashCode();
       if (oneofFieldCase_ == OneofFieldOneofCase.StringField) hash ^= StringField.GetHashCode();
       if (oneofFieldCase_ == OneofFieldOneofCase.BytesField) hash ^= BytesField.GetHashCode();
+      hash ^= (int) oneofFieldCase_;
       return hash;
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {
@@ -1955,7 +1994,7 @@ namespace Google.Protobuf.TestProtos {
     public static pb::MessageParser<MapWellKnownTypes> Parser { get { return _parser; } }
 
     public static pbr::MessageDescriptor Descriptor {
-      get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypes.Descriptor.MessageTypes[3]; }
+      get { return global::Google.Protobuf.TestProtos.UnittestWellKnownTypesReflection.Descriptor.MessageTypes[3]; }
     }
 
     pbr::MessageDescriptor pb::IMessage.Descriptor {
@@ -2078,7 +2117,7 @@ namespace Google.Protobuf.TestProtos {
     public const int DoubleFieldFieldNumber = 10;
     private static readonly pbc::MapField<int, double?>.Codec _map_doubleField_codec
         = new pbc::MapField<int, double?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<double>(18), 82);
-    private readonly pbc::MapField<int, double?> doubleField_ = new pbc::MapField<int, double?>(true);
+    private readonly pbc::MapField<int, double?> doubleField_ = new pbc::MapField<int, double?>();
     public pbc::MapField<int, double?> DoubleField {
       get { return doubleField_; }
     }
@@ -2087,7 +2126,7 @@ namespace Google.Protobuf.TestProtos {
     public const int FloatFieldFieldNumber = 11;
     private static readonly pbc::MapField<int, float?>.Codec _map_floatField_codec
         = new pbc::MapField<int, float?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<float>(18), 90);
-    private readonly pbc::MapField<int, float?> floatField_ = new pbc::MapField<int, float?>(true);
+    private readonly pbc::MapField<int, float?> floatField_ = new pbc::MapField<int, float?>();
     public pbc::MapField<int, float?> FloatField {
       get { return floatField_; }
     }
@@ -2096,7 +2135,7 @@ namespace Google.Protobuf.TestProtos {
     public const int Int64FieldFieldNumber = 12;
     private static readonly pbc::MapField<int, long?>.Codec _map_int64Field_codec
         = new pbc::MapField<int, long?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<long>(18), 98);
-    private readonly pbc::MapField<int, long?> int64Field_ = new pbc::MapField<int, long?>(true);
+    private readonly pbc::MapField<int, long?> int64Field_ = new pbc::MapField<int, long?>();
     public pbc::MapField<int, long?> Int64Field {
       get { return int64Field_; }
     }
@@ -2105,7 +2144,7 @@ namespace Google.Protobuf.TestProtos {
     public const int Uint64FieldFieldNumber = 13;
     private static readonly pbc::MapField<int, ulong?>.Codec _map_uint64Field_codec
         = new pbc::MapField<int, ulong?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<ulong>(18), 106);
-    private readonly pbc::MapField<int, ulong?> uint64Field_ = new pbc::MapField<int, ulong?>(true);
+    private readonly pbc::MapField<int, ulong?> uint64Field_ = new pbc::MapField<int, ulong?>();
     public pbc::MapField<int, ulong?> Uint64Field {
       get { return uint64Field_; }
     }
@@ -2114,7 +2153,7 @@ namespace Google.Protobuf.TestProtos {
     public const int Int32FieldFieldNumber = 14;
     private static readonly pbc::MapField<int, int?>.Codec _map_int32Field_codec
         = new pbc::MapField<int, int?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<int>(18), 114);
-    private readonly pbc::MapField<int, int?> int32Field_ = new pbc::MapField<int, int?>(true);
+    private readonly pbc::MapField<int, int?> int32Field_ = new pbc::MapField<int, int?>();
     public pbc::MapField<int, int?> Int32Field {
       get { return int32Field_; }
     }
@@ -2123,7 +2162,7 @@ namespace Google.Protobuf.TestProtos {
     public const int Uint32FieldFieldNumber = 15;
     private static readonly pbc::MapField<int, uint?>.Codec _map_uint32Field_codec
         = new pbc::MapField<int, uint?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<uint>(18), 122);
-    private readonly pbc::MapField<int, uint?> uint32Field_ = new pbc::MapField<int, uint?>(true);
+    private readonly pbc::MapField<int, uint?> uint32Field_ = new pbc::MapField<int, uint?>();
     public pbc::MapField<int, uint?> Uint32Field {
       get { return uint32Field_; }
     }
@@ -2132,7 +2171,7 @@ namespace Google.Protobuf.TestProtos {
     public const int BoolFieldFieldNumber = 16;
     private static readonly pbc::MapField<int, bool?>.Codec _map_boolField_codec
         = new pbc::MapField<int, bool?>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForStructWrapper<bool>(18), 130);
-    private readonly pbc::MapField<int, bool?> boolField_ = new pbc::MapField<int, bool?>(true);
+    private readonly pbc::MapField<int, bool?> boolField_ = new pbc::MapField<int, bool?>();
     public pbc::MapField<int, bool?> BoolField {
       get { return boolField_; }
     }
@@ -2141,7 +2180,7 @@ namespace Google.Protobuf.TestProtos {
     public const int StringFieldFieldNumber = 17;
     private static readonly pbc::MapField<int, string>.Codec _map_stringField_codec
         = new pbc::MapField<int, string>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForClassWrapper<string>(18), 138);
-    private readonly pbc::MapField<int, string> stringField_ = new pbc::MapField<int, string>(true);
+    private readonly pbc::MapField<int, string> stringField_ = new pbc::MapField<int, string>();
     public pbc::MapField<int, string> StringField {
       get { return stringField_; }
     }
@@ -2150,7 +2189,7 @@ namespace Google.Protobuf.TestProtos {
     public const int BytesFieldFieldNumber = 18;
     private static readonly pbc::MapField<int, pb::ByteString>.Codec _map_bytesField_codec
         = new pbc::MapField<int, pb::ByteString>.Codec(pb::FieldCodec.ForInt32(8), pb::FieldCodec.ForClassWrapper<pb::ByteString>(18), 146);
-    private readonly pbc::MapField<int, pb::ByteString> bytesField_ = new pbc::MapField<int, pb::ByteString>(true);
+    private readonly pbc::MapField<int, pb::ByteString> bytesField_ = new pbc::MapField<int, pb::ByteString>();
     public pbc::MapField<int, pb::ByteString> BytesField {
       get { return bytesField_; }
     }
@@ -2211,7 +2250,7 @@ namespace Google.Protobuf.TestProtos {
     }
 
     public override string ToString() {
-      return pb::JsonFormatter.Default.Format(this);
+      return pb::JsonFormatter.ToDiagnosticString(this);
     }
 
     public void WriteTo(pb::CodedOutputStream output) {

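A minimal sketch of using the value_field newly added to TestWellKnownTypes above (not part of the commit; the string literal is an assumption):

    using Google.Protobuf;
    using Google.Protobuf.TestProtos;
    using Google.Protobuf.WellKnownTypes;

    static class ValueFieldExample
    {
        static void Main()
        {
            // The field holds a google.protobuf.Value; it is written as an ordinary
            // length-delimited message (the WriteRawTag(154, 1) call in the diff above).
            var message = new TestWellKnownTypes { ValueField = new Value { StringValue = "text" } };

            var parsed = TestWellKnownTypes.Parser.ParseFrom(message.ToByteArray());
            System.Console.WriteLine(parsed.ValueField.StringValue);   // text
        }
    }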
+ 50 - 0
csharp/src/Google.Protobuf.Test/WellKnownTypes/AnyTest.cs

@@ -46,6 +46,24 @@ namespace Google.Protobuf.WellKnownTypes
             Assert.AreEqual(message.CalculateSize(), any.Value.Length);
         }
 
+        [Test]
+        public void Pack_WithCustomPrefix()
+        {
+            var message = SampleMessages.CreateFullTestAllTypes();
+            var any = Any.Pack(message, "foo.bar/baz");
+            Assert.AreEqual("foo.bar/baz/protobuf_unittest.TestAllTypes", any.TypeUrl);
+            Assert.AreEqual(message.CalculateSize(), any.Value.Length);
+        }
+
+        [Test]
+        public void Pack_WithCustomPrefixTrailingSlash()
+        {
+            var message = SampleMessages.CreateFullTestAllTypes();
+            var any = Any.Pack(message, "foo.bar/baz/");
+            Assert.AreEqual("foo.bar/baz/protobuf_unittest.TestAllTypes", any.TypeUrl);
+            Assert.AreEqual(message.CalculateSize(), any.Value.Length);
+        }
+
         [Test]
         public void Unpack_WrongType()
         {
@@ -62,5 +80,37 @@ namespace Google.Protobuf.WellKnownTypes
             var unpacked = any.Unpack<TestAllTypes>();
             Assert.AreEqual(message, unpacked);
         }
+
+        [Test]
+        public void Unpack_CustomPrefix_Success()
+        {
+            var message = SampleMessages.CreateFullTestAllTypes();
+            var any = Any.Pack(message, "foo.bar/baz");
+            var unpacked = any.Unpack<TestAllTypes>();
+            Assert.AreEqual(message, unpacked);
+        }
+
+        [Test]
+        public void ToString_WithValues()
+        {
+            var message = SampleMessages.CreateFullTestAllTypes();
+            var any = Any.Pack(message);
+            var text = any.ToString();
+            Assert.That(text, Is.StringContaining("\"@value\": \"" + message.ToByteString().ToBase64() + "\""));
+        }
+
+        [Test]
+        public void ToString_Empty()
+        {
+            var any = new Any();
+            Assert.AreEqual("{ \"@type\": \"\", \"@value\": \"\" }", any.ToString());
+        }
+
+        [Test]
+        public void ToString_MessageContainingAny()
+        {
+            var message = new TestWellKnownTypes { AnyField = new Any() };
+            Assert.AreEqual("{ \"anyField\": { \"@type\": \"\", \"@value\": \"\" } }", message.ToString());
+        }
     }
 }

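A sketch of the Any custom-prefix behaviour covered by the tests above, assuming the proto3 TestAllTypes message from the test protos; the values are illustrative:

    using Google.Protobuf.TestProtos;
    using Google.Protobuf.WellKnownTypes;

    static class AnyPrefixExample
    {
        static void Main()
        {
            var message = new TestAllTypes { SingleInt32 = 42 };

            // The new Pack overload accepts a type URL prefix; a trailing slash is optional.
            Any any = Any.Pack(message, "foo.bar/baz");
            System.Console.WriteLine(any.TypeUrl);   // foo.bar/baz/protobuf_unittest.TestAllTypes

            // Unpacking succeeds regardless of the prefix, as Unpack_CustomPrefix_Success shows.
            TestAllTypes unpacked = any.Unpack<TestAllTypes>();
            System.Console.WriteLine(unpacked.SingleInt32);   // 42
        }
    }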
+ 33 - 5
csharp/src/Google.Protobuf.Test/WellKnownTypes/DurationTest.cs

@@ -50,11 +50,6 @@ namespace Google.Protobuf.WellKnownTypes
             // Rounding is towards 0
             Assert.AreEqual(TimeSpan.FromTicks(2), new Duration { Nanos = 250 }.ToTimeSpan());
             Assert.AreEqual(TimeSpan.FromTicks(-2), new Duration { Nanos = -250 }.ToTimeSpan());
-
-            // Non-normalized durations
-            Assert.AreEqual(TimeSpan.FromSeconds(3), new Duration { Seconds = 1, Nanos = 2 * Duration.NanosecondsPerSecond }.ToTimeSpan());
-            Assert.AreEqual(TimeSpan.FromSeconds(1), new Duration { Seconds = 3, Nanos = -2 * Duration.NanosecondsPerSecond }.ToTimeSpan());
-            Assert.AreEqual(TimeSpan.FromSeconds(-1), new Duration { Seconds = 1, Nanos = -2 * Duration.NanosecondsPerSecond }.ToTimeSpan());
         }
 
         [Test]
@@ -100,5 +95,38 @@ namespace Google.Protobuf.WellKnownTypes
             Assert.AreEqual(new Duration { Seconds = 1 }, Duration.FromTimeSpan(TimeSpan.FromSeconds(1)));
             Assert.AreEqual(new Duration { Nanos = Duration.NanosecondsPerTick }, Duration.FromTimeSpan(TimeSpan.FromTicks(1)));
         }
+
+        [Test]
+        [TestCase(0, Duration.MaxNanoseconds + 1)]
+        [TestCase(0, Duration.MinNanoseconds - 1)]
+        [TestCase(Duration.MinSeconds - 1, 0)]
+        [TestCase(Duration.MaxSeconds + 1, 0)]
+        [TestCase(1, -1)]
+        [TestCase(-1, 1)]
+        public void ToTimeSpan_Invalid(long seconds, int nanoseconds)
+        {
+            var duration = new Duration { Seconds = seconds, Nanos = nanoseconds };
+            Assert.Throws<InvalidOperationException>(() => duration.ToTimeSpan());
+        }
+
+        [Test]
+        [TestCase(0, Duration.MaxNanoseconds)]
+        [TestCase(0, Duration.MinNanoseconds)]
+        [TestCase(Duration.MinSeconds, Duration.MinNanoseconds)]
+        [TestCase(Duration.MaxSeconds, Duration.MaxNanoseconds)]
+        public void ToTimeSpan_Valid(long seconds, int nanoseconds)
+        {
+            // Only testing that these values don't throw, unlike their similar tests in ToTimeSpan_Invalid
+            var duration = new Duration { Seconds = seconds, Nanos = nanoseconds };
+            duration.ToTimeSpan();
+        }
+
+        [Test]
+        public void ToString_NonNormalized()
+        {
+            // Just a single example should be sufficient...
+            var duration = new Duration { Seconds = 1, Nanos = -1 };
+            Assert.AreEqual("{ \"@warning\": \"Invalid Duration\", \"seconds\": \"1\", \"nanos\": -1 }", duration.ToString());
+        }
     }
 }

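A sketch of the stricter Duration conversions exercised above (the values are illustrative assumptions):

    using System;
    using Google.Protobuf.WellKnownTypes;

    static class DurationExample
    {
        static void Main()
        {
            // Normalized values convert cleanly in both directions.
            Duration d = Duration.FromTimeSpan(TimeSpan.FromSeconds(1.5));
            Console.WriteLine($"{d.Seconds}s {d.Nanos}ns");   // 1s 500000000ns

            // Non-normalized values (here, seconds and nanos with opposite signs)
            // now throw from ToTimeSpan instead of being silently normalized.
            var invalid = new Duration { Seconds = 1, Nanos = -1 };
            try { invalid.ToTimeSpan(); }
            catch (InvalidOperationException) { Console.WriteLine("invalid duration"); }
        }
    }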
+ 62 - 0
csharp/src/Google.Protobuf.Test/WellKnownTypes/FieldMaskTest.cs

@@ -0,0 +1,62 @@
+#region Copyright notice and license
+// Protocol Buffers - Google's data interchange format
+// Copyright 2016 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#endregion
+
+
+using NUnit.Framework;
+
+namespace Google.Protobuf.WellKnownTypes
+{
+    public class FieldMaskTest
+    {
+        [Test]
+        [TestCase("foo__bar")]
+        [TestCase("foo_3_ar")]
+        [TestCase("fooBar")]
+        public void ToString_Invalid(string input)
+        {
+            var mask = new FieldMask { Paths = { input } };
+            var text = mask.ToString();
+            // More specific test below
+            Assert.That(text, Is.StringContaining("@warning"));
+            Assert.That(text, Is.StringContaining(input));
+        }
+
+        [Test]
+        public void ToString_Invalid_Precise()
+        {
+            var mask = new FieldMask { Paths = { "x", "foo__bar", @"x\y" } };
+            Assert.AreEqual(
+                "{ \"@warning\": \"Invalid FieldMask\", \"paths\": [ \"x\", \"foo__bar\", \"x\\\\y\" ] }",
+                mask.ToString());
+        }
+    }
+}

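A sketch of the FieldMask diagnostic formatting tested above (the paths are chosen to match the test cases; the expected output comment is inferred from ToString_Invalid_Precise):

    using Google.Protobuf.WellKnownTypes;

    static class FieldMaskExample
    {
        static void Main()
        {
            // Paths that cannot be represented in the JSON form (for example "foo__bar")
            // yield a diagnostic rendering with an "@warning" entry instead of throwing.
            var mask = new FieldMask { Paths = { "x", "foo__bar" } };
            System.Console.WriteLine(mask);
            // { "@warning": "Invalid FieldMask", "paths": [ "x", "foo__bar" ] }
        }
    }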
+ 31 - 0
csharp/src/Google.Protobuf.Test/WellKnownTypes/TimestampTest.cs

@@ -61,6 +61,29 @@ namespace Google.Protobuf.WellKnownTypes
             Assert.AreEqual(new DateTime(1969, 12, 31, 23, 59, 59).AddMilliseconds(1), t2.ToDateTime());
         }
 
+        [Test]
+        [TestCase(Timestamp.UnixSecondsAtBclMinValue - 1, Timestamp.MaxNanos)]
+        [TestCase(Timestamp.UnixSecondsAtBclMaxValue + 1, 0)]
+        [TestCase(0, -1)]
+        [TestCase(0, Timestamp.MaxNanos + 1)]
+        public void ToDateTime_OutOfRange(long seconds, int nanoseconds)
+        {
+            var value = new Timestamp { Seconds = seconds, Nanos = nanoseconds };
+            Assert.Throws<InvalidOperationException>(() => value.ToDateTime());
+        }
+
+        // 1ns larger or smaller than the above values
+        [Test]
+        [TestCase(Timestamp.UnixSecondsAtBclMinValue, 0)]
+        [TestCase(Timestamp.UnixSecondsAtBclMaxValue, Timestamp.MaxNanos)]
+        [TestCase(0, 0)]
+        [TestCase(0, Timestamp.MaxNanos)]
+        public void ToDateTime_ValidBoundaries(long seconds, int nanoseconds)
+        {
+            var value = new Timestamp { Seconds = seconds, Nanos = nanoseconds };
+            value.ToDateTime();
+        }
+
         private static void AssertRoundtrip(Timestamp timestamp, DateTime dateTime)
         {
             Assert.AreEqual(timestamp, Timestamp.FromDateTime(dateTime));
@@ -80,5 +103,13 @@ namespace Google.Protobuf.WellKnownTypes
             Assert.AreEqual(t1, t2 + difference);
             Assert.AreEqual(t2, t1 - difference);
         }
+
+        [Test]
+        public void ToString_NonNormalized()
+        {
+            // Just a single example should be sufficient...
+            var timestamp = new Timestamp { Seconds = 1, Nanos = -1 };
+            Assert.AreEqual("{ \"@warning\": \"Invalid Timestamp\", \"seconds\": \"1\", \"nanos\": -1 }", timestamp.ToString());
+        }
     }
 }

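A sketch of the Timestamp range checking exercised above (the date value is an illustrative assumption):

    using System;
    using Google.Protobuf.WellKnownTypes;

    static class TimestampExample
    {
        static void Main()
        {
            // Round-trip between a UTC DateTime and a Timestamp.
            var dateTime = new DateTime(2016, 1, 1, 0, 0, 0, DateTimeKind.Utc);
            Timestamp ts = Timestamp.FromDateTime(dateTime);
            Console.WriteLine(ts.ToDateTime() == dateTime);   // True

            // Values outside the BCL DateTime range, or with invalid nanos, now throw.
            var invalid = new Timestamp { Seconds = 0, Nanos = -1 };
            try { invalid.ToDateTime(); }
            catch (InvalidOperationException) { Console.WriteLine("out of range"); }
        }
    }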
+ 80 - 8
csharp/src/Google.Protobuf.Test/WellKnownTypes/WrappersTest.cs

@@ -148,9 +148,35 @@ namespace Google.Protobuf.WellKnownTypes
             Assert.AreEqual("Second", message.StringField[1]);
         }
 
+        [Test]
+        public void RepeatedWrappersBinaryFormat()
+        {
+            // At one point we accidentally used a packed format for repeated wrappers, which is wrong (and weird).
+            // This test is just to prove that we use the right format.
+
+            var rawOutput = new MemoryStream();
+            var output = new CodedOutputStream(rawOutput);
+            // Write a value of 5
+            output.WriteTag(RepeatedWellKnownTypes.Int32FieldFieldNumber, WireFormat.WireType.LengthDelimited);
+            output.WriteLength(2);
+            output.WriteTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint);
+            output.WriteInt32(5);
+            // Write a value of 0 (empty message)
+            output.WriteTag(RepeatedWellKnownTypes.Int32FieldFieldNumber, WireFormat.WireType.LengthDelimited);
+            output.WriteLength(0);
+            output.Flush();
+            var expectedBytes = rawOutput.ToArray();
+
+            var message = new RepeatedWellKnownTypes { Int32Field = { 5, 0 } };
+            var actualBytes = message.ToByteArray();
+            Assert.AreEqual(expectedBytes, actualBytes);
+        }
+
         [Test]
         public void MapWrappersSerializeDeserialize()
         {
+            // Note: no null values here, as they are prohibited in map fields
+            // (despite being representable).
             var message = new MapWellKnownTypes
             {
                 BoolField = { { 10, false }, { 20, true } },
@@ -158,13 +184,12 @@ namespace Google.Protobuf.WellKnownTypes
                     { -1, ByteString.CopyFrom(1, 2, 3) },
                     { 10, ByteString.CopyFrom(4, 5, 6) },
                     { 1000, ByteString.Empty },
-                    { 10000, null }
                 },
                 DoubleField = { { 1, 12.5 }, { 10, -1.5 }, { 20, 0d } },
                 FloatField = { { 2, 123.25f }, { 3, -20f }, { 4, 0f } },
                 Int32Field = { { 5, int.MaxValue }, { 6, int.MinValue }, { 7, 0 } },
                 Int64Field = { { 8, long.MaxValue }, { 9, long.MinValue }, { 10, 0L } },
-                StringField = { { 11, "First" }, { 12, "Second" }, { 13, "" }, { 14, null } },
+                StringField = { { 11, "First" }, { 12, "Second" }, { 13, "" } },
                 Uint32Field = { { 15, uint.MaxValue }, { 16, uint.MinValue }, { 17, 0U } },
                 Uint64Field = { { 18, ulong.MaxValue }, { 19, ulong.MinValue }, { 20, 0UL } },
             };
@@ -224,13 +249,11 @@ namespace Google.Protobuf.WellKnownTypes
         [Test]
         public void Reflection_MapFields()
         {
-            // Just a single example... note that we can't have a null value here
-            var message = new MapWellKnownTypes { Int32Field = { { 1, 2 }, { 3, null } } };
+            // Just a single example... note that we can't have a null value here despite the value type being int?
+            var message = new MapWellKnownTypes { Int32Field = { { 1, 2 } } };
             var fields = MapWellKnownTypes.Descriptor.Fields;
             var dictionary = (IDictionary) fields[MapWellKnownTypes.Int32FieldFieldNumber].Accessor.GetValue(message);
             Assert.AreEqual(2, dictionary[1]);
-            Assert.IsNull(dictionary[3]);
-            Assert.IsTrue(dictionary.Contains(3));
         }
 
         [Test]
@@ -296,9 +319,10 @@ namespace Google.Protobuf.WellKnownTypes
 
         // Merging is odd with wrapper types, due to the way that default values aren't emitted in
         // the binary stream. In fact we cheat a little bit - a message with an explicitly present default
-        // value will have that default value ignored.
+        // value will have that default value ignored. See issue 615. Fixing this would require significant upheaval to
+        // the FieldCodec side of things.
         [Test]
-        public void MergingCornerCase()
+        public void MergingStreamExplicitValue()
         {
             var message = new TestWellKnownTypes { Int32Field = 5 };
 
@@ -320,9 +344,47 @@ namespace Google.Protobuf.WellKnownTypes
 
             message.MergeFrom(bytes);
             // A normal implementation would have 0 now, as the explicit default would have overwritten the 5.
+            // With the FieldCodec for Nullable<int>, we can't tell the difference between an implicit 0 and an explicit 0.
+            Assert.AreEqual(5, message.Int32Field);
+        }
+
+        [Test]
+        public void MergingStreamNoValue()
+        {
+            var message = new TestWellKnownTypes { Int32Field = 5 };
+
+            // Create a byte array with an Int32 field, but with no value.
+            var bytes = new TestWellKnownTypes { Int32Field = 0 }.ToByteArray();
+            Assert.AreEqual(2, bytes.Length); // The tag for Int32Field is a single byte, then a byte indicating a 0-length message.
+            message.MergeFrom(bytes);
+
+            // The "implicit" 0 did *not* overwrite the value.
+            // (This is the correct behaviour.)
             Assert.AreEqual(5, message.Int32Field);
         }
 
+        // All permutations of origin/merging value being null, zero (default) or non-default.
+        // As this is the in-memory version, we don't need to worry about the difference between implicit and explicit 0.
+        [Test]
+        [TestCase(null, null, null)]
+        [TestCase(null, 0, 0)]
+        [TestCase(null, 5, 5)]
+        [TestCase(0, null, 0)]
+        [TestCase(0, 0, 0)]
+        [TestCase(0, 5, 5)]
+        [TestCase(5, null, 5)]
+        [TestCase(5, 0, 5)]
+        [TestCase(5, 10, 10)]
+        public void MergingMessageWithZero(int? originValue, int? mergingValue, int? expectedResult)
+        {
+            // This differs from the MergingStreamExplicitValue case because when we merge message *objects*,
+            // we ignore default values from the "source".
+            var message1 = new TestWellKnownTypes { Int32Field = originValue };
+            var message2 = new TestWellKnownTypes { Int32Field = mergingValue };
+            message1.MergeFrom(message2);
+            Assert.AreEqual(expectedResult, message1.Int32Field);
+        }
+
         [Test]
         public void UnknownFieldInWrapper()
         {
@@ -345,5 +407,15 @@ namespace Google.Protobuf.WellKnownTypes
             var message = TestWellKnownTypes.Parser.ParseFrom(stream);
             Assert.AreEqual(6, message.Int32Field);
         }
+
+        [Test]
+        public void ClearWithReflection()
+        {
+            // String and Bytes are the tricky ones here, as the CLR type of the property
+            // is the same between the wrapper and non-wrapper types.
+            var message = new TestWellKnownTypes { StringField = "foo" };
+            TestWellKnownTypes.Descriptor.Fields[TestWellKnownTypes.StringFieldFieldNumber].Accessor.Clear(message);
+            Assert.IsNull(message.StringField);
+        }
     }
 }

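A sketch of the in-memory wrapper merge semantics pinned down by MergingMessageWithZero above (the values mirror two of the test cases):

    using Google.Protobuf.TestProtos;

    static class WrapperMergeExample
    {
        static void Main()
        {
            // Merging message objects ignores default (zero) wrapper values in the source...
            var target = new TestWellKnownTypes { Int32Field = 5 };
            target.MergeFrom(new TestWellKnownTypes { Int32Field = 0 });
            System.Console.WriteLine(target.Int32Field);   // 5

            // ...while a non-default value in the source does overwrite the target.
            target.MergeFrom(new TestWellKnownTypes { Int32Field = 10 });
            System.Console.WriteLine(target.Int32Field);   // 10
        }
    }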
+ 3 - 3
csharp/src/Google.Protobuf.sln

@@ -1,7 +1,7 @@
 Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio 2013
-VisualStudioVersion = 12.0.31101.0
-MinimumVisualStudioVersion = 10.0.40219.1
+# Visual Studio 2015
+VisualStudioVersion = 14.0.24720.0
+MinimumVisualStudioVersion = 14.0.24720.0
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.Protobuf", "Google.Protobuf\Google.Protobuf.csproj", "{6908BDCE-D925-43F3-94AC-A531E6DF2591}"
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Google.Protobuf.Test", "Google.Protobuf.Test\Google.Protobuf.Test.csproj", "{DD01ED24-3750-4567-9A23-1DB676A15610}"

+ 3 - 3
csharp/src/Google.Protobuf/ByteString.cs

@@ -50,13 +50,13 @@ namespace Google.Protobuf
         /// <summary>
         /// Unsafe operations that can cause IO Failure and/or other catastrophic side-effects.
         /// </summary>
-        public static class Unsafe
+        internal static class Unsafe
         {
             /// <summary>
             /// Constructs a new ByteString from the given byte array. The array is
             /// *not* copied, and must not be modified after this constructor is called.
             /// </summary>
-            public static ByteString FromBytes(byte[] bytes)
+            internal static ByteString FromBytes(byte[] bytes)
             {
                 return new ByteString(bytes);
             }
@@ -65,7 +65,7 @@ namespace Google.Protobuf
             /// Provides direct, unrestricted access to the bytes contained in this instance.
             /// You must not modify or resize the byte array returned by this method.
             /// </summary>
-            public static byte[] GetBuffer(ByteString bytes)
+            internal static byte[] GetBuffer(ByteString bytes)
             {
                 return bytes.bytes;
             }

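With ByteString.Unsafe made internal above, external code is limited to the copying APIs; a minimal sketch of the supported path (the array contents are an assumption):

    using Google.Protobuf;

    static class ByteStringExample
    {
        static void Main()
        {
            byte[] data = { 1, 2, 3 };

            // CopyFrom always copies, so mutating the source array afterwards cannot
            // corrupt the immutable ByteString, which is exactly the guarantee the
            // Unsafe helpers gave up.
            ByteString bytes = ByteString.CopyFrom(data);
            data[0] = 99;
            System.Console.WriteLine(bytes[0]);   // 1
        }
    }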
+ 69 - 13
csharp/src/Google.Protobuf/CodedInputStream.cs

@@ -51,8 +51,14 @@ namespace Google.Protobuf
     /// and <see cref="MapField{TKey, TValue}"/> to serialize such fields.
     /// </para>
     /// </remarks>
-    public sealed class CodedInputStream
+    public sealed class CodedInputStream : IDisposable
     {
+        /// <summary>
+        /// Whether to leave the underlying stream open when disposing of this stream.
+        /// This is always true when there's no stream.
+        /// </summary>
+        private readonly bool leaveOpen;
+
         /// <summary>
         /// Buffer of data read from the stream or provided at construction time.
         /// </summary>
@@ -115,15 +121,15 @@ namespace Google.Protobuf
         /// <summary>
         /// Creates a new CodedInputStream reading data from the given byte array.
         /// </summary>
-        public CodedInputStream(byte[] buffer) : this(null, Preconditions.CheckNotNull(buffer, "buffer"), 0, buffer.Length)
+        public CodedInputStream(byte[] buffer) : this(null, ProtoPreconditions.CheckNotNull(buffer, "buffer"), 0, buffer.Length)
         {            
         }
 
         /// <summary>
-        /// Creates a new CodedInputStream that reads from the given byte array slice.
+        /// Creates a new <see cref="CodedInputStream"/> that reads from the given byte array slice.
         /// </summary>
         public CodedInputStream(byte[] buffer, int offset, int length)
-            : this(null, Preconditions.CheckNotNull(buffer, "buffer"), offset, offset + length)
+            : this(null, ProtoPreconditions.CheckNotNull(buffer, "buffer"), offset, offset + length)
         {            
             if (offset < 0 || offset > buffer.Length)
             {
@@ -136,13 +142,27 @@ namespace Google.Protobuf
         }
 
         /// <summary>
-        /// Creates a new CodedInputStream reading data from the given stream.
+        /// Creates a new <see cref="CodedInputStream"/> reading data from the given stream, which will be disposed
+        /// when the returned object is disposed.
         /// </summary>
-        public CodedInputStream(Stream input) : this(input, new byte[BufferSize], 0, 0)
+        /// <param name="input">The stream to read from.</param>
+        public CodedInputStream(Stream input) : this(input, false)
         {
-            Preconditions.CheckNotNull(input, "input");
         }
 
+        /// <summary>
+        /// Creates a new <see cref="CodedInputStream"/> reading data from the given stream.
+        /// </summary>
+        /// <param name="input">The stream to read from.</param>
+        /// <param name="leaveOpen"><c>true</c> to leave <paramref name="input"/> open when the returned
+        /// <see cref="CodedInputStream"/> is disposed; <c>false</c> to dispose of the given stream when the
+        /// returned object is disposed.</param>
+        public CodedInputStream(Stream input, bool leaveOpen)
+            : this(ProtoPreconditions.CheckNotNull(input, "input"), new byte[BufferSize], 0, 0)
+        {
+            this.leaveOpen = leaveOpen;
+        }
+        
         /// <summary>
         /// Creates a new CodedInputStream reading data from the given
         /// stream and buffer, using the default limits.
@@ -246,6 +266,22 @@ namespace Google.Protobuf
         /// </value>
         public int RecursionLimit { get { return recursionLimit; } }
 
+        /// <summary>
+        /// Disposes of this instance, potentially closing any underlying stream.
+        /// </summary>
+        /// <remarks>
+        /// As there is no flushing to perform here, disposing of a <see cref="CodedInputStream"/> which
+        /// was constructed with the <c>leaveOpen</c> option parameter set to <c>true</c> (or one which
+        /// was constructed to read from a byte array) has no effect.
+        /// </remarks>
+        public void Dispose()
+        {
+            if (!leaveOpen)
+            {
+                input.Dispose();
+            }
+        }
+
         #region Validation
         /// <summary>
         /// Verifies that the last call to ReadTag() returned tag 0 - in other words,
@@ -349,6 +385,14 @@ namespace Google.Protobuf
         /// This should be called directly after <see cref="ReadTag"/>, when
         /// the caller wishes to skip an unknown field.
         /// </summary>
+        /// <remarks>
+        /// This method throws <see cref="InvalidProtocolBufferException"/> if the last-read tag was an end-group tag.
+        /// If a caller wishes to skip a group, they should skip the whole group, by calling this method after reading the
+        /// start-group tag. This behavior allows callers to call this method on any field they don't understand, correctly
+        /// resulting in an error if an end-group tag has not been paired with an earlier start-group tag.
+        /// </remarks>
+        /// <exception cref="InvalidProtocolBufferException">The last tag was an end-group tag</exception>
+        /// <exception cref="InvalidOperationException">The last read operation read to the end of the logical stream</exception>
         public void SkipLastField()
         {
             if (lastTag == 0)
@@ -358,11 +402,11 @@ namespace Google.Protobuf
             switch (WireFormat.GetTagWireType(lastTag))
             {
                 case WireFormat.WireType.StartGroup:
-                    SkipGroup();
+                    SkipGroup(lastTag);
                     break;
                 case WireFormat.WireType.EndGroup:
-                    // Just ignore; there's no data following the tag.
-                    break;
+                    throw new InvalidProtocolBufferException(
+                        "SkipLastField called on an end-group tag, indicating that the corresponding start-group was missing");
                 case WireFormat.WireType.Fixed32:
                     ReadFixed32();
                     break;
@@ -379,7 +423,7 @@ namespace Google.Protobuf
             }
         }
 
-        private void SkipGroup()
+        private void SkipGroup(uint startGroupTag)
         {
             // Note: Currently we expect this to be the way that groups are read. We could put the recursion
             // depth changes into the ReadTag method instead, potentially...
@@ -389,16 +433,28 @@ namespace Google.Protobuf
                 throw InvalidProtocolBufferException.RecursionLimitExceeded();
             }
             uint tag;
-            do
+            while (true)
             {
                 tag = ReadTag();
                 if (tag == 0)
                 {
                     throw InvalidProtocolBufferException.TruncatedMessage();
                 }
+                // Can't call SkipLastField for this case; that would throw.
+                if (WireFormat.GetTagWireType(tag) == WireFormat.WireType.EndGroup)
+                {
+                    break;
+                }
                 // This recursion will allow us to handle nested groups.
                 SkipLastField();
-            } while (WireFormat.GetTagWireType(tag) != WireFormat.WireType.EndGroup);
+            }
+            int startField = WireFormat.GetTagFieldNumber(startGroupTag);
+            int endField = WireFormat.GetTagFieldNumber(tag);
+            if (startField != endField)
+            {
+                throw new InvalidProtocolBufferException(
+                    $"Mismatched end-group tag. Started with field {startField}; ended with field {endField}");
+            }
             recursionDepth--;
         }
 

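A sketch of the new IDisposable/leaveOpen behaviour of CodedInputStream described above; the Duration message and MemoryStream are illustrative assumptions:

    using System.IO;
    using Google.Protobuf;
    using Google.Protobuf.WellKnownTypes;

    static class CodedInputStreamExample
    {
        static void Main()
        {
            byte[] data = new Duration { Seconds = 1 }.ToByteArray();
            var stream = new MemoryStream(data);

            // With leaveOpen: true the underlying stream survives disposal;
            // the single-argument Stream constructor now takes ownership instead.
            using (var input = new CodedInputStream(stream, leaveOpen: true))
            {
                var duration = new Duration();
                duration.MergeFrom(input);
                System.Console.WriteLine(duration.Seconds);   // 1
            }
            System.Console.WriteLine(stream.CanRead);   // True: the stream was left open
        }
    }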
+ 60 - 7
csharp/src/Google.Protobuf/CodedOutputStream.cs

@@ -55,7 +55,7 @@ namespace Google.Protobuf
     /// and <c>MapField&lt;TKey, TValue&gt;</c> to serialize such fields.
     /// </para>
     /// </remarks>
-    public sealed partial class CodedOutputStream
+    public sealed partial class CodedOutputStream : IDisposable
     {
         // "Local" copy of Encoding.UTF8, for efficiency. (Yes, it makes a difference.)
         internal static readonly Encoding Utf8Encoding = Encoding.UTF8;
@@ -65,6 +65,7 @@ namespace Google.Protobuf
         /// </summary>
         public static readonly int DefaultBufferSize = 4096;
 
+        private readonly bool leaveOpen;
         private readonly byte[] buffer;
         private readonly int limit;
         private int position;
@@ -91,20 +92,44 @@ namespace Google.Protobuf
             this.buffer = buffer;
             this.position = offset;
             this.limit = offset + length;
+            leaveOpen = true; // Simple way of avoiding trying to dispose of a null reference
         }
 
-        private CodedOutputStream(Stream output, byte[] buffer)
+        private CodedOutputStream(Stream output, byte[] buffer, bool leaveOpen)
         {
-            this.output = output;
+            this.output = ProtoPreconditions.CheckNotNull(output, nameof(output));
             this.buffer = buffer;
             this.position = 0;
             this.limit = buffer.Length;
+            this.leaveOpen = leaveOpen;
+        }
+
+        /// <summary>
+        /// Creates a new <see cref="CodedOutputStream" /> which writes to the given stream, and disposes of that
+        /// stream when the returned <c>CodedOutputStream</c> is disposed.
+        /// </summary>
+        /// <param name="output">The stream to write to. It will be disposed when the returned <c>CodedOutputStream is disposed.</c></param>
+        public CodedOutputStream(Stream output) : this(output, DefaultBufferSize, false)
+        {
+        }
+
+        /// <summary>
+        /// Creates a new CodedOutputStream which writes to the given stream and uses
+        /// the specified buffer size.
+        /// </summary>
+        /// <param name="output">The stream to write to. It will be disposed when the returned <c>CodedOutputStream is disposed.</c></param>
+        /// <param name="bufferSize">The size of buffer to use internally.</param>
+        public CodedOutputStream(Stream output, int bufferSize) : this(output, new byte[bufferSize], false)
+        {
         }
 
         /// <summary>
         /// Creates a new CodedOutputStream which writes to the given stream.
         /// </summary>
-        public CodedOutputStream(Stream output) : this(output, DefaultBufferSize)
+        /// <param name="output">The stream to write to.</param>
+        /// <param name="leaveOpen">If <c>true</c>, <paramref name="output"/> is left open when the returned <c>CodedOutputStream</c> is disposed;
+        /// if <c>false</c>, the provided stream is disposed as well.</param>
+        public CodedOutputStream(Stream output, bool leaveOpen) : this(output, DefaultBufferSize, leaveOpen)
         {
         }
 
@@ -112,9 +137,13 @@ namespace Google.Protobuf
         /// Creates a new CodedOutputStream which writes to the given stream and uses
         /// the specified buffer size.
         /// </summary>
-        public CodedOutputStream(Stream output, int bufferSize) : this(output, new byte[bufferSize])
+        /// <param name="output">The stream to write to.</param>
+        /// <param name="bufferSize">The size of buffer to use internally.</param>
+        /// <param name="leaveOpen">If <c>true</c>, <paramref name="output"/> is left open when the returned <c>CodedOutputStream</c> is disposed;
+        /// if <c>false</c>, the provided stream is disposed as well.</param>
+        public CodedOutputStream(Stream output, int bufferSize, bool leaveOpen) : this(output, new byte[bufferSize], leaveOpen)
         {
-        }    
+        }
         #endregion
 
         /// <summary>
@@ -659,6 +688,30 @@ namespace Google.Protobuf
             }
         }
 
+        /// <summary>
+        /// Flushes any buffered data and optionally closes the underlying stream, if any.
+        /// </summary>
+        /// <remarks>
+        /// <para>
+        /// By default, any underlying stream is closed by this method. To configure this behaviour,
+        /// use a constructor overload with a <c>leaveOpen</c> parameter. If this instance does not
+        /// have an underlying stream, this method does nothing.
+        /// </para>
+        /// <para>
+        /// For the sake of efficiency, calling this method does not prevent future write calls - but
+        /// if a later write ends up writing to a stream which has been disposed, that is likely to
+        /// fail. It is recommended that you not call any other methods after this.
+        /// </para>
+        /// </remarks>
+        public void Dispose()
+        {
+            Flush();
+            if (!leaveOpen)
+            {
+                output.Dispose();
+            }
+        }
+
         /// <summary>
         /// Flushes any buffered data to the underlying stream (if there is one).
         /// </summary>
@@ -705,4 +758,4 @@ namespace Google.Protobuf
             }
         }
     }
-}
+}

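With CodedOutputStream now implementing IDisposable, the usual pattern is a using statement: Dispose flushes the buffer and, unless leaveOpen is true, disposes the wrapped stream as well. A minimal sketch, assuming a MemoryStream target and using the constructors added above:

    using System.IO;
    using Google.Protobuf;

    var memory = new MemoryStream();
    using (var output = new CodedOutputStream(memory, leaveOpen: true))
    {
        output.WriteString("hello");   // buffered; flushed when the using block disposes the CodedOutputStream
    }
    // leaveOpen: true, so 'memory' is still open here and contains the written bytes.
    byte[] written = memory.ToArray();
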
+ 32 - 44
csharp/src/Google.Protobuf/Collections/MapField.cs

@@ -34,6 +34,7 @@ using Google.Protobuf.Reflection;
 using System;
 using System.Collections;
 using System.Collections.Generic;
+using System.IO;
 using System.Linq;
 using System.Text;
 using Google.Protobuf.Compatibility;
@@ -53,6 +54,13 @@ namespace Google.Protobuf.Collections
     /// For string keys, the equality comparison is provided by <see cref="StringComparer.Ordinal" />.
     /// </para>
     /// <para>
+    /// Null values are not permitted in the map, either for wrapper types or regular messages.
+    /// If a map is deserialized from a data stream and the value is missing from an entry, a default value
+    /// is created instead. For primitive types, that is the regular default value (0, the empty string and so
+    /// on); for message types, an empty instance of the message is created, as if the map entry contained a 0-length
+    /// encoded value for the field.
+    /// </para>
+    /// <para>
     /// This implementation does not generally prohibit the use of key/value types which are not
     /// supported by Protocol Buffers (e.g. using a key type of <code>byte</code>) but nor does it guarantee
     /// that all operations will work in such cases.
@@ -61,34 +69,10 @@ namespace Google.Protobuf.Collections
     public sealed class MapField<TKey, TValue> : IDeepCloneable<MapField<TKey, TValue>>, IDictionary<TKey, TValue>, IEquatable<MapField<TKey, TValue>>, IDictionary
     {
         // TODO: Don't create the map/list until we have an entry. (Assume many maps will be empty.)
-        private readonly bool allowNullValues;
         private readonly Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>> map =
             new Dictionary<TKey, LinkedListNode<KeyValuePair<TKey, TValue>>>();
         private readonly LinkedList<KeyValuePair<TKey, TValue>> list = new LinkedList<KeyValuePair<TKey, TValue>>();
 
-        /// <summary>
-        /// Constructs a new map field, defaulting the value nullability to only allow null values for message types
-        /// and non-nullable value types.
-        /// </summary>
-        public MapField() : this(typeof(IMessage).IsAssignableFrom(typeof(TValue)) || Nullable.GetUnderlyingType(typeof(TValue)) != null)
-        {
-        }
-
-        /// <summary>
-        /// Constructs a new map field, overriding the choice of whether null values are permitted in the map.
-        /// This is used by wrapper types, where maps with string and bytes wrappers as the value types
-        /// support null values.
-        /// </summary>
-        /// <param name="allowNullValues">Whether null values are permitted in the map or not.</param>
-        public MapField(bool allowNullValues)
-        {
-            if (allowNullValues && typeof(TValue).IsValueType() && Nullable.GetUnderlyingType(typeof(TValue)) == null)
-            {
-                throw new ArgumentException("allowNullValues", "Non-nullable value types do not support null values");
-            }
-            this.allowNullValues = allowNullValues;
-        }
-
         /// <summary>
         /// Creates a deep clone of this object.
         /// </summary>
@@ -97,13 +81,13 @@ namespace Google.Protobuf.Collections
         /// </returns>
         public MapField<TKey, TValue> Clone()
         {
-            var clone = new MapField<TKey, TValue>(allowNullValues);
+            var clone = new MapField<TKey, TValue>();
             // Keys are never cloneable. Values might be.
             if (typeof(IDeepCloneable<TValue>).IsAssignableFrom(typeof(TValue)))
             {
                 foreach (var pair in list)
                 {
-                    clone.Add(pair.Key, pair.Value == null ? pair.Value : ((IDeepCloneable<TValue>)pair.Value).Clone());
+                    clone.Add(pair.Key, ((IDeepCloneable<TValue>)pair.Value).Clone());
                 }
             }
             else
@@ -140,7 +124,7 @@ namespace Google.Protobuf.Collections
         /// <returns><c>true</c> if the map contains the given key; <c>false</c> otherwise.</returns>
         public bool ContainsKey(TKey key)
         {
-            Preconditions.CheckNotNullUnconstrained(key, "key");
+            ProtoPreconditions.CheckNotNullUnconstrained(key, "key");
             return map.ContainsKey(key);
         }
 
@@ -157,7 +141,7 @@ namespace Google.Protobuf.Collections
         /// <returns><c>true</c> if the map contained the given key before the entry was removed; <c>false</c> otherwise.</returns>
         public bool Remove(TKey key)
         {
-            Preconditions.CheckNotNullUnconstrained(key, "key");
+            ProtoPreconditions.CheckNotNullUnconstrained(key, "key");
             LinkedListNode<KeyValuePair<TKey, TValue>> node;
             if (map.TryGetValue(key, out node))
             {
@@ -205,7 +189,7 @@ namespace Google.Protobuf.Collections
         {
             get
             {
-                Preconditions.CheckNotNullUnconstrained(key, "key");
+                ProtoPreconditions.CheckNotNullUnconstrained(key, "key");
                 TValue value;
                 if (TryGetValue(key, out value))
                 {
@@ -215,11 +199,11 @@ namespace Google.Protobuf.Collections
             }
             set
             {
-                Preconditions.CheckNotNullUnconstrained(key, "key");
+                ProtoPreconditions.CheckNotNullUnconstrained(key, "key");
                 // value == null check here is redundant, but avoids boxing.
-                if (value == null && !allowNullValues)
+                if (value == null)
                 {
-                    Preconditions.CheckNotNullUnconstrained(value, "value");
+                    ProtoPreconditions.CheckNotNullUnconstrained(value, "value");
                 }
                 LinkedListNode<KeyValuePair<TKey, TValue>> node;
                 var pair = new KeyValuePair<TKey, TValue>(key, value);
@@ -246,12 +230,12 @@ namespace Google.Protobuf.Collections
         public ICollection<TValue> Values { get { return new MapView<TValue>(this, pair => pair.Value, ContainsValue); } }
 
         /// <summary>
-        /// Adds the specified entries to the map.
+        /// Adds the specified entries to the map. The keys and values are not automatically cloned.
         /// </summary>
         /// <param name="entries">The entries to add to the map.</param>
         public void Add(IDictionary<TKey, TValue> entries)
         {
-            Preconditions.CheckNotNull(entries, "entries");
+            ProtoPreconditions.CheckNotNull(entries, "entries");
             foreach (var pair in entries)
             {
                 Add(pair.Key, pair.Value);
@@ -346,11 +330,6 @@ namespace Google.Protobuf.Collections
             }
         }
 
-        /// <summary>
-        /// Returns whether or not this map allows values to be null.
-        /// </summary>
-        public bool AllowsNullValues { get { return allowNullValues; } }
-
         /// <summary>
         /// Gets the number of elements contained in the map.
         /// </summary>
@@ -496,9 +475,9 @@ namespace Google.Protobuf.Collections
         /// </summary>
         public override string ToString()
         {
-            var builder = new StringBuilder();
-            JsonFormatter.Default.WriteDictionary(builder, this);
-            return builder.ToString();
+            var writer = new StringWriter();
+            JsonFormatter.Default.WriteDictionary(writer, this);
+            return writer.ToString();
         }
 
         #region IDictionary explicit interface implementation
@@ -523,7 +502,7 @@ namespace Google.Protobuf.Collections
 
         void IDictionary.Remove(object key)
         {
-            Preconditions.CheckNotNull(key, "key");
+            ProtoPreconditions.CheckNotNull(key, "key");
             if (!(key is TKey))
             {
                 return;
@@ -552,7 +531,7 @@ namespace Google.Protobuf.Collections
         {
             get
             {
-                Preconditions.CheckNotNull(key, "key");
+                ProtoPreconditions.CheckNotNull(key, "key");
                 if (!(key is TKey))
                 {
                     return null;
@@ -632,6 +611,8 @@ namespace Google.Protobuf.Collections
             /// </summary>
             internal class MessageAdapter : IMessage
             {
+                private static readonly byte[] ZeroLengthMessageStreamData = new byte[] { 0 };
+
                 private readonly Codec codec;
                 internal TKey Key { get; set; }
                 internal TValue Value { get; set; }
@@ -665,6 +646,13 @@ namespace Google.Protobuf.Collections
                             input.SkipLastField();
                         }
                     }
+
+                    // Corner case: a map entry with a key but no value, where the value type is a message.
+                    // Read it as if we'd seen an input stream with no data (i.e. create a "default" message).
+                    if (Value == null)
+                    {
+                        Value = codec.valueCodec.Read(new CodedInputStream(ZeroLengthMessageStreamData));
+                    }
                 }
 
                 public void WriteTo(CodedOutputStream output)

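With AllowsNullValues removed above, null map values are rejected for every value type, and a serialized entry that omits its value is read back as the value type's default (an empty message for message types). A short sketch of the in-memory side of that rule (the key/value types here are arbitrary):

    using System;
    using Google.Protobuf.Collections;

    var labels = new MapField<string, string>();
    labels["first"] = "one";          // fine
    try
    {
        labels["second"] = null;      // nulls are no longer permitted, even for wrapper or message values
    }
    catch (ArgumentNullException)
    {
        // Expected: both the indexer and Add validate the value.
    }
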
+ 8 - 8
csharp/src/Google.Protobuf/Collections/RepeatedField.cs

@@ -33,8 +33,8 @@
 using System;
 using System.Collections;
 using System.Collections.Generic;
+using System.IO;
 using System.Text;
-using Google.Protobuf.Compatibility;
 
 namespace Google.Protobuf.Collections
 {
@@ -96,8 +96,8 @@ namespace Google.Protobuf.Collections
             // iteration.
             uint tag = input.LastTag;
             var reader = codec.ValueReader;
-            // Value types can be packed or not.
-            if (typeof(T).IsValueType() && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited)
+            // Non-nullable value types can be packed or not.
+            if (FieldCodec<T>.IsPackedRepeatedField(tag))
             {
                 int length = input.ReadLength();
                 if (length > 0)
@@ -134,7 +134,7 @@ namespace Google.Protobuf.Collections
                 return 0;
             }
             uint tag = codec.Tag;
-            if (typeof(T).IsValueType() && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited)
+            if (codec.PackedRepeatedField)
             {
                 int dataSize = CalculatePackedDataSize(codec);
                 return CodedOutputStream.ComputeRawVarint32Size(tag) +
@@ -186,7 +186,7 @@ namespace Google.Protobuf.Collections
             }
             var writer = codec.ValueWriter;
             var tag = codec.Tag;
-            if (typeof(T).IsValueType() && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited)
+            if (codec.PackedRepeatedField)
             {
                 // Packed primitive type
                 uint size = (uint)CalculatePackedDataSize(codec);
@@ -475,9 +475,9 @@ namespace Google.Protobuf.Collections
         /// </summary>
         public override string ToString()
         {
-            var builder = new StringBuilder();
-            JsonFormatter.Default.WriteList(builder, this);
-            return builder.ToString();
+            var writer = new StringWriter();
+            JsonFormatter.Default.WriteList(writer, this);
+            return writer.ToString();
         }
 
         /// <summary>

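The packed/non-packed decision now comes from the codec rather than from typeof(T).IsValueType(), which matters for nullable wrapper values such as int?. A hedged sketch of writing a packed int32 field through the public RepeatedField/FieldCodec API (field number 1 is arbitrary):

    using System.IO;
    using Google.Protobuf;
    using Google.Protobuf.Collections;

    // A length-delimited tag on a packable type (int) selects the packed encoding.
    uint tag = WireFormat.MakeTag(1, WireFormat.WireType.LengthDelimited);
    var codec = FieldCodec.ForInt32(tag);

    var values = new RepeatedField<int> { 1, 2, 3 };
    var stream = new MemoryStream();
    using (var output = new CodedOutputStream(stream, leaveOpen: true))
    {
        values.WriteTo(output, codec);   // one tag, a length, then the three varints
    }
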
+ 77 - 90
csharp/src/Google.Protobuf/FieldCodec.cs

@@ -30,6 +30,8 @@
 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #endregion
 
+using Google.Protobuf.Compatibility;
+using Google.Protobuf.WellKnownTypes;
 using System;
 using System.Collections.Generic;
 
@@ -261,20 +263,17 @@ namespace Google.Protobuf
         /// </remarks>
         private static class WrapperCodecs
         {
-            // All the field numbers are the same (1).
-            private const int WrapperValueFieldNumber = Google.Protobuf.WellKnownTypes.Int32Value.ValueFieldNumber;
-
-            private static readonly Dictionary<Type, object> Codecs = new Dictionary<Type, object>
+            private static readonly Dictionary<System.Type, object> Codecs = new Dictionary<System.Type, object>
             {
-                { typeof(bool), ForBool(WireFormat.MakeTag(WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
-                { typeof(int), ForInt32(WireFormat.MakeTag(WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
-                { typeof(long), ForInt64(WireFormat.MakeTag(WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
-                { typeof(uint), ForUInt32(WireFormat.MakeTag(WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
-                { typeof(ulong), ForUInt64(WireFormat.MakeTag(WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
-                { typeof(float), ForFloat(WireFormat.MakeTag(WrapperValueFieldNumber, WireFormat.WireType.Fixed32)) },
-                { typeof(double), ForDouble(WireFormat.MakeTag(WrapperValueFieldNumber, WireFormat.WireType.Fixed64)) },
-                { typeof(string), ForString(WireFormat.MakeTag(WrapperValueFieldNumber, WireFormat.WireType.LengthDelimited)) },
-                { typeof(ByteString), ForBytes(WireFormat.MakeTag(WrapperValueFieldNumber, WireFormat.WireType.LengthDelimited)) }
+                { typeof(bool), ForBool(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
+                { typeof(int), ForInt32(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
+                { typeof(long), ForInt64(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
+                { typeof(uint), ForUInt32(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
+                { typeof(ulong), ForUInt64(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Varint)) },
+                { typeof(float), ForFloat(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Fixed32)) },
+                { typeof(double), ForDouble(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.Fixed64)) },
+                { typeof(string), ForString(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.LengthDelimited)) },
+                { typeof(ByteString), ForBytes(WireFormat.MakeTag(WrappersReflection.WrapperValueFieldNumber, WireFormat.WireType.LengthDelimited)) }
             };
 
             /// <summary>
@@ -331,17 +330,24 @@ namespace Google.Protobuf
     }
 
     /// <summary>
+    /// <para>
     /// An encode/decode pair for a single field. This effectively encapsulates
     /// all the information needed to read or write the field value from/to a coded
     /// stream.
+    /// </para>
+    /// <para>
+    /// This class is public and has to be as it is used by generated code, but its public
+    /// API is very limited - just what the generated code needs to call directly.
+    /// </para>
     /// </summary>
     /// <remarks>
-    /// This never writes default values to the stream, and is not currently designed
-    /// to play well with packed arrays.
+    /// This never writes default values to the stream, and does not address "packedness"
+    /// in repeated fields itself, other than to know whether or not the field *should* be packed.
     /// </remarks>
     public sealed class FieldCodec<T>
     {
         private static readonly T DefaultDefault;
+        private static readonly bool TypeSupportsPacking = typeof(T).IsValueType() && Nullable.GetUnderlyingType(typeof(T)) == null;
 
         static FieldCodec()
         {
@@ -356,80 +362,31 @@ namespace Google.Protobuf
             // Otherwise it's the default value of the CLR type
         }
 
-        private static Func<T, bool> CreateDefaultValueCheck<TTmp>(Func<TTmp, bool> check)
-        {
-            return (Func<T, bool>)(object)check;
-        }
-
-        private readonly Func<CodedInputStream, T> reader;
-        private readonly Action<CodedOutputStream, T> writer;
-        private readonly Func<T, int> sizeCalculator;
-        private readonly uint tag;
-        private readonly int tagSize;
-        private readonly int fixedSize;
-        // Default value for this codec. Usually the same for every instance of the same type, but
-        // for string/ByteString wrapper fields the codec's default value is null, whereas for
-        // other string/ByteString fields it's "" or ByteString.Empty.
-        private readonly T defaultValue;
+        internal static bool IsPackedRepeatedField(uint tag) =>
+            TypeSupportsPacking && WireFormat.GetTagWireType(tag) == WireFormat.WireType.LengthDelimited;
 
-        internal FieldCodec(
-            Func<CodedInputStream, T> reader,
-            Action<CodedOutputStream, T> writer,
-            Func<T, int> sizeCalculator,
-            uint tag) : this(reader, writer, sizeCalculator, tag, DefaultDefault)
-        {
-        }
-
-        internal FieldCodec(
-            Func<CodedInputStream, T> reader,
-            Action<CodedOutputStream, T> writer,
-            Func<T, int> sizeCalculator,
-            uint tag,
-            T defaultValue)
-        {
-            this.reader = reader;
-            this.writer = writer;
-            this.sizeCalculator = sizeCalculator;
-            this.fixedSize = 0;
-            this.tag = tag;
-            this.defaultValue = defaultValue;
-            tagSize = CodedOutputStream.ComputeRawVarint32Size(tag);
-        }
-
-        internal FieldCodec(
-            Func<CodedInputStream, T> reader,
-            Action<CodedOutputStream, T> writer,
-            int fixedSize,
-            uint tag)
-        {
-            this.reader = reader;
-            this.writer = writer;
-            this.sizeCalculator = _ => fixedSize;
-            this.fixedSize = fixedSize;
-            this.tag = tag;
-            tagSize = CodedOutputStream.ComputeRawVarint32Size(tag);
-        }
+        internal bool PackedRepeatedField { get; }
 
         /// <summary>
-        /// Returns the size calculator for just a value.
+        /// Returns a delegate to write a value (unconditionally) to a coded output stream.
         /// </summary>
-        internal Func<T, int> ValueSizeCalculator { get { return sizeCalculator; } }
+        internal Action<CodedOutputStream, T> ValueWriter { get; }
 
         /// <summary>
-        /// Returns a delegate to write a value (unconditionally) to a coded output stream.
+        /// Returns the size calculator for just a value.
         /// </summary>
-        internal Action<CodedOutputStream, T> ValueWriter { get { return writer; } }
+        internal Func<T, int> ValueSizeCalculator { get; }
 
         /// <summary>
         /// Returns a delegate to read a value from a coded input stream. It is assumed that
         /// the stream is already positioned on the appropriate tag.
         /// </summary>
-        internal Func<CodedInputStream, T> ValueReader { get { return reader; } }
+        internal Func<CodedInputStream, T> ValueReader { get; }
 
         /// <summary>
         /// Returns the fixed size for an entry, or 0 if sizes vary.
         /// </summary>
-        internal int FixedSize { get { return fixedSize; } }
+        internal int FixedSize { get; }
 
         /// <summary>
         /// Gets the tag of the codec.
@@ -437,15 +394,54 @@ namespace Google.Protobuf
         /// <value>
         /// The tag of the codec.
         /// </value>
-        public uint Tag { get { return tag; } }
+        internal uint Tag { get; }
 
         /// <summary>
-        /// Gets the default value of the codec's type.
+        /// Default value for this codec. Usually the same for every instance of the same type, but
+        /// for string/ByteString wrapper fields the codec's default value is null, whereas for
+        /// other string/ByteString fields it's "" or ByteString.Empty.
         /// </summary>
         /// <value>
         /// The default value of the codec's type.
         /// </value>
-        public T DefaultValue { get { return defaultValue; } }
+        internal T DefaultValue { get; }
+
+        private readonly int tagSize;
+        
+        internal FieldCodec(
+                Func<CodedInputStream, T> reader,
+                Action<CodedOutputStream, T> writer,
+                int fixedSize,
+                uint tag) : this(reader, writer, _ => fixedSize, tag)
+        {
+            FixedSize = fixedSize;
+        }
+
+        internal FieldCodec(
+            Func<CodedInputStream, T> reader,
+            Action<CodedOutputStream, T> writer,
+            Func<T, int> sizeCalculator,
+            uint tag) : this(reader, writer, sizeCalculator, tag, DefaultDefault)
+        {
+        }
+
+        internal FieldCodec(
+            Func<CodedInputStream, T> reader,
+            Action<CodedOutputStream, T> writer,
+            Func<T, int> sizeCalculator,
+            uint tag,
+            T defaultValue)
+        {
+            ValueReader = reader;
+            ValueWriter = writer;
+            ValueSizeCalculator = sizeCalculator;
+            FixedSize = 0;
+            Tag = tag;
+            DefaultValue = defaultValue;
+            tagSize = CodedOutputStream.ComputeRawVarint32Size(tag);
+            // Detect packed-ness once, so we can check for it within RepeatedField<T>.
+            PackedRepeatedField = IsPackedRepeatedField(tag);
+        }
 
         /// <summary>
         /// Write a tag and the given value, *if* the value is not the default.
@@ -454,8 +450,8 @@ namespace Google.Protobuf
         {
             if (!IsDefault(value))
             {
-                output.WriteTag(tag);
-                writer(output, value);
+                output.WriteTag(Tag);
+                ValueWriter(output, value);
             }
         }
 
@@ -464,23 +460,14 @@ namespace Google.Protobuf
         /// </summary>
         /// <param name="input">The input stream to read from.</param>
         /// <returns>The value read from the stream.</returns>
-        public T Read(CodedInputStream input)
-        {
-            return reader(input);
-        }
+        public T Read(CodedInputStream input) => ValueReader(input);
 
         /// <summary>
         /// Calculates the size required to write the given value, with a tag,
         /// if the value is not the default.
         /// </summary>
-        public int CalculateSizeWithTag(T value)
-        {
-            return IsDefault(value) ? 0 : sizeCalculator(value) + tagSize;
-        }
+        public int CalculateSizeWithTag(T value) => IsDefault(value) ? 0 : ValueSizeCalculator(value) + tagSize;
 
-        private bool IsDefault(T value)
-        {
-            return EqualityComparer<T>.Default.Equals(value, defaultValue);
-        }
+        private bool IsDefault(T value) => EqualityComparer<T>.Default.Equals(value, DefaultValue);
     }
 }

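As the remarks above note, a FieldCodec<T> never writes default values; its remaining public surface is essentially Read, CalculateSizeWithTag and WriteTagAndValue. A small sketch of that behaviour for an int32 codec (field number 1 is arbitrary):

    using System.IO;
    using Google.Protobuf;

    var codec = FieldCodec.ForInt32(WireFormat.MakeTag(1, WireFormat.WireType.Varint));

    int sizeOfDefault = codec.CalculateSizeWithTag(0);   // 0 - default values are omitted entirely
    int sizeOfValue = codec.CalculateSizeWithTag(42);    // tag byte plus the varint

    var stream = new MemoryStream();
    using (var output = new CodedOutputStream(stream, leaveOpen: true))
    {
        codec.WriteTagAndValue(output, 0);    // writes nothing
        codec.WriteTagAndValue(output, 42);   // writes the tag followed by 42
    }
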
+ 24 - 4
csharp/src/Google.Protobuf/Google.Protobuf.csproj

@@ -16,6 +16,8 @@
     <FileAlignment>512</FileAlignment>
     <OldToolsVersion>3.5</OldToolsVersion>
     <MinimumVisualStudioVersion>10.0</MinimumVisualStudioVersion>
+    <NuGetPackageImportStamp>
+    </NuGetPackageImportStamp>
   </PropertyGroup>
   <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
     <DebugSymbols>true</DebugSymbols>
@@ -62,7 +64,7 @@
     <NoStdLib>true</NoStdLib>
     <GenerateSerializationAssemblies>Off</GenerateSerializationAssemblies>
     <SignAssembly>True</SignAssembly>
-    <AssemblyOriginatorKeyFile>C:\keys\Google.Protobuf.snk</AssemblyOriginatorKeyFile>
+    <AssemblyOriginatorKeyFile>..\..\keys\Google.Protobuf.snk</AssemblyOriginatorKeyFile>
     <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
   </PropertyGroup>
   <ItemGroup>
@@ -83,17 +85,22 @@
     <Compile Include="Compatibility\TypeExtensions.cs" />
     <Compile Include="FieldCodec.cs" />
     <Compile Include="FrameworkPortability.cs" />
+    <Compile Include="ICustomDiagnosticMessage.cs" />
     <Compile Include="IDeepCloneable.cs" />
+    <Compile Include="InvalidJsonException.cs" />
     <Compile Include="JsonFormatter.cs" />
+    <Compile Include="JsonParser.cs" />
+    <Compile Include="JsonToken.cs" />
+    <Compile Include="JsonTokenizer.cs" />
     <Compile Include="MessageExtensions.cs" />
     <Compile Include="IMessage.cs" />
     <Compile Include="InvalidProtocolBufferException.cs" />
     <Compile Include="LimitedInputStream.cs" />
     <Compile Include="MessageParser.cs" />
     <Compile Include="Properties\AssemblyInfo.cs" />
+    <Compile Include="Reflection\Descriptor.cs" />
     <Compile Include="Reflection\DescriptorBase.cs" />
     <Compile Include="Reflection\DescriptorPool.cs" />
-    <Compile Include="Reflection\DescriptorProtoFile.cs" />
     <Compile Include="Reflection\DescriptorUtil.cs" />
     <Compile Include="Reflection\DescriptorValidationException.cs" />
     <Compile Include="Reflection\EnumDescriptor.cs" />
@@ -102,7 +109,7 @@
     <Compile Include="Reflection\FieldDescriptor.cs" />
     <Compile Include="Reflection\FieldType.cs" />
     <Compile Include="Reflection\FileDescriptor.cs" />
-    <Compile Include="Reflection\GeneratedCodeInfo.cs" />
+    <Compile Include="Reflection\GeneratedClrTypeInfo.cs" />
     <Compile Include="Reflection\IDescriptor.cs" />
     <Compile Include="Reflection\IFieldAccessor.cs" />
     <Compile Include="Reflection\MapFieldAccessor.cs" />
@@ -110,13 +117,15 @@
     <Compile Include="Reflection\MethodDescriptor.cs" />
     <Compile Include="Reflection\OneofAccessor.cs" />
     <Compile Include="Reflection\OneofDescriptor.cs" />
+    <Compile Include="Reflection\OriginalNameAttribute.cs" />
     <Compile Include="Reflection\PackageDescriptor.cs" />
     <Compile Include="Reflection\PartialClasses.cs" />
     <Compile Include="Reflection\ReflectionUtil.cs" />
     <Compile Include="Reflection\RepeatedFieldAccessor.cs" />
     <Compile Include="Reflection\ServiceDescriptor.cs" />
     <Compile Include="Reflection\SingleFieldAccessor.cs" />
-    <Compile Include="Preconditions.cs" />
+    <Compile Include="ProtoPreconditions.cs" />
+    <Compile Include="Reflection\TypeRegistry.cs" />
     <Compile Include="WellKnownTypes\Any.cs" />
     <Compile Include="WellKnownTypes\AnyPartial.cs" />
     <Compile Include="WellKnownTypes\Api.cs" />
@@ -124,20 +133,31 @@
     <Compile Include="WellKnownTypes\DurationPartial.cs" />
     <Compile Include="WellKnownTypes\Empty.cs" />
     <Compile Include="WellKnownTypes\FieldMask.cs" />
+    <Compile Include="WellKnownTypes\FieldMaskPartial.cs" />
     <Compile Include="WellKnownTypes\SourceContext.cs" />
     <Compile Include="WellKnownTypes\Struct.cs" />
     <Compile Include="WellKnownTypes\TimeExtensions.cs" />
     <Compile Include="WellKnownTypes\Timestamp.cs" />
     <Compile Include="WellKnownTypes\TimestampPartial.cs" />
     <Compile Include="WellKnownTypes\Type.cs" />
+    <Compile Include="WellKnownTypes\ValuePartial.cs" />
     <Compile Include="WellKnownTypes\Wrappers.cs" />
+    <Compile Include="WellKnownTypes\WrappersPartial.cs" />
     <Compile Include="WireFormat.cs" />
   </ItemGroup>
   <ItemGroup>
     <None Include="Google.Protobuf.nuspec" />
+    <None Include="packages.config" />
   </ItemGroup>
   <ItemGroup />
   <Import Project="$(MSBuildExtensionsPath32)\Microsoft\Portable\$(TargetFrameworkVersion)\Microsoft.Portable.CSharp.targets" />
+  <Import Project="..\packages\NuSpec.ReferenceGenerator.1.4.1\build\portable-net45+win+wpa81+wp80+MonoAndroid10+xamarinios10+MonoTouch10\NuSpec.ReferenceGenerator.targets" Condition="Exists('..\packages\NuSpec.ReferenceGenerator.1.4.1\build\portable-net45+win+wpa81+wp80+MonoAndroid10+xamarinios10+MonoTouch10\NuSpec.ReferenceGenerator.targets')" />
+  <Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild">
+    <PropertyGroup>
+      <ErrorText>This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them.  For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.</ErrorText>
+    </PropertyGroup>
+    <Error Condition="!Exists('..\packages\NuSpec.ReferenceGenerator.1.4.1\build\portable-net45+win+wpa81+wp80+MonoAndroid10+xamarinios10+MonoTouch10\NuSpec.ReferenceGenerator.targets')" Text="$([System.String]::Format('$(ErrorText)', '..\packages\NuSpec.ReferenceGenerator.1.4.1\build\portable-net45+win+wpa81+wp80+MonoAndroid10+xamarinios10+MonoTouch10\NuSpec.ReferenceGenerator.targets'))" />
+  </Target>
   <!-- To modify your build process, add your task inside one of the targets below and uncomment it. 
        Other similar extension points exist, see Microsoft.Common.targets.
   <Target Name="BeforeBuild">

+ 35 - 18
csharp/src/Google.Protobuf/Google.Protobuf.nuspec

@@ -1,11 +1,11 @@
-<?xml version="1.0" encoding="utf-8"?>
+<?xml version="1.0" encoding="utf-8"?>
 <package>
   <metadata>
     <id>Google.Protobuf</id>
     <title>Google Protocol Buffers C#</title>
     <summary>C# runtime library for Protocol Buffers - Google's data interchange format.</summary>
     <description>See project site for more info.</description>
-    <version>3.0.0-alpha4</version>
+    <version>3.0.0-beta3</version>
     <authors>Google Inc.</authors>
     <owners>protobuf-packages</owners>
     <licenseUrl>https://github.com/google/protobuf/blob/master/LICENSE</licenseUrl>
@@ -14,24 +14,41 @@
     <releaseNotes>C# proto3 support</releaseNotes>
     <copyright>Copyright 2015, Google Inc.</copyright>
     <tags>Protocol Buffers Binary Serialization Format Google proto proto3</tags>
+    <dependencies>
+      <!-- Dependencies for older, monolithic-assembly platforms -->
+      <group targetFramework="net45" />
+      <group targetFramework="wp8" />
+      <group targetFramework="win8" />
+      <group targetFramework="wpa81" />
+      <group targetFramework="xamarin.ios" />
+      <group targetFramework="monotouch" />
+      <group targetFramework="monoandroid" />
+      <!-- Dependencies for newer, more granular platforms (.NET Core etc) -->
+      <group targetFramework="dotnet">
+        <dependency id="System.Collections" version="4.0.0" />
+        <dependency id="System.Diagnostics.Debug" version="4.0.0" />
+        <dependency id="System.Globalization" version="4.0.0" />
+        <dependency id="System.IO" version="4.0.0" />
+        <dependency id="System.Linq" version="4.0.0" />
+        <dependency id="System.Linq.Expressions" version="4.0.0" />
+        <dependency id="System.ObjectModel" version="4.0.0" />
+        <dependency id="System.Reflection" version="4.0.0" />
+        <dependency id="System.Reflection.Extensions" version="4.0.0" />
+        <dependency id="System.Runtime" version="4.0.0" />
+        <dependency id="System.Runtime.Extensions" version="4.0.0" />
+        <dependency id="System.Text.Encoding" version="4.0.0" />
+        <dependency id="System.Text.RegularExpressions" version="4.0.0" />
+        <dependency id="System.Threading" version="4.0.0" />
+      </group>
+    </dependencies>
   </metadata>
   <files>
     <file src="bin/ReleaseSigned/Google.Protobuf.dll" target="lib/portable-net45+netcore45+wpa81+wp8" />
-	<file src="bin/ReleaseSigned/Google.Protobuf.pdb" target="lib/portable-net45+netcore45+wpa81+wp8" />
-	<file src="bin/ReleaseSigned/Google.Protobuf.xml" target="lib/portable-net45+netcore45+wpa81+wp8" />
-	<file src="**\*.cs" target="src" />
-	<file src="..\..\..\cmake\Release\protoc.exe" target="tools" />
-	<file src="..\..\..\src\google\protobuf\any.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\api.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\descriptor.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\duration.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\empty.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\field_mask.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\source_context.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\struct.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\timestamp.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\any.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\type.proto" target="tools\google\protobuf" />
-	<file src="..\..\..\src\google\protobuf\wrappers.proto" target="tools\google\protobuf" />
+    <file src="bin/ReleaseSigned/Google.Protobuf.pdb" target="lib/portable-net45+netcore45+wpa81+wp8" />
+    <file src="bin/ReleaseSigned/Google.Protobuf.xml" target="lib/portable-net45+netcore45+wpa81+wp8" />
+    <file src="bin/ReleaseSigned/Google.Protobuf.dll" target="lib/dotnet" />
+    <file src="bin/ReleaseSigned/Google.Protobuf.pdb" target="lib/dotnet" />
+    <file src="bin/ReleaseSigned/Google.Protobuf.xml" target="lib/dotnet" />
+    <file src="**\*.cs" target="src" />
   </files>
 </package>

+ 69 - 0
csharp/src/Google.Protobuf/ICustomDiagnosticMessage.cs

@@ -0,0 +1,69 @@
+#region Copyright notice and license
+// Protocol Buffers - Google's data interchange format
+// Copyright 2016 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#endregion
+
+namespace Google.Protobuf
+{
+    /// <summary>
+    /// A message type that has a custom string format for diagnostic purposes.
+    /// </summary>
+    /// <remarks>
+    /// <para>
+    /// Calling <see cref="object.ToString"/> on a generated message type normally
+    /// returns the JSON representation. If a message type implements this interface,
+    /// then the <see cref="ToDiagnosticString"/> method will be called instead of the regular
+    /// JSON formatting code, but only when <c>ToString()</c> is called either on the message itself
+    /// or on another message which contains it. This does not affect the normal JSON formatting of
+    /// the message.
+    /// </para>
+    /// <para>
+    /// For example, if you create a proto message representing a GUID, the internal
+    /// representation may be a <c>bytes</c> field or four <c>fixed32</c> fields. However, when debugging
+    /// it may be more convenient to see a result in the same format as <see cref="System.Guid"/> provides.
+    /// </para>
+    /// <para>This interface extends <see cref="IMessage"/> to avoid it accidentally being implemented
+    /// on types other than messages, where it would not be used by anything in the framework.</para>
+    /// </remarks>
+    public interface ICustomDiagnosticMessage : IMessage
+    {
+        /// <summary>
+        /// Returns a string representation of this object, for diagnostic purposes.
+        /// </summary>
+        /// <remarks>
+        /// This method is called when a message is formatted as part of a <see cref="object.ToString"/>
+        /// call. It does not affect the JSON representation used by <see cref="JsonFormatter"/> other than
+        /// in calls to <see cref="JsonFormatter.ToDiagnosticString(IMessage)"/>. While it is recommended
+        /// that the result is valid JSON, this is never assumed by the Protobuf library.
+        /// </remarks>
+        /// <returns>A string representation of this object, for diagnostic purposes.</returns>
+        string ToDiagnosticString();
+    }
+}

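Because generated message classes are partial, ICustomDiagnosticMessage is typically implemented in a hand-written partial class next to the generated one. A hedged sketch, assuming a hypothetical generated message GuidValue with fields DataHigh and DataLow:

    using Google.Protobuf;

    namespace Sample
    {
        // GuidValue, DataHigh and DataLow are hypothetical; the generated partial provides the rest.
        public partial class GuidValue : ICustomDiagnosticMessage
        {
            // Called by ToString() in place of the JSON representation.
            public string ToDiagnosticString() => $"{DataHigh:x16}{DataLow:x16}";
        }
    }
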
+ 53 - 0
csharp/src/Google.Protobuf/InvalidJsonException.cs

@@ -0,0 +1,53 @@
+#region Copyright notice and license
+// Protocol Buffers - Google's data interchange format
+// Copyright 2015 Google Inc.  All rights reserved.
+// https://developers.google.com/protocol-buffers/
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#endregion
+
+using System.IO;
+
+namespace Google.Protobuf
+{
+    /// <summary>
+    /// Thrown when an attempt is made to parse invalid JSON, e.g. using
+    /// a non-string property key, or including a redundant comma. Parsing a protocol buffer
+    /// message represented in JSON using <see cref="JsonParser"/> can throw both this
+    /// exception and <see cref="InvalidProtocolBufferException"/> depending on the situation. This
+    /// exception is only thrown for "pure JSON" errors, whereas <c>InvalidProtocolBufferException</c>
+    /// is thrown when the JSON may be valid in and of itself, but cannot be parsed as a protocol buffer
+    /// message.
+    /// </summary>
+    public sealed class InvalidJsonException : IOException
+    {
+        internal InvalidJsonException(string message)
+            : base(message)
+        {
+        }
+    }
+}

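A short sketch of the distinction described above, parsing a hypothetical generated message type MyMessage with JsonParser:

    using Google.Protobuf;

    // MyMessage is a hypothetical generated message type.
    string json = "{ \"name\": \"example\" }";
    try
    {
        var message = JsonParser.Default.Parse<MyMessage>(json);
    }
    catch (InvalidJsonException)
    {
        // The text was not valid JSON at all (e.g. a trailing comma or unquoted key).
    }
    catch (InvalidProtocolBufferException)
    {
        // The JSON was well-formed but could not be interpreted as a MyMessage.
    }
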
Some files were not shown because too many files changed in this diff