author     Hyun Jae Moon <hyunjaemoon@google.com>    2023-04-12 21:42:41 +0000
committer  Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>    2023-04-12 21:42:41 +0000
commit     0218c368cf308b3a6c278a23e6290ecb5d2c2602 (patch)
tree       fa8a40d12aebe0612e575dfb429c834b5d3a6702
parent     3ed0bf60b2bae007313fb69a2e25c446774c5f99 (diff)
parent     e42e8d5c1c8850cdc88ddf6e44457c43d4af0452 (diff)
download   protobuf-json-mapping-0218c368cf308b3a6c278a23e6290ecb5d2c2602.tar.gz
Import protobuf-json-mapping crate. am: bb8920e11b am: daeeea76da am: 52c5d139a8 am: e42e8d5c1c
Original change: https://android-review.googlesource.com/c/platform/external/rust/crates/protobuf-json-mapping/+/2514115
Change-Id: I7219d31f79e95f90e95f6c777f4de382f0f9fee3
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
-rw-r--r--  .gitignore                 |   2
-rw-r--r--  Android.bp                 |  45
-rw-r--r--  Cargo.toml                 |  39
-rw-r--r--  Cargo.toml.orig            |  25
-rw-r--r--  LICENSE                    |  19
-rw-r--r--  METADATA                   |  17
-rw-r--r--  MODULE_LICENSE_MIT         |   0
l---------  NOTICE                     |   1
-rw-r--r--  OWNERS                     |   1
-rw-r--r--  README.md                  |   6
-rw-r--r--  src/base64.rs              | 227
-rw-r--r--  src/float.rs               |   3
-rw-r--r--  src/lib.rs                 |  22
-rw-r--r--  src/parse.rs               | 914
-rw-r--r--  src/print.rs               | 586
-rw-r--r--  src/rfc_3339.rs            | 585
-rw-r--r--  src/well_known_wrapper.rs  | 128
17 files changed, 2620 insertions, 0 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..96ef6c0
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+/target
+Cargo.lock
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..bb5293f
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,45 @@
+// This file is generated by cargo2android.py --run --device --tests.
+// Do not modify this file as changes will be overridden on upgrade.
+
+
+
+rust_library {
+ name: "libprotobuf_json_mapping",
+ host_supported: true,
+ crate_name: "protobuf_json_mapping",
+ cargo_env_compat: true,
+ cargo_pkg_version: "3.2.0",
+ srcs: ["src/lib.rs"],
+ edition: "2021",
+ rustlibs: [
+ "libprotobuf",
+ "libprotobuf_support",
+ "libthiserror",
+ ],
+ apex_available: [
+ "//apex_available:platform",
+ "//apex_available:anyapex",
+ ],
+ product_available: true,
+ vendor_available: true,
+}
+
+rust_test {
+ name: "protobuf-json-mapping_test_src_lib",
+ host_supported: true,
+ crate_name: "protobuf_json_mapping",
+ cargo_env_compat: true,
+ cargo_pkg_version: "3.2.0",
+ srcs: ["src/lib.rs"],
+ test_suites: ["general-tests"],
+ auto_gen_config: true,
+ test_options: {
+ unit_test: true,
+ },
+ edition: "2021",
+ rustlibs: [
+ "libprotobuf",
+ "libprotobuf_support",
+ "libthiserror",
+ ],
+}
diff --git a/Cargo.toml b/Cargo.toml
new file mode 100644
index 0000000..965427b
--- /dev/null
+++ b/Cargo.toml
@@ -0,0 +1,39 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "protobuf-json-mapping"
+version = "3.2.0"
+authors = ["Stepan Koltsov <stepan.koltsov@gmail.com>"]
+description = """
+JSON mapping for Protocol Buffers messages.
+"""
+homepage = "https://github.com/stepancheg/rust-protobuf/"
+documentation = "https://github.com/stepancheg/rust-protobuf/blob/master/README.md"
+readme = "README.md"
+license = "MIT"
+repository = "https://github.com/stepancheg/rust-protobuf/"
+
+[package.metadata.docs.rs]
+all-features = true
+
+[lib]
+bench = false
+
+[dependencies.protobuf]
+version = "=3.2.0"
+
+[dependencies.protobuf-support]
+version = "=3.2.0"
+
+[dependencies.thiserror]
+version = "1.0.30"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
new file mode 100644
index 0000000..be24813
--- /dev/null
+++ b/Cargo.toml.orig
@@ -0,0 +1,25 @@
+[package]
+
+name = "protobuf-json-mapping"
+version = "3.2.0"
+authors = ["Stepan Koltsov <stepan.koltsov@gmail.com>"]
+edition = "2021"
+license = "MIT"
+homepage = "https://github.com/stepancheg/rust-protobuf/"
+repository = "https://github.com/stepancheg/rust-protobuf/"
+documentation = "https://github.com/stepancheg/rust-protobuf/blob/master/README.md"
+description = """
+JSON mapping for Protocol Buffers messages.
+"""
+
+[lib]
+bench = false
+
+[dependencies]
+thiserror = "1.0.30"
+
+protobuf = { path = "../protobuf", version = "=3.2.0" }
+protobuf-support = { path = "../protobuf-support", version = "=3.2.0" }
+
+[package.metadata.docs.rs]
+all-features = true
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..21f30fa
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2019 Stepan Koltsov
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..248872e
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,17 @@
+name: "protobuf-json-mapping"
+description:
+ "JSON printer and parser which tries to follow protobuf conventions."
+
+third_party {
+ url {
+ type: HOMEPAGE
+ value: "https://crates.io/crates/protobuf-json-mapping"
+ }
+ url {
+ type: ARCHIVE
+ value: "https://static.crates.io/crates/protobuf-json-mapping/protobuf-json-mapping-3.2.0.crate"
+ }
+ version: "3.2.0"
+ last_upgrade_date { year: 2023 month: 3 day: 22 }
+ license_type: NOTICE
+}
diff --git a/MODULE_LICENSE_MIT b/MODULE_LICENSE_MIT
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/MODULE_LICENSE_MIT
diff --git a/NOTICE b/NOTICE
new file mode 120000
index 0000000..7a694c9
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1 @@
+LICENSE
\ No newline at end of file
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..45dc4dd
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1 @@
+include platform/prebuilts/rust:master:/OWNERS
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..b6b05a0
--- /dev/null
+++ b/README.md
@@ -0,0 +1,6 @@
+<!-- cargo-sync-readme start -->
+
+JSON printer and parser which tries to follow
+[protobuf conventions](https://developers.google.com/protocol-buffers/docs/proto3#json).
+
+<!-- cargo-sync-readme end -->
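For reference, a minimal round-trip sketch of the crate being imported, assuming a protobuf-codegen-generated message type `MyMessage` (hypothetical, not part of this change); it uses the `print_to_string` and `parse_from_str` entry points exported from src/lib.rs below:

    use protobuf_json_mapping::{parse_from_str, print_to_string};

    // `MyMessage` is a hypothetical generated type implementing `MessageFull`.
    fn round_trip(msg: &MyMessage) -> Result<MyMessage, Box<dyn std::error::Error>> {
        // Serialize to proto3 JSON (lowerCamelCase field names by default).
        let json = print_to_string(msg)?;
        // Parse the JSON back into a typed message.
        let parsed: MyMessage = parse_from_str(&json)?;
        Ok(parsed)
    }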
diff --git a/src/base64.rs b/src/base64.rs
new file mode 100644
index 0000000..c8461c6
--- /dev/null
+++ b/src/base64.rs
@@ -0,0 +1,227 @@
+//! Copy-pasted from the internet
+/// Available encoding character sets
+#[derive(Clone, Copy, Debug)]
+enum _CharacterSet {
+ /// The standard character set (uses `+` and `/`)
+ _Standard,
+ /// The URL safe character set (uses `-` and `_`)
+ _UrlSafe,
+}
+
+static STANDARD_CHARS: &'static [u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\
+ abcdefghijklmnopqrstuvwxyz\
+ 0123456789+/";
+
+static _URLSAFE_CHARS: &'static [u8] = b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\
+ abcdefghijklmnopqrstuvwxyz\
+ 0123456789-_";
+
+pub fn encode(input: &[u8]) -> String {
+ let bytes = STANDARD_CHARS;
+
+ let len = input.len();
+
+ // Preallocate memory.
+ let prealloc_len = (len + 2) / 3 * 4;
+ let mut out_bytes = vec![b'='; prealloc_len];
+
+ // Deal with padding bytes
+ let mod_len = len % 3;
+
+ // Use iterators to reduce branching
+ {
+ let mut s_in = input[..len - mod_len].iter().map(|&x| x as u32);
+ let mut s_out = out_bytes.iter_mut();
+
+ // Convenient shorthand
+ let enc = |val| bytes[val as usize];
+ let mut write = |val| *s_out.next().unwrap() = val;
+
+ // Iterate through the input in 3-byte blocks (4 output chars each)
+ while let (Some(first), Some(second), Some(third)) = (s_in.next(), s_in.next(), s_in.next())
+ {
+ let n = first << 16 | second << 8 | third;
+
+ // This 24-bit number gets separated into four 6-bit numbers.
+ write(enc((n >> 18) & 63));
+ write(enc((n >> 12) & 63));
+ write(enc((n >> 6) & 63));
+ write(enc((n >> 0) & 63));
+ }
+
+ // Heh, would be cool if we knew this was exhaustive
+ // (the dream of bounded integer types)
+ match mod_len {
+ 0 => (),
+ 1 => {
+ let n = (input[len - 1] as u32) << 16;
+ write(enc((n >> 18) & 63));
+ write(enc((n >> 12) & 63));
+ }
+ 2 => {
+ let n = (input[len - 2] as u32) << 16 | (input[len - 1] as u32) << 8;
+ write(enc((n >> 18) & 63));
+ write(enc((n >> 12) & 63));
+ write(enc((n >> 6) & 63));
+ }
+ _ => panic!("Algebra is broken, please alert the math police"),
+ }
+ }
+
+ // `out_bytes` vec is prepopulated with `=` symbols and then only updated
+ // with base64 chars, so this unsafe is safe.
+ unsafe { String::from_utf8_unchecked(out_bytes) }
+}
+
+/// Errors that can occur when decoding a base64 encoded string
+#[derive(Clone, Copy, Debug, thiserror::Error)]
+pub enum FromBase64Error {
+ /// The input contained a character not part of the base64 format
+ #[error("Invalid base64 byte")]
+ InvalidBase64Byte(u8, usize),
+ /// The input had an invalid length
+ #[error("Invalid base64 length")]
+ InvalidBase64Length,
+}
+
+pub fn decode(input: &str) -> Result<Vec<u8>, FromBase64Error> {
+ let mut r = Vec::with_capacity(input.len());
+ let mut buf: u32 = 0;
+ let mut modulus = 0;
+
+ let mut it = input.as_bytes().iter();
+ for byte in it.by_ref() {
+ let code = DECODE_TABLE[*byte as usize];
+ if code >= SPECIAL_CODES_START {
+ match code {
+ NEWLINE_CODE => continue,
+ EQUALS_CODE => break,
+ INVALID_CODE => {
+ return Err(FromBase64Error::InvalidBase64Byte(
+ *byte,
+ (byte as *const _ as usize) - input.as_ptr() as usize,
+ ))
+ }
+ _ => unreachable!(),
+ }
+ }
+ buf = (buf | code as u32) << 6;
+ modulus += 1;
+ if modulus == 4 {
+ modulus = 0;
+ r.push((buf >> 22) as u8);
+ r.push((buf >> 14) as u8);
+ r.push((buf >> 6) as u8);
+ }
+ }
+
+ for byte in it {
+ match *byte {
+ b'=' | b'\r' | b'\n' => continue,
+ _ => {
+ return Err(FromBase64Error::InvalidBase64Byte(
+ *byte,
+ (byte as *const _ as usize) - input.as_ptr() as usize,
+ ))
+ }
+ }
+ }
+
+ match modulus {
+ 2 => {
+ r.push((buf >> 10) as u8);
+ }
+ 3 => {
+ r.push((buf >> 16) as u8);
+ r.push((buf >> 8) as u8);
+ }
+ 0 => (),
+ _ => return Err(FromBase64Error::InvalidBase64Length),
+ }
+
+ Ok(r)
+}
+
+const DECODE_TABLE: [u8; 256] = [
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFD, 0xFF, 0xFF, 0xFD, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x3E, 0xFF, 0x3E, 0xFF, 0x3F,
+ 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3A, 0x3B, 0x3C, 0x3D, 0xFF, 0xFF, 0xFF, 0xFE, 0xFF, 0xFF,
+ 0xFF, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E,
+ 0x0F, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0xFF, 0xFF, 0xFF, 0xFF, 0x3F,
+ 0xFF, 0x1A, 0x1B, 0x1C, 0x1D, 0x1E, 0x1F, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
+ 0x29, 0x2A, 0x2B, 0x2C, 0x2D, 0x2E, 0x2F, 0x30, 0x31, 0x32, 0x33, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+];
+const INVALID_CODE: u8 = 0xFF;
+const EQUALS_CODE: u8 = 0xFE;
+const NEWLINE_CODE: u8 = 0xFD;
+const SPECIAL_CODES_START: u8 = NEWLINE_CODE;
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_encode_basic() {
+ assert_eq!(encode(b""), "");
+ assert_eq!(encode(b"f"), "Zg==");
+ assert_eq!(encode(b"fo"), "Zm8=");
+ assert_eq!(encode(b"foo"), "Zm9v");
+ assert_eq!(encode(b"foob"), "Zm9vYg==");
+ assert_eq!(encode(b"fooba"), "Zm9vYmE=");
+ assert_eq!(encode(b"foobar"), "Zm9vYmFy");
+ }
+
+ #[test]
+ fn test_encode_standard_safe() {
+ assert_eq!(encode(&[251, 255]), "+/8=");
+ }
+
+ #[test]
+ fn test_decode_basic() {
+ assert_eq!(decode("").unwrap(), b"");
+ assert_eq!(decode("Zg==").unwrap(), b"f");
+ assert_eq!(decode("Zm8=").unwrap(), b"fo");
+ assert_eq!(decode("Zm9v").unwrap(), b"foo");
+ assert_eq!(decode("Zm9vYg==").unwrap(), b"foob");
+ assert_eq!(decode("Zm9vYmE=").unwrap(), b"fooba");
+ assert_eq!(decode("Zm9vYmFy").unwrap(), b"foobar");
+ }
+
+ #[test]
+ fn test_decode() {
+ assert_eq!(decode("Zm9vYmFy").unwrap(), b"foobar");
+ }
+
+ #[test]
+ fn test_decode_newlines() {
+ assert_eq!(decode("Zm9v\r\nYmFy").unwrap(), b"foobar");
+ assert_eq!(decode("Zm9vYg==\r\n").unwrap(), b"foob");
+ assert_eq!(decode("Zm9v\nYmFy").unwrap(), b"foobar");
+ assert_eq!(decode("Zm9vYg==\n").unwrap(), b"foob");
+ }
+
+ #[test]
+ fn test_decode_urlsafe() {
+ assert_eq!(decode("-_8").unwrap(), decode("+/8=").unwrap());
+ }
+
+ #[test]
+ fn test_from_base64_invalid_char() {
+ assert!(decode("Zm$=").is_err());
+ assert!(decode("Zg==$").is_err());
+ }
+
+ #[test]
+ fn test_decode_invalid_padding() {
+ assert!(decode("Z===").is_err());
+ }
+}
diff --git a/src/float.rs b/src/float.rs
new file mode 100644
index 0000000..a082933
--- /dev/null
+++ b/src/float.rs
@@ -0,0 +1,3 @@
+pub const PROTOBUF_JSON_NAN: &str = "NaN";
+pub const PROTOBUF_JSON_INF: &str = "Infinity";
+pub const PROTOBUF_JSON_MINUS_INF: &str = "-Infinity";
diff --git a/src/lib.rs b/src/lib.rs
new file mode 100644
index 0000000..1f152c4
--- /dev/null
+++ b/src/lib.rs
@@ -0,0 +1,22 @@
+//! JSON printer and parser which tries to follow
+//! [protobuf conventions](https://developers.google.com/protocol-buffers/docs/proto3#json).
+
+mod base64;
+mod float;
+mod parse;
+mod print;
+mod rfc_3339;
+mod well_known_wrapper;
+
+pub use self::parse::merge_from_str;
+pub use self::parse::merge_from_str_with_options;
+pub use self::parse::parse_dyn_from_str;
+pub use self::parse::parse_dyn_from_str_with_options;
+pub use self::parse::parse_from_str;
+pub use self::parse::parse_from_str_with_options;
+pub use self::parse::ParseError;
+pub use self::parse::ParseOptions;
+pub use self::print::print_to_string;
+pub use self::print::print_to_string_with_options;
+pub use self::print::PrintError;
+pub use self::print::PrintOptions;
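The `parse_dyn_*` re-exports above drive parsing through reflection rather than a concrete generated type. A small sketch, again assuming a hypothetical generated `MyMessage`:

    use protobuf::{MessageDyn, MessageFull};
    use protobuf_json_mapping::{parse_dyn_from_str, ParseError};

    // Parse JSON into a boxed dynamic message via the message descriptor.
    fn parse_untyped(json: &str) -> Result<Box<dyn MessageDyn>, ParseError> {
        let descriptor = MyMessage::descriptor();
        parse_dyn_from_str(&descriptor, json)
    }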
diff --git a/src/parse.rs b/src/parse.rs
new file mode 100644
index 0000000..e09fc57
--- /dev/null
+++ b/src/parse.rs
@@ -0,0 +1,914 @@
+use std::num::ParseFloatError;
+use std::num::ParseIntError;
+
+use protobuf::reflect::EnumDescriptor;
+use protobuf::reflect::EnumValueDescriptor;
+use protobuf::reflect::FieldDescriptor;
+use protobuf::reflect::MessageDescriptor;
+use protobuf::reflect::ReflectValueBox;
+use protobuf::reflect::RuntimeFieldType;
+use protobuf::reflect::RuntimeType;
+use protobuf::well_known_types::any::Any;
+use protobuf::well_known_types::duration::Duration;
+use protobuf::well_known_types::field_mask::FieldMask;
+use protobuf::well_known_types::struct_;
+use protobuf::well_known_types::struct_::ListValue;
+use protobuf::well_known_types::struct_::NullValue;
+use protobuf::well_known_types::struct_::Struct;
+use protobuf::well_known_types::struct_::Value;
+use protobuf::well_known_types::timestamp::Timestamp;
+use protobuf::well_known_types::wrappers::BoolValue;
+use protobuf::well_known_types::wrappers::BytesValue;
+use protobuf::well_known_types::wrappers::DoubleValue;
+use protobuf::well_known_types::wrappers::FloatValue;
+use protobuf::well_known_types::wrappers::Int32Value;
+use protobuf::well_known_types::wrappers::Int64Value;
+use protobuf::well_known_types::wrappers::StringValue;
+use protobuf::well_known_types::wrappers::UInt32Value;
+use protobuf::well_known_types::wrappers::UInt64Value;
+use protobuf::Enum;
+use protobuf::MessageDyn;
+use protobuf::MessageFull;
+use protobuf_support::lexer::json_number_lit::JsonNumberLit;
+use protobuf_support::lexer::lexer_impl::Lexer;
+use protobuf_support::lexer::lexer_impl::LexerError;
+use protobuf_support::lexer::loc::Loc;
+use protobuf_support::lexer::parser_language::ParserLanguage;
+use protobuf_support::lexer::token::Token;
+use protobuf_support::lexer::tokenizer::Tokenizer;
+use protobuf_support::lexer::tokenizer::TokenizerError;
+
+use super::base64;
+use super::float;
+use super::rfc_3339;
+use crate::base64::FromBase64Error;
+use crate::well_known_wrapper::WellKnownWrapper;
+
+#[derive(Debug, thiserror::Error)]
+enum ParseErrorWithoutLocInner {
+ #[error(transparent)]
+ TokenizerError(#[from] TokenizerError),
+ #[error("Unknown field name: `{}`", .0)]
+ UnknownFieldName(String),
+ #[error("Unknown enum variant name: `{}`", .0)]
+ UnknownEnumVariantName(String),
+ #[error(transparent)]
+ FromBase64Error(#[from] FromBase64Error),
+ #[error(transparent)]
+ IncorrectStrLit(#[from] LexerError),
+ #[error("Incorrect duration")]
+ IncorrectDuration,
+ #[error(transparent)]
+ Rfc3339(#[from] rfc_3339::Rfc3339ParseError),
+ #[error(transparent)]
+ ParseIntError(#[from] ParseIntError),
+ #[error(transparent)]
+ ParseFloatError(#[from] ParseFloatError),
+ #[error("Expecting bool")]
+ ExpectingBool,
+ #[error("Expecting string or integer")]
+ ExpectingStrOrInt,
+ #[error("Expecting number")]
+ ExpectingNumber,
+ #[error("Unexpected token")]
+ UnexpectedToken,
+ #[error("Any parsing is not implemented")]
+ AnyParsingIsNotImplemented,
+ #[error("Message not initialized")]
+ MessageNotInitialized,
+}
+
+/// JSON parse error.
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+struct ParseErrorWithoutLoc(ParseErrorWithoutLocInner);
+
+impl From<TokenizerError> for ParseErrorWithoutLoc {
+ fn from(e: TokenizerError) -> Self {
+ ParseErrorWithoutLoc(ParseErrorWithoutLocInner::TokenizerError(e))
+ }
+}
+
+impl From<FromBase64Error> for ParseErrorWithoutLoc {
+ fn from(e: FromBase64Error) -> Self {
+ ParseErrorWithoutLoc(ParseErrorWithoutLocInner::FromBase64Error(e))
+ }
+}
+
+impl From<ParseIntError> for ParseErrorWithoutLoc {
+ fn from(e: ParseIntError) -> Self {
+ ParseErrorWithoutLoc(ParseErrorWithoutLocInner::ParseIntError(e))
+ }
+}
+
+impl From<ParseFloatError> for ParseErrorWithoutLoc {
+ fn from(e: ParseFloatError) -> Self {
+ ParseErrorWithoutLoc(ParseErrorWithoutLocInner::ParseFloatError(e))
+ }
+}
+
+impl From<rfc_3339::Rfc3339ParseError> for ParseErrorWithoutLoc {
+ fn from(e: rfc_3339::Rfc3339ParseError) -> Self {
+ ParseErrorWithoutLoc(ParseErrorWithoutLocInner::Rfc3339(e))
+ }
+}
+
+/// JSON parse error
+#[derive(Debug, thiserror::Error)]
+#[error("{} at {}", error, loc)]
+pub struct ParseError {
+ error: ParseErrorWithoutLoc,
+ loc: Loc,
+}
+
+type ParseResultWithoutLoc<A> = Result<A, ParseErrorWithoutLoc>;
+type ParseResult<A> = Result<A, ParseError>;
+
+#[derive(Clone)]
+struct Parser<'a> {
+ tokenizer: Tokenizer<'a>,
+ parse_options: ParseOptions,
+}
+
+trait FromJsonNumber: PartialEq + Sized {
+ fn from_f64(v: f64) -> Self;
+ fn to_f64(&self) -> f64;
+ fn from_string(v: &str) -> ParseResultWithoutLoc<Self>;
+}
+
+impl FromJsonNumber for u32 {
+ fn from_f64(v: f64) -> Self {
+ v as u32
+ }
+
+ fn to_f64(&self) -> f64 {
+ *self as f64
+ }
+
+ fn from_string(v: &str) -> Result<Self, ParseErrorWithoutLoc> {
+ Ok(v.parse()?)
+ }
+}
+
+impl FromJsonNumber for u64 {
+ fn from_f64(v: f64) -> Self {
+ v as u64
+ }
+
+ fn to_f64(&self) -> f64 {
+ *self as f64
+ }
+
+ fn from_string(v: &str) -> Result<Self, ParseErrorWithoutLoc> {
+ Ok(v.parse()?)
+ }
+}
+
+impl FromJsonNumber for i32 {
+ fn from_f64(v: f64) -> Self {
+ v as i32
+ }
+
+ fn to_f64(&self) -> f64 {
+ *self as f64
+ }
+
+ fn from_string(v: &str) -> Result<Self, ParseErrorWithoutLoc> {
+ Ok(v.parse()?)
+ }
+}
+
+impl FromJsonNumber for i64 {
+ fn from_f64(v: f64) -> Self {
+ v as i64
+ }
+
+ fn to_f64(&self) -> f64 {
+ *self as f64
+ }
+
+ fn from_string(v: &str) -> Result<Self, ParseErrorWithoutLoc> {
+ Ok(v.parse()?)
+ }
+}
+
+impl FromJsonNumber for f32 {
+ fn from_f64(v: f64) -> Self {
+ v as f32
+ }
+
+ fn to_f64(&self) -> f64 {
+ *self as f64
+ }
+
+ fn from_string(v: &str) -> Result<Self, ParseErrorWithoutLoc> {
+ if v == float::PROTOBUF_JSON_INF {
+ Ok(f32::INFINITY)
+ } else if v == float::PROTOBUF_JSON_MINUS_INF {
+ Ok(f32::NEG_INFINITY)
+ } else if v == float::PROTOBUF_JSON_NAN {
+ Ok(f32::NAN)
+ } else {
+ Ok(v.parse()?)
+ }
+ }
+}
+
+impl FromJsonNumber for f64 {
+ fn from_f64(v: f64) -> Self {
+ v
+ }
+
+ fn to_f64(&self) -> f64 {
+ *self
+ }
+
+ fn from_string(v: &str) -> Result<Self, ParseErrorWithoutLoc> {
+ if v == float::PROTOBUF_JSON_INF {
+ Ok(f64::INFINITY)
+ } else if v == float::PROTOBUF_JSON_MINUS_INF {
+ Ok(f64::NEG_INFINITY)
+ } else if v == float::PROTOBUF_JSON_NAN {
+ Ok(f64::NAN)
+ } else {
+ Ok(v.parse()?)
+ }
+ }
+}
+
+impl<'a> Parser<'a> {
+ fn read_bool(&mut self) -> ParseResultWithoutLoc<bool> {
+ if self.tokenizer.next_ident_if_eq("true")? {
+ Ok(true)
+ } else if self.tokenizer.next_ident_if_eq("false")? {
+ Ok(false)
+ } else {
+ Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::ExpectingBool,
+ ))
+ }
+ }
+
+ fn parse_bool(&self, s: &str) -> ParseResultWithoutLoc<bool> {
+ if s == "true" {
+ Ok(true)
+ } else if s == "false" {
+ Ok(false)
+ } else {
+ Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::ExpectingBool,
+ ))
+ }
+ }
+
+ fn read_json_number_opt(&mut self) -> ParseResultWithoutLoc<Option<JsonNumberLit>> {
+ Ok(self.tokenizer.next_token_if_map(|t| match t {
+ Token::JsonNumber(v) => Some(v.clone()),
+ _ => None,
+ })?)
+ }
+
+ fn read_number<V: FromJsonNumber>(&mut self) -> ParseResultWithoutLoc<V> {
+ if let Some(v) = self.read_json_number_opt()? {
+ V::from_string(&v.0)
+ } else if self.tokenizer.lookahead_is_str_lit()? {
+ let v = self.read_string()?;
+ self.parse_number(&v)
+ } else {
+ Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::ExpectingNumber,
+ ))
+ }
+ }
+
+ fn parse_number<V: FromJsonNumber>(&self, s: &str) -> ParseResultWithoutLoc<V> {
+ V::from_string(s)
+ }
+
+ fn merge_wrapper<W>(&mut self, w: &mut W) -> ParseResultWithoutLoc<()>
+ where
+ W: WellKnownWrapper,
+ W::Underlying: FromJsonNumber,
+ {
+ *w.get_mut() = self.read_number()?;
+ Ok(())
+ }
+
+ fn merge_bool_value(&mut self, w: &mut BoolValue) -> ParseResultWithoutLoc<()> {
+ w.value = self.read_bool()?;
+ Ok(())
+ }
+
+ fn merge_string_value(&mut self, w: &mut StringValue) -> ParseResultWithoutLoc<()> {
+ w.value = self.read_string()?;
+ Ok(())
+ }
+
+ fn merge_bytes_value(&mut self, w: &mut BytesValue) -> ParseResultWithoutLoc<()> {
+ w.value = self.read_bytes()?;
+ Ok(())
+ }
+
+ fn read_u32(&mut self) -> ParseResultWithoutLoc<u32> {
+ self.read_number()
+ }
+
+ fn read_u64(&mut self) -> ParseResultWithoutLoc<u64> {
+ self.read_number()
+ }
+
+ fn read_i32(&mut self) -> ParseResultWithoutLoc<i32> {
+ self.read_number()
+ }
+
+ fn read_i64(&mut self) -> ParseResultWithoutLoc<i64> {
+ self.read_number()
+ }
+
+ fn read_f32(&mut self) -> ParseResultWithoutLoc<f32> {
+ self.read_number()
+ }
+
+ fn read_f64(&mut self) -> ParseResultWithoutLoc<f64> {
+ self.read_number()
+ }
+
+ fn read_string(&mut self) -> ParseResultWithoutLoc<String> {
+ let str_lit = self.tokenizer.next_str_lit()?;
+
+ let mut lexer = Lexer::new(&str_lit.escaped, ParserLanguage::Json);
+ let mut r = String::new();
+ while !lexer.eof() {
+ r.push(
+ lexer
+ .next_json_char_value()
+ .map_err(ParseErrorWithoutLocInner::IncorrectStrLit)
+ .map_err(ParseErrorWithoutLoc)?,
+ );
+ }
+ Ok(r)
+ }
+
+ fn read_bytes(&mut self) -> ParseResultWithoutLoc<Vec<u8>> {
+ let s = self.read_string()?;
+ self.parse_bytes(&s)
+ }
+
+ fn parse_bytes(&self, s: &str) -> ParseResultWithoutLoc<Vec<u8>> {
+ Ok(base64::decode(s)?)
+ }
+
+ fn read_enum(&mut self, descriptor: &EnumDescriptor) -> ParseResultWithoutLoc<i32> {
+ if descriptor.is::<NullValue>() {
+ return Ok(self.read_wk_null_value()?.value());
+ }
+
+ if self.tokenizer.lookahead_is_str_lit()? {
+ let name = self.read_string()?;
+ Ok(self.parse_enum(name, descriptor)?.value())
+ } else if self.tokenizer.lookahead_is_json_number()? {
+ self.read_i32()
+ } else {
+ Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::ExpectingStrOrInt,
+ ))
+ }
+ }
+
+ fn parse_enum(
+ &self,
+ name: String,
+ descriptor: &EnumDescriptor,
+ ) -> ParseResultWithoutLoc<EnumValueDescriptor> {
+ match descriptor.value_by_name(&name) {
+ Some(v) => Ok(v),
+ None => Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::UnknownEnumVariantName(name),
+ )),
+ }
+ }
+
+ fn read_wk_null_value(&mut self) -> ParseResultWithoutLoc<NullValue> {
+ self.tokenizer.next_ident_expect_eq("null")?;
+ Ok(NullValue::NULL_VALUE)
+ }
+
+ fn read_message(
+ &mut self,
+ descriptor: &MessageDescriptor,
+ ) -> ParseResultWithoutLoc<Box<dyn MessageDyn>> {
+ let mut m = descriptor.new_instance();
+ self.merge_inner(&mut *m)?;
+ Ok(m)
+ }
+
+ fn read_value(&mut self, t: &RuntimeType) -> ParseResultWithoutLoc<ReflectValueBox> {
+ match t {
+ RuntimeType::I32 => self.read_i32().map(ReflectValueBox::from),
+ RuntimeType::I64 => self.read_i64().map(ReflectValueBox::from),
+ RuntimeType::U32 => self.read_u32().map(ReflectValueBox::from),
+ RuntimeType::U64 => self.read_u64().map(ReflectValueBox::from),
+ RuntimeType::F32 => self.read_f32().map(ReflectValueBox::from),
+ RuntimeType::F64 => self.read_f64().map(ReflectValueBox::from),
+ RuntimeType::Bool => self.read_bool().map(ReflectValueBox::from),
+ RuntimeType::String => self.read_string().map(ReflectValueBox::from),
+ RuntimeType::VecU8 => self.read_bytes().map(ReflectValueBox::from),
+ RuntimeType::Enum(e) => self
+ .read_enum(&e)
+ .map(|v| ReflectValueBox::Enum(e.clone(), v)),
+ RuntimeType::Message(m) => self.read_message(&m).map(ReflectValueBox::from),
+ }
+ }
+
+ fn merge_singular_field(
+ &mut self,
+ message: &mut dyn MessageDyn,
+ field: &FieldDescriptor,
+ t: &RuntimeType,
+ ) -> ParseResultWithoutLoc<()> {
+ field.set_singular_field(message, self.read_value(t)?);
+ Ok(())
+ }
+
+ fn read_list<C>(&mut self, mut read_item: C) -> ParseResultWithoutLoc<()>
+ where
+ C: for<'b> FnMut(&'b mut Self) -> ParseResultWithoutLoc<()>,
+ {
+ if self.tokenizer.next_ident_if_eq("null")? {
+ return Ok(());
+ }
+
+ // TODO: better error reporting on wrong field type
+ self.tokenizer.next_symbol_expect_eq('[', "list")?;
+ let mut first = true;
+ while !self.tokenizer.next_symbol_if_eq(']')? {
+ if !first {
+ self.tokenizer.next_symbol_expect_eq(',', "list")?;
+ }
+ first = false;
+
+ read_item(self)?;
+ }
+
+ Ok(())
+ }
+
+ fn merge_repeated_field(
+ &mut self,
+ message: &mut dyn MessageDyn,
+ field: &FieldDescriptor,
+ t: &RuntimeType,
+ ) -> ParseResultWithoutLoc<()> {
+ let mut repeated = field.mut_repeated(message);
+ repeated.clear();
+
+ self.read_list(|s| {
+ repeated.push(s.read_value(t)?);
+ Ok(())
+ })
+ }
+
+ fn merge_wk_list_value(&mut self, list: &mut ListValue) -> ParseResultWithoutLoc<()> {
+ list.values.clear();
+
+ self.read_list(|s| {
+ list.values.push(s.read_wk_value()?);
+ Ok(())
+ })
+ }
+
+ fn read_map<K, Fk, Fi>(
+ &mut self,
+ mut parse_key: Fk,
+ mut read_value_and_insert: Fi,
+ ) -> ParseResultWithoutLoc<()>
+ where
+ Fk: for<'b> FnMut(&Self, String) -> ParseResultWithoutLoc<K>,
+ Fi: for<'b> FnMut(&mut Self, K) -> ParseResultWithoutLoc<()>,
+ {
+ if self.tokenizer.next_ident_if_eq("null")? {
+ return Ok(());
+ }
+
+ self.tokenizer.next_symbol_expect_eq('{', "map")?;
+ let mut first = true;
+ while !self.tokenizer.next_symbol_if_eq('}')? {
+ if !first {
+ self.tokenizer.next_symbol_expect_eq(',', "map")?;
+ }
+ first = false;
+
+ let key_string = self.read_string()?;
+ let k = parse_key(self, key_string)?;
+
+ self.tokenizer.next_symbol_expect_eq(':', "map")?;
+ read_value_and_insert(self, k)?;
+ }
+
+ Ok(())
+ }
+
+ fn parse_key(&self, key: String, t: &RuntimeType) -> ParseResultWithoutLoc<ReflectValueBox> {
+ match t {
+ RuntimeType::I32 => self.parse_number::<i32>(&key).map(ReflectValueBox::I32),
+ RuntimeType::I64 => self.parse_number::<i64>(&key).map(ReflectValueBox::I64),
+ RuntimeType::U32 => self.parse_number::<u32>(&key).map(ReflectValueBox::U32),
+ RuntimeType::U64 => self.parse_number::<u64>(&key).map(ReflectValueBox::U64),
+ RuntimeType::Bool => self.parse_bool(&key).map(ReflectValueBox::Bool),
+ RuntimeType::String => Ok(ReflectValueBox::String(key)),
+ t @ RuntimeType::F32
+ | t @ RuntimeType::F64
+ | t @ RuntimeType::VecU8
+ | t @ RuntimeType::Enum(..) => panic!("{} cannot be a map key", t),
+ RuntimeType::Message(_) => panic!("message cannot be a map key"),
+ }
+ }
+
+ fn merge_map_field(
+ &mut self,
+ message: &mut dyn MessageDyn,
+ field: &FieldDescriptor,
+ kt: &RuntimeType,
+ vt: &RuntimeType,
+ ) -> ParseResultWithoutLoc<()> {
+ let mut map = field.mut_map(message);
+ map.clear();
+
+ self.read_map(
+ |ss, s| ss.parse_key(s, kt),
+ |s, k| {
+ let v = s.read_value(vt)?;
+ map.insert(k, v);
+ Ok(())
+ },
+ )
+ }
+
+ fn merge_wk_struct(&mut self, struct_value: &mut Struct) -> ParseResultWithoutLoc<()> {
+ struct_value.fields.clear();
+
+ self.read_map(
+ |_, s| Ok(s),
+ |s, k| {
+ let v = s.read_wk_value()?;
+ struct_value.fields.insert(k, v);
+ Ok(())
+ },
+ )
+ }
+
+ fn skip_json_value(&mut self) -> ParseResultWithoutLoc<()> {
+ if self
+ .tokenizer
+ .next_ident_if_in(&["true", "false", "null"])?
+ .is_some()
+ {
+ } else if self.tokenizer.lookahead_is_str_lit()? {
+ self.tokenizer.next_str_lit()?;
+ } else if self.tokenizer.lookahead_is_json_number()? {
+ self.read_json_number_opt()?;
+ } else if self.tokenizer.lookahead_is_symbol('[')? {
+ self.read_list(|s| s.skip_json_value())?;
+ } else if self.tokenizer.lookahead_is_symbol('{')? {
+ self.read_map(|_, _| Ok(()), |s, ()| s.skip_json_value())?;
+ } else {
+ return Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::UnexpectedToken,
+ ));
+ }
+ Ok(())
+ }
+
+ fn merge_field(
+ &mut self,
+ message: &mut dyn MessageDyn,
+ field: &FieldDescriptor,
+ ) -> ParseResultWithoutLoc<()> {
+ match field.runtime_field_type() {
+ RuntimeFieldType::Singular(t) => self.merge_singular_field(message, field, &t),
+ RuntimeFieldType::Repeated(t) => self.merge_repeated_field(message, field, &t),
+ RuntimeFieldType::Map(kt, vt) => self.merge_map_field(message, field, &kt, &vt),
+ }
+ }
+
+ fn merge_inner(&mut self, message: &mut dyn MessageDyn) -> ParseResultWithoutLoc<()> {
+ if let Some(duration) = message.downcast_mut() {
+ return self.merge_wk_duration(duration);
+ }
+
+ if let Some(timestamp) = message.downcast_mut() {
+ return self.merge_wk_timestamp(timestamp);
+ }
+
+ if let Some(field_mask) = message.downcast_mut() {
+ return self.merge_wk_field_mask(field_mask);
+ }
+
+ if let Some(value) = message.downcast_mut() {
+ return self.merge_wk_value(value);
+ }
+
+ if let Some(value) = message.downcast_mut() {
+ return self.merge_wk_any(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<DoubleValue>() {
+ return self.merge_wrapper(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<FloatValue>() {
+ return self.merge_wrapper(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<Int64Value>() {
+ return self.merge_wrapper(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<UInt64Value>() {
+ return self.merge_wrapper(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<Int32Value>() {
+ return self.merge_wrapper(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<UInt32Value>() {
+ return self.merge_wrapper(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<BoolValue>() {
+ return self.merge_bool_value(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<StringValue>() {
+ return self.merge_string_value(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<BytesValue>() {
+ return self.merge_bytes_value(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<ListValue>() {
+ return self.merge_wk_list_value(value);
+ }
+
+ if let Some(value) = message.downcast_mut::<Struct>() {
+ return self.merge_wk_struct(value);
+ }
+
+ let descriptor = message.descriptor_dyn();
+
+ self.tokenizer.next_symbol_expect_eq('{', "object")?;
+ let mut first = true;
+ while !self.tokenizer.next_symbol_if_eq('}')? {
+ if !first {
+ self.tokenizer.next_symbol_expect_eq(',', "object")?;
+ }
+ first = false;
+
+ let field_name = self.read_string()?;
+ // Proto3 JSON parsers are required to accept both
+ // the converted `lowerCamelCase` name and the proto field name.
+ match descriptor.field_by_name_or_json_name(&field_name) {
+ Some(field) => {
+ self.tokenizer.next_symbol_expect_eq(':', "object")?;
+ self.merge_field(message, &field)?;
+ }
+ None if self.parse_options.ignore_unknown_fields => {
+ self.tokenizer.next_symbol_expect_eq(':', "object")?;
+ self.skip_json_value()?;
+ }
+ None => {
+ return Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::UnknownFieldName(field_name),
+ ))
+ }
+ };
+ }
+ Ok(())
+ }
+
+ fn merge_wk_duration(&mut self, duration: &mut Duration) -> ParseResultWithoutLoc<()> {
+ let s = self.read_string()?;
+ let mut lexer = Lexer::new(&s, ParserLanguage::Json);
+
+ fn next_dec(lexer: &mut Lexer) -> ParseResultWithoutLoc<(u64, u32)> {
+ let s = lexer.take_while(|c| c >= '0' && c <= '9');
+
+ if s.len() == 0 {
+ Ok((0, 0))
+ } else {
+ match s.parse() {
+ Ok(n) => Ok((n, s.len() as u32)),
+ Err(_) => Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::IncorrectDuration,
+ )),
+ }
+ }
+ }
+
+ let minus = lexer.next_char_if_eq('-');
+ let seconds = match next_dec(&mut lexer)? {
+ (_, 0) => {
+ return Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::IncorrectDuration,
+ ))
+ }
+ (s, _) => s,
+ };
+ let nanos = if lexer.next_char_if_eq('.') {
+ let (mut a, mut b) = next_dec(&mut lexer)?;
+ if b > 9 {
+ return Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::IncorrectDuration,
+ ));
+ }
+ while b != 9 {
+ b += 1;
+ a *= 10;
+ }
+
+ if a > 999_999_999 {
+ return Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::IncorrectDuration,
+ ));
+ }
+
+ a
+ } else {
+ 0
+ };
+
+ // The suffix "s" is required
+ if !lexer.next_char_if_eq('s') {
+ return Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::IncorrectDuration,
+ ));
+ }
+
+ if !lexer.eof() {
+ return Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::IncorrectDuration,
+ ));
+ }
+
+ if minus {
+ duration.seconds = -(seconds as i64);
+ duration.nanos = -(nanos as i32);
+ } else {
+ duration.seconds = seconds as i64;
+ duration.nanos = nanos as i32;
+ }
+ Ok(())
+ }
+
+ fn merge_wk_timestamp(&mut self, timestamp: &mut Timestamp) -> ParseResultWithoutLoc<()> {
+ let s = self.read_string()?;
+ let (seconds, nanos) = rfc_3339::TmUtc::parse_rfc_3339(&s)?;
+ timestamp.seconds = seconds;
+ timestamp.nanos = nanos as i32;
+ Ok(())
+ }
+
+ fn merge_wk_field_mask(&mut self, field_mask: &mut FieldMask) -> ParseResultWithoutLoc<()> {
+ let s = self.read_string()?;
+ if !s.is_empty() {
+ field_mask.paths = s.split(',').map(|s| s.to_owned()).collect();
+ }
+ Ok(())
+ }
+
+ fn read_wk_list_value(&mut self) -> ParseResultWithoutLoc<ListValue> {
+ let mut r = ListValue::new();
+ self.merge_wk_list_value(&mut r)?;
+ Ok(r)
+ }
+
+ fn read_wk_struct(&mut self) -> ParseResultWithoutLoc<Struct> {
+ let mut r = Struct::new();
+ self.merge_wk_struct(&mut r)?;
+ Ok(r)
+ }
+
+ fn merge_wk_value(&mut self, value: &mut Value) -> ParseResultWithoutLoc<()> {
+ if self.tokenizer.lookahead_is_ident("null")? {
+ value.kind = Some(struct_::value::Kind::NullValue(
+ self.read_wk_null_value()?.into(),
+ ));
+ } else if self.tokenizer.lookahead_is_ident("true")?
+ || self.tokenizer.lookahead_is_ident("false")?
+ {
+ value.kind = Some(struct_::value::Kind::BoolValue(self.read_bool()?));
+ } else if self.tokenizer.lookahead_is_json_number()? {
+ value.kind = Some(struct_::value::Kind::NumberValue(self.read_f64()?));
+ } else if self.tokenizer.lookahead_is_str_lit()? {
+ value.kind = Some(struct_::value::Kind::StringValue(self.read_string()?));
+ } else if self.tokenizer.lookahead_is_symbol('[')? {
+ value.kind = Some(struct_::value::Kind::ListValue(self.read_wk_list_value()?));
+ } else if self.tokenizer.lookahead_is_symbol('{')? {
+ value.kind = Some(struct_::value::Kind::StructValue(self.read_wk_struct()?));
+ } else {
+ return Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::UnexpectedToken,
+ ));
+ }
+ Ok(())
+ }
+
+ fn merge_wk_any(&mut self, _value: &mut Any) -> ParseResultWithoutLoc<()> {
+ Err(ParseErrorWithoutLoc(
+ ParseErrorWithoutLocInner::AnyParsingIsNotImplemented,
+ ))
+ }
+
+ fn read_wk_value(&mut self) -> ParseResultWithoutLoc<Value> {
+ let mut v = Value::new();
+ self.merge_wk_value(&mut v)?;
+ Ok(v)
+ }
+
+ fn merge(&mut self, message: &mut dyn MessageDyn) -> ParseResult<()> {
+ match self.merge_inner(message) {
+ Ok(()) => Ok(()),
+ Err(error) => Err(ParseError {
+ error,
+ loc: self.tokenizer.loc(),
+ }),
+ }
+ }
+}
+
+/// JSON parse options.
+///
+/// # Examples
+///
+/// ```
+/// let parse_options = protobuf_json_mapping::ParseOptions {
+/// ignore_unknown_fields: true,
+/// ..Default::default()
+/// };
+/// ```
+#[derive(Default, Debug, Clone)]
+pub struct ParseOptions {
+ /// Ignore unknown fields when parsing.
+ ///
+ /// When `true`, fields with unknown names are ignored.
+ /// When `false`, the parser returns an error on unknown fields.
+ pub ignore_unknown_fields: bool,
+ /// Prevents initializing `ParseOptions` by enumerating all fields.
+ pub _future_options: (),
+}
+
+/// Merge JSON into provided message
+pub fn merge_from_str_with_options(
+ message: &mut dyn MessageDyn,
+ json: &str,
+ parse_options: &ParseOptions,
+) -> ParseResult<()> {
+ let mut parser = Parser {
+ tokenizer: Tokenizer::new(json, ParserLanguage::Json),
+ parse_options: parse_options.clone(),
+ };
+ parser.merge(message)
+}
+
+/// Merge JSON into provided message
+pub fn merge_from_str(message: &mut dyn MessageDyn, json: &str) -> ParseResult<()> {
+ merge_from_str_with_options(message, json, &ParseOptions::default())
+}
+
+/// Parse JSON to protobuf message.
+pub fn parse_dyn_from_str_with_options(
+ d: &MessageDescriptor,
+ json: &str,
+ parse_options: &ParseOptions,
+) -> ParseResult<Box<dyn MessageDyn>> {
+ let mut m = d.new_instance();
+ merge_from_str_with_options(&mut *m, json, parse_options)?;
+ if let Err(_) = m.check_initialized_dyn() {
+ return Err(ParseError {
+ error: ParseErrorWithoutLoc(ParseErrorWithoutLocInner::MessageNotInitialized),
+ loc: Loc::start(),
+ });
+ }
+ Ok(m)
+}
+
+/// Parse JSON to protobuf message.
+pub fn parse_dyn_from_str(d: &MessageDescriptor, json: &str) -> ParseResult<Box<dyn MessageDyn>> {
+ parse_dyn_from_str_with_options(d, json, &ParseOptions::default())
+}
+
+/// Parse JSON to protobuf message.
+pub fn parse_from_str_with_options<M: MessageFull>(
+ json: &str,
+ parse_options: &ParseOptions,
+) -> ParseResult<M> {
+ let m = parse_dyn_from_str_with_options(&M::descriptor(), json, parse_options)?;
+ Ok(*m.downcast_box().unwrap())
+}
+
+/// Parse JSON to protobuf message.
+pub fn parse_from_str<M: MessageFull>(json: &str) -> ParseResult<M> {
+ parse_from_str_with_options(json, &ParseOptions::default())
+}
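A sketch of lenient parsing with the `ParseOptions` defined above, assuming the same hypothetical generated `MyMessage`; with `ignore_unknown_fields` set, keys that match no field are skipped via `skip_json_value` instead of producing `UnknownFieldName`:

    use protobuf_json_mapping::{parse_from_str_with_options, ParseOptions};

    fn parse_lenient(json: &str) -> Result<MyMessage, protobuf_json_mapping::ParseError> {
        let options = ParseOptions {
            ignore_unknown_fields: true,
            // `_future_options` is covered by the `Default` expansion below.
            ..Default::default()
        };
        parse_from_str_with_options::<MyMessage>(json, &options)
    }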
diff --git a/src/print.rs b/src/print.rs
new file mode 100644
index 0000000..eef44ca
--- /dev/null
+++ b/src/print.rs
@@ -0,0 +1,586 @@
+use std::fmt;
+use std::fmt::Write as fmt_Write;
+
+use protobuf::reflect::EnumDescriptor;
+use protobuf::reflect::EnumValueDescriptor;
+use protobuf::reflect::MessageRef;
+use protobuf::reflect::ReflectFieldRef;
+use protobuf::reflect::ReflectMapRef;
+use protobuf::reflect::ReflectRepeatedRef;
+use protobuf::reflect::ReflectValueRef;
+use protobuf::reflect::RuntimeFieldType;
+use protobuf::reflect::RuntimeType;
+use protobuf::well_known_types::any::Any;
+use protobuf::well_known_types::duration::Duration;
+use protobuf::well_known_types::field_mask::FieldMask;
+use protobuf::well_known_types::struct_::value;
+use protobuf::well_known_types::struct_::ListValue;
+use protobuf::well_known_types::struct_::NullValue;
+use protobuf::well_known_types::struct_::Struct;
+use protobuf::well_known_types::struct_::Value;
+use protobuf::well_known_types::timestamp::Timestamp;
+use protobuf::well_known_types::wrappers::BoolValue;
+use protobuf::well_known_types::wrappers::BytesValue;
+use protobuf::well_known_types::wrappers::DoubleValue;
+use protobuf::well_known_types::wrappers::FloatValue;
+use protobuf::well_known_types::wrappers::Int32Value;
+use protobuf::well_known_types::wrappers::Int64Value;
+use protobuf::well_known_types::wrappers::StringValue;
+use protobuf::well_known_types::wrappers::UInt32Value;
+use protobuf::well_known_types::wrappers::UInt64Value;
+use protobuf::MessageDyn;
+
+use crate::base64;
+use crate::float;
+use crate::rfc_3339::TmUtc;
+use crate::well_known_wrapper::WellKnownWrapper;
+
+#[derive(Debug, thiserror::Error)]
+enum PrintErrorInner {
+ #[error(transparent)]
+ Fmt(fmt::Error),
+ #[error("JSON printing of Any is not implemented")]
+ AnyPrintingIsNotImplemented,
+ #[error("Negative nanoseconds in timestamp")]
+ TimestampNegativeNanos,
+ #[error("Unknown struct value kind")]
+ UnknownStructValueKind,
+}
+
+/// Print to JSON error.
+#[derive(Debug, thiserror::Error)]
+#[error(transparent)]
+pub struct PrintError(PrintErrorInner);
+
+impl From<fmt::Error> for PrintError {
+ fn from(e: fmt::Error) -> Self {
+ PrintError(PrintErrorInner::Fmt(e))
+ }
+}
+
+pub type PrintResult<T> = Result<T, PrintError>;
+
+struct Printer {
+ buf: String,
+ print_options: PrintOptions,
+}
+
+trait PrintableToJson {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()>;
+}
+
+trait JsonFloat: fmt::Display + fmt::Debug + PrintableToJson {
+ fn is_nan(&self) -> bool;
+ fn is_pos_infinity(&self) -> bool;
+ fn is_neg_infinity(&self) -> bool;
+
+ fn print_to_json_impl(&self, w: &mut String) -> PrintResult<()> {
+ Ok(if self.is_nan() {
+ write!(w, "\"{}\"", float::PROTOBUF_JSON_NAN)?
+ } else if self.is_pos_infinity() {
+ write!(w, "\"{}\"", float::PROTOBUF_JSON_INF)?
+ } else if self.is_neg_infinity() {
+ write!(w, "\"{}\"", float::PROTOBUF_JSON_MINUS_INF)?
+ } else {
+ write!(w, "{:?}", self)?
+ })
+ }
+}
+
+impl JsonFloat for f32 {
+ fn is_nan(&self) -> bool {
+ f32::is_nan(*self)
+ }
+
+ fn is_pos_infinity(&self) -> bool {
+ f32::is_infinite(*self) && self > &0.0
+ }
+
+ fn is_neg_infinity(&self) -> bool {
+ f32::is_infinite(*self) && self < &0.0
+ }
+}
+
+impl PrintableToJson for f32 {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ Ok(self.print_to_json_impl(&mut w.buf)?)
+ }
+}
+
+impl JsonFloat for f64 {
+ fn is_nan(&self) -> bool {
+ f64::is_nan(*self)
+ }
+
+ fn is_pos_infinity(&self) -> bool {
+ f64::is_infinite(*self) && self > &0.0
+ }
+
+ fn is_neg_infinity(&self) -> bool {
+ f64::is_infinite(*self) && self < &0.0
+ }
+}
+
+impl PrintableToJson for f64 {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ self.print_to_json_impl(&mut w.buf)
+ }
+}
+
+impl PrintableToJson for u64 {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ // 64-bit integers are quoted by default
+ Ok(write!(w.buf, "\"{}\"", self)?)
+ }
+}
+
+impl PrintableToJson for i64 {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ // 64-bit integers are quoted by default
+ Ok(write!(w.buf, "\"{}\"", self)?)
+ }
+}
+
+impl PrintableToJson for u32 {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ Ok(write!(w.buf, "{}", self)?)
+ }
+}
+
+impl PrintableToJson for i32 {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ Ok(write!(w.buf, "{}", self)?)
+ }
+}
+
+impl PrintableToJson for bool {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ Ok(write!(w.buf, "{}", self)?)
+ }
+}
+
+impl PrintableToJson for str {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ write!(w.buf, "\"")?;
+ for c in self.chars() {
+ match c {
+ '"' => write!(w.buf, "\\\""),
+ '\\' => write!(w.buf, "\\\\"),
+ '\n' => write!(w.buf, "\\n"),
+ '\r' => write!(w.buf, "\\r"),
+ '\t' => write!(w.buf, "\\t"),
+ c if c.is_control() => write!(w.buf, "\\u{:04x}", c as u32),
+ c => write!(w.buf, "{}", c),
+ }?;
+ }
+ write!(w.buf, "\"")?;
+ Ok(())
+ }
+}
+
+impl PrintableToJson for String {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ self.as_str().print_to_json(w)
+ }
+}
+
+impl PrintableToJson for [u8] {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ let encoded = base64::encode(self);
+ encoded.print_to_json(w)
+ }
+}
+
+impl PrintableToJson for Vec<u8> {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ self.as_slice().print_to_json(w)
+ }
+}
+
+impl<'a> PrintableToJson for ReflectValueRef<'a> {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ match self {
+ ReflectValueRef::U32(v) => w.print_printable(v),
+ ReflectValueRef::U64(v) => w.print_printable(v),
+ ReflectValueRef::I32(v) => w.print_printable(v),
+ ReflectValueRef::I64(v) => w.print_printable(v),
+ ReflectValueRef::F32(v) => w.print_printable(v),
+ ReflectValueRef::F64(v) => w.print_printable(v),
+ ReflectValueRef::Bool(v) => w.print_printable(v),
+ ReflectValueRef::String(v) => w.print_printable::<str>(v),
+ ReflectValueRef::Bytes(v) => w.print_printable::<[u8]>(v),
+ ReflectValueRef::Enum(d, v) => w.print_enum(d, *v),
+ ReflectValueRef::Message(v) => w.print_message(v),
+ }
+ }
+}
+
+impl PrintableToJson for Duration {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ let sign = if self.seconds >= 0 { "" } else { "-" };
+ Ok(write!(
+ w.buf,
+ "\"{}{}.{:09}s\"",
+ sign,
+ self.seconds.abs(),
+ self.nanos.abs()
+ )?)
+ }
+}
+
+impl PrintableToJson for Timestamp {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ if self.nanos < 0 {
+ return Err(PrintError(PrintErrorInner::TimestampNegativeNanos));
+ }
+ let tm_utc = TmUtc::from_protobuf_timestamp(self.seconds, self.nanos as u32);
+ w.print_printable(&tm_utc.to_string())
+ }
+}
+
+impl PrintableToJson for FieldMask {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ w.print_printable(&self.paths.join(","))
+ }
+}
+
+impl PrintableToJson for Any {
+ fn print_to_json(&self, _w: &mut Printer) -> PrintResult<()> {
+ Err(PrintError(PrintErrorInner::AnyPrintingIsNotImplemented))
+ }
+}
+
+impl PrintableToJson for Value {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ match self.kind {
+ // None should not be possible here, but it's better to print null than crash
+ None => w.print_json_null(),
+ Some(value::Kind::NullValue(null_value)) => {
+ match null_value.enum_value() {
+ Ok(value) => w.print_wk_null_value(&value),
+ Err(n) => {
+ // Practically not possible, but it is safer this way.
+ w.print_printable(&n)
+ }
+ }
+ }
+ Some(value::Kind::BoolValue(b)) => w.print_printable(&b),
+ Some(value::Kind::NumberValue(n)) => w.print_printable(&n),
+ Some(value::Kind::StringValue(ref s)) => w.print_printable::<String>(&s),
+ Some(value::Kind::StructValue(ref s)) => w.print_printable(&s),
+ Some(value::Kind::ListValue(ref l)) => w.print_printable(&l),
+ Some(_) => Err(PrintError(PrintErrorInner::UnknownStructValueKind)),
+ }
+ }
+}
+
+impl PrintableToJson for ListValue {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ w.print_list(&self.values)
+ }
+}
+
+impl PrintableToJson for Struct {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ w.print_object(&self.fields)
+ }
+}
+
+impl<'a, P: PrintableToJson> PrintableToJson for &'a P {
+ fn print_to_json(&self, w: &mut Printer) -> PrintResult<()> {
+ (*self).print_to_json(w)
+ }
+}
+
+trait ObjectKey {
+ fn print_object_key(&self, w: &mut Printer) -> PrintResult<()>;
+}
+
+impl<'a> ObjectKey for ReflectValueRef<'a> {
+ fn print_object_key(&self, w: &mut Printer) -> PrintResult<()> {
+ match self {
+ ReflectValueRef::String(v) => return w.print_printable::<str>(v),
+ ReflectValueRef::Bytes(v) => return w.print_printable::<[u8]>(v),
+ // Do not add quotes here: these impls already print quoted values.
+ ReflectValueRef::U64(v) => return w.print_printable(v),
+ ReflectValueRef::I64(v) => return w.print_printable(v),
+ ReflectValueRef::Enum(d, v) if !w.print_options.enum_values_int => {
+ return w.print_enum(d, *v)
+ }
+ _ => {}
+ }
+
+ write!(w.buf, "\"")?;
+
+ match self {
+ ReflectValueRef::U32(v) => w.print_printable(v),
+ ReflectValueRef::I32(v) => w.print_printable(v),
+ ReflectValueRef::Bool(v) => w.print_printable(v),
+ ReflectValueRef::Enum(d, v) if w.print_options.enum_values_int => w.print_enum(d, *v),
+ ReflectValueRef::Enum(..)
+ | ReflectValueRef::U64(_)
+ | ReflectValueRef::I64(_)
+ | ReflectValueRef::String(_)
+ | ReflectValueRef::Bytes(_) => unreachable!(),
+ ReflectValueRef::F32(_) | ReflectValueRef::F64(_) | ReflectValueRef::Message(_) => {
+ panic!("cannot be object key")
+ }
+ }?;
+
+ write!(w.buf, "\"")?;
+
+ Ok(())
+ }
+}
+
+impl ObjectKey for String {
+ fn print_object_key(&self, w: &mut Printer) -> PrintResult<()> {
+ w.print_printable(self)
+ }
+}
+
+impl<'a, O: ObjectKey> ObjectKey for &'a O {
+ fn print_object_key(&self, w: &mut Printer) -> PrintResult<()> {
+ (*self).print_object_key(w)
+ }
+}
+
+impl Printer {
+ fn print_comma_but_first(&mut self, first: &mut bool) -> fmt::Result {
+ if *first {
+ *first = false;
+ Ok(())
+ } else {
+ write!(self.buf, ", ")
+ }
+ }
+
+ fn print_json_null(&mut self) -> PrintResult<()> {
+ Ok(write!(self.buf, "null")?)
+ }
+
+ fn print_printable<F: PrintableToJson + ?Sized>(&mut self, f: &F) -> PrintResult<()> {
+ f.print_to_json(self)
+ }
+
+ fn print_list<I>(&mut self, items: I) -> PrintResult<()>
+ where
+ I: IntoIterator,
+ I::Item: PrintableToJson,
+ {
+ write!(self.buf, "[")?;
+ for (i, item) in items.into_iter().enumerate() {
+ if i != 0 {
+ write!(self.buf, ", ")?;
+ }
+ self.print_printable(&item)?;
+ }
+ write!(self.buf, "]")?;
+ Ok(())
+ }
+
+ fn print_repeated(&mut self, repeated: &ReflectRepeatedRef) -> PrintResult<()> {
+ self.print_list(repeated)
+ }
+
+ fn print_object<I, K, V>(&mut self, items: I) -> PrintResult<()>
+ where
+ I: IntoIterator<Item = (K, V)>,
+ K: ObjectKey,
+ V: PrintableToJson,
+ {
+ write!(self.buf, "{{")?;
+ for (i, (k, v)) in items.into_iter().enumerate() {
+ if i != 0 {
+ write!(self.buf, ", ")?;
+ }
+ k.print_object_key(self)?;
+ write!(self.buf, ": ")?;
+ self.print_printable(&v)?;
+ }
+ write!(self.buf, "}}")?;
+ Ok(())
+ }
+
+ fn print_map(&mut self, map: &ReflectMapRef) -> PrintResult<()> {
+ self.print_object(map.into_iter())
+ }
+
+ fn print_enum_known(&mut self, value: &EnumValueDescriptor) -> PrintResult<()> {
+ if let Some(null_value) = value.cast() {
+ self.print_wk_null_value(&null_value)
+ } else {
+ if self.print_options.enum_values_int {
+ self.print_printable(&value.value())
+ } else {
+ Ok(write!(self.buf, "\"{}\"", value.name())?)
+ }
+ }
+ }
+
+ fn print_enum(&mut self, descriptor: &EnumDescriptor, v: i32) -> PrintResult<()> {
+ if self.print_options.enum_values_int {
+ self.print_printable(&v)
+ } else {
+ match descriptor.value_by_number(v) {
+ Some(value) => self.print_enum_known(&value),
+ None => self.print_printable(&v),
+ }
+ }
+ }
+
+ fn print_message(&mut self, message: &MessageRef) -> PrintResult<()> {
+ if let Some(duration) = message.downcast_ref::<Duration>() {
+ self.print_printable(duration)
+ } else if let Some(timestamp) = message.downcast_ref::<Timestamp>() {
+ self.print_printable(timestamp)
+ } else if let Some(field_mask) = message.downcast_ref::<FieldMask>() {
+ self.print_printable(field_mask)
+ } else if let Some(any) = message.downcast_ref::<Any>() {
+ self.print_printable(any)
+ } else if let Some(value) = message.downcast_ref::<Value>() {
+ self.print_printable(value)
+ } else if let Some(value) = message.downcast_ref::<DoubleValue>() {
+ self.print_wrapper(value)
+ } else if let Some(value) = message.downcast_ref::<FloatValue>() {
+ self.print_wrapper(value)
+ } else if let Some(value) = message.downcast_ref::<Int64Value>() {
+ self.print_wrapper(value)
+ } else if let Some(value) = message.downcast_ref::<UInt64Value>() {
+ self.print_wrapper(value)
+ } else if let Some(value) = message.downcast_ref::<Int32Value>() {
+ self.print_wrapper(value)
+ } else if let Some(value) = message.downcast_ref::<UInt32Value>() {
+ self.print_wrapper(value)
+ } else if let Some(value) = message.downcast_ref::<BoolValue>() {
+ self.print_wrapper(value)
+ } else if let Some(value) = message.downcast_ref::<StringValue>() {
+ self.print_wrapper(value)
+ } else if let Some(value) = message.downcast_ref::<BytesValue>() {
+ self.print_wrapper(value)
+ } else if let Some(value) = message.downcast_ref::<ListValue>() {
+ self.print_printable(value)
+ } else if let Some(value) = message.downcast_ref::<Struct>() {
+ self.print_printable(value)
+ } else {
+ self.print_regular_message(message)
+ }
+ }
+
+ fn print_regular_message(&mut self, message: &MessageRef) -> Result<(), PrintError> {
+ let descriptor = message.descriptor_dyn();
+
+ write!(self.buf, "{{")?;
+ let mut first = true;
+ for field in descriptor.fields() {
+ let json_field_name = if self.print_options.proto_field_name {
+ field.name()
+ } else {
+ field.json_name()
+ };
+
+ let field_type = field.runtime_field_type();
+
+ match field.get_reflect(&**message) {
+ ReflectFieldRef::Optional(v) => match v.value() {
+ None => {
+ if self.print_options.always_output_default_values {
+ let is_message = match field_type {
+ RuntimeFieldType::Singular(s) => match s {
+ RuntimeType::Message(_) => true,
+ _ => false,
+ },
+ _ => unreachable!(),
+ };
+
+ let is_oneof = field.proto().has_oneof_index();
+
+ if !is_message && !is_oneof {
+ let v = field.get_singular_field_or_default(&**message);
+ self.print_comma_but_first(&mut first)?;
+ write!(self.buf, "\"{}\": ", json_field_name)?;
+ self.print_printable(&v)?;
+ }
+ }
+ }
+ Some(v) => {
+ self.print_comma_but_first(&mut first)?;
+ write!(self.buf, "\"{}\": ", json_field_name)?;
+ self.print_printable(&v)?;
+ }
+ },
+ ReflectFieldRef::Repeated(v) => {
+ if !v.is_empty() || self.print_options.always_output_default_values {
+ self.print_comma_but_first(&mut first)?;
+ write!(self.buf, "\"{}\": ", json_field_name)?;
+ self.print_repeated(&v)?;
+ }
+ }
+ ReflectFieldRef::Map(v) => {
+ if !v.is_empty() || self.print_options.always_output_default_values {
+ self.print_comma_but_first(&mut first)?;
+ write!(self.buf, "\"{}\": ", json_field_name)?;
+ self.print_map(&v)?;
+ }
+ }
+ }
+ }
+ write!(self.buf, "}}")?;
+ Ok(())
+ }
+
+ fn print_wk_null_value(&mut self, _null_value: &NullValue) -> PrintResult<()> {
+ self.print_json_null()
+ }
+
+ fn print_wrapper<W>(&mut self, value: &W) -> PrintResult<()>
+ where
+ W: WellKnownWrapper,
+ W::Underlying: PrintableToJson,
+ {
+ self.print_printable(value.get_ref())
+ }
+}
+
+/// Options for printing JSON to string
+///
+/// # Examples
+///
+/// ```
+/// let print_options = protobuf_json_mapping::PrintOptions {
+/// enum_values_int: true,
+/// ..Default::default()
+/// };
+/// ```
+#[derive(Default, Debug, Clone)]
+pub struct PrintOptions {
+ /// Use ints instead of strings for enums.
+ ///
+ /// Note that both strings and ints can be parsed.
+ pub enum_values_int: bool,
+ /// Use protobuf field names instead of `lowerCamelCase`, which is used by default.
+ /// Note that both names are accepted when JSON is parsed.
+ pub proto_field_name: bool,
+ /// Output field default values.
+ pub always_output_default_values: bool,
+ /// Prevents initializing `PrintOptions` by enumerating all fields.
+ pub _future_options: (),
+}
+
+/// Serialize message to JSON according to protobuf specification.
+pub fn print_to_string_with_options(
+ message: &dyn MessageDyn,
+ print_options: &PrintOptions,
+) -> PrintResult<String> {
+ let mut printer = Printer {
+ buf: String::new(),
+ print_options: print_options.clone(),
+ };
+ printer.print_message(&MessageRef::from(message))?;
+ Ok(printer.buf)
+}
+
+/// Serialize message to JSON according to protobuf specification.
+pub fn print_to_string(message: &dyn MessageDyn) -> PrintResult<String> {
+ print_to_string_with_options(message, &PrintOptions::default())
+}
diff --git a/src/rfc_3339.rs b/src/rfc_3339.rs
new file mode 100644
index 0000000..ef04bcf
--- /dev/null
+++ b/src/rfc_3339.rs
@@ -0,0 +1,585 @@
+use std::fmt;
+use std::time::Duration;
+
+// Number of seconds in a day is a constant.
+// We do not support leap seconds here.
+const SECONDS_IN_DAY: u64 = 86400;
+
+// The Gregorian calendar repeats in 400-year cycles; this procedure
+// computes whether a given year is a leap year.
+fn is_leap_year(year: i64) -> bool {
+ if year % 4 != 0 {
+ false
+ } else if year % 100 != 0 {
+ true
+ } else if year % 400 != 0 {
+ false
+ } else {
+ true
+ }
+}
+
+fn days_in_year(year: i64) -> u32 {
+ if is_leap_year(year) {
+ 366
+ } else {
+ 365
+ }
+}
+
+// Number of leap years among 400 consecutive years.
+const CYCLE_LEAP_YEARS: u32 = 400 / 4 - 400 / 100 + 400 / 400;
+// Number of days in a 400-year cycle.
+const CYCLE_DAYS: u32 = 400 * 365 + CYCLE_LEAP_YEARS;
+// Number of seconds in a 400-year cycle.
+const CYCLE_SECONDS: u64 = CYCLE_DAYS as u64 * SECONDS_IN_DAY;
+
+// Number of seconds between 1 Jan 1970 and 1 Jan 2000.
+// Check with:
+// `TZ=UTC gdate --rfc-3339=seconds --date @946684800`
+const YEARS_1970_2000_SECONDS: u64 = 946684800;
+// Number of seconds between 1 Jan 1600 and 1 Jan 1970.
+const YEARS_1600_1970_SECONDS: u64 = CYCLE_SECONDS - YEARS_1970_2000_SECONDS;
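+// For reference, the derived values are:
+//   CYCLE_LEAP_YEARS = 97
+//   CYCLE_DAYS = 146_097
+//   CYCLE_SECONDS = 12_622_780_800
+//   YEARS_1600_1970_SECONDS = 11_676_096_000 (i.e. 135_140 days)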
+
+// For each year in the cycle, the number of leap years before it in the cycle.
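+// E.g. YEAR_DELTAS[5] == 2: years 0 and 4 of the cycle are leap years.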
+#[cfg_attr(rustfmt, rustfmt_skip)]
+static YEAR_DELTAS: [u8; 401] = [
+ 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5,
+ 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 8, 9, 9, 9, 9, 10, 10, 10,
+ 10, 11, 11, 11, 11, 12, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 14, 15, 15, 15,
+ 15, 16, 16, 16, 16, 17, 17, 17, 17, 18, 18, 18, 18, 19, 19, 19, 19, 20, 20, 20,
+ 20, 21, 21, 21, 21, 22, 22, 22, 22, 23, 23, 23, 23, 24, 24, 24, 24, 25, 25, 25, // 100
+ 25, 25, 25, 25, 25, 26, 26, 26, 26, 27, 27, 27, 27, 28, 28, 28, 28, 29, 29, 29,
+ 29, 30, 30, 30, 30, 31, 31, 31, 31, 32, 32, 32, 32, 33, 33, 33, 33, 34, 34, 34,
+ 34, 35, 35, 35, 35, 36, 36, 36, 36, 37, 37, 37, 37, 38, 38, 38, 38, 39, 39, 39,
+ 39, 40, 40, 40, 40, 41, 41, 41, 41, 42, 42, 42, 42, 43, 43, 43, 43, 44, 44, 44,
+ 44, 45, 45, 45, 45, 46, 46, 46, 46, 47, 47, 47, 47, 48, 48, 48, 48, 49, 49, 49, // 200
+ 49, 49, 49, 49, 49, 50, 50, 50, 50, 51, 51, 51, 51, 52, 52, 52, 52, 53, 53, 53,
+ 53, 54, 54, 54, 54, 55, 55, 55, 55, 56, 56, 56, 56, 57, 57, 57, 57, 58, 58, 58,
+ 58, 59, 59, 59, 59, 60, 60, 60, 60, 61, 61, 61, 61, 62, 62, 62, 62, 63, 63, 63,
+ 63, 64, 64, 64, 64, 65, 65, 65, 65, 66, 66, 66, 66, 67, 67, 67, 67, 68, 68, 68,
+ 68, 69, 69, 69, 69, 70, 70, 70, 70, 71, 71, 71, 71, 72, 72, 72, 72, 73, 73, 73, // 300
+ 73, 73, 73, 73, 73, 74, 74, 74, 74, 75, 75, 75, 75, 76, 76, 76, 76, 77, 77, 77,
+ 77, 78, 78, 78, 78, 79, 79, 79, 79, 80, 80, 80, 80, 81, 81, 81, 81, 82, 82, 82,
+ 82, 83, 83, 83, 83, 84, 84, 84, 84, 85, 85, 85, 85, 86, 86, 86, 86, 87, 87, 87,
+ 87, 88, 88, 88, 88, 89, 89, 89, 89, 90, 90, 90, 90, 91, 91, 91, 91, 92, 92, 92,
+ 92, 93, 93, 93, 93, 94, 94, 94, 94, 95, 95, 95, 95, 96, 96, 96, 96, 97, 97, 97, 97,
+];
+
+/// UTC time
+pub struct TmUtc {
+ /// Year
+ year: i64,
+ /// 1..=12
+ month: u32,
+ /// 1-based day of month
+ day: u32,
+ /// 0..=23
+ hour: u32,
+ /// 0..=59
+ minute: u32,
+ /// 0..=59; no leap seconds
+ second: u32,
+ /// 0..=999_999_999
+ nanos: u32,
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum Rfc3339ParseError {
+ #[error("Unexpected EOF")]
+ UnexpectedEof,
+ #[error("Trailing characters")]
+    TrailingCharacters,
+ #[error("Expecting digits")]
+ ExpectingDigits,
+ #[error("Expecting character: {:?}", .0)]
+ ExpectingChar(char),
+ #[error("Expecting timezone")]
+ ExpectingTimezone,
+ #[error("No digits after dot")]
+ NoDigitsAfterDot,
+ #[error("Date-time field is out of range")]
+ DateTimeFieldOutOfRange,
+ #[error("Expecting date-time separator")]
+ ExpectingDateTimeSeparator,
+}
+
+pub type Rfc3339ParseResult<A> = Result<A, Rfc3339ParseError>;
+
+impl TmUtc {
+ fn day_of_cycle_to_year_day_of_year(day_of_cycle: u32) -> (i64, u32) {
+ debug_assert!(day_of_cycle < CYCLE_DAYS);
+
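+        // Start from the naive guess `day_of_cycle / 365` and correct it
+        // downwards using the precomputed leap-year table.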
+ let mut year_mod_400 = (day_of_cycle / 365) as i64;
+        let mut day_of_year = (day_of_cycle % 365) as u32;
+
+        let delta = YEAR_DELTAS[year_mod_400 as usize] as u32;
+        if day_of_year < delta {
+            year_mod_400 -= 1;
+            day_of_year += 365 - YEAR_DELTAS[year_mod_400 as usize] as u32;
+        } else {
+            day_of_year -= delta;
+        }
+
+        (year_mod_400, day_of_year)
+ }
+
+ fn year_day_of_year_to_day_of_cycle(year_mod_400: u32, day_of_year: u32) -> u32 {
+ debug_assert!(year_mod_400 < 400);
+ debug_assert!(day_of_year < days_in_year(year_mod_400 as i64));
+
+ year_mod_400 * 365 + YEAR_DELTAS[year_mod_400 as usize] as u32 + day_of_year
+ }
+
+    // Convert second of the day to hour, minute and second.
+ fn second_of_day_to_h_m_s(seconds: u32) -> (u32, u32, u32) {
+ debug_assert!(seconds < 86400);
+
+ let hour = seconds / 3600;
+ let minute = seconds % 3600 / 60;
+ let second = seconds % 60;
+
+ (hour, minute, second)
+ }
+
+ fn h_m_s_to_second_of_day(hour: u32, minute: u32, second: u32) -> u32 {
+ debug_assert!(hour < 24);
+ debug_assert!(minute < 60);
+ debug_assert!(second < 60);
+
+ hour * 3600 + minute * 60 + second
+ }
+
+ fn days_in_months(year: i64) -> &'static [u32] {
+ if is_leap_year(year) {
+ &[31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
+ } else {
+ &[31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
+ }
+ }
+
+ // Convert day of year (0-based) to month and day
+ fn day_of_year_to_month_day(year: i64, day_of_year: u32) -> (u32, u32) {
+ debug_assert!(day_of_year < days_in_year(year));
+
+ let days_in_months = TmUtc::days_in_months(year);
+
+ let mut rem_days = day_of_year;
+ let mut month = 1;
+ while rem_days >= days_in_months[month - 1] {
+ rem_days -= days_in_months[month - 1];
+ month += 1;
+ }
+
+ debug_assert!(rem_days + 1 <= days_in_months[month - 1]);
+
+ (month as u32, rem_days + 1)
+ }
+
+ fn month_day_to_day_of_year(year: i64, month: u32, day: u32) -> u32 {
+ debug_assert!(month >= 1);
+ debug_assert!(month <= 12);
+
+ debug_assert!(day >= 1);
+
+ let days_in_months = TmUtc::days_in_months(year);
+
+ // TODO: replace loop with precomputed table
+ let mut day_of_year = 0;
+ for next_month in 1..month {
+ day_of_year += days_in_months[next_month as usize - 1];
+ }
+
+ debug_assert!(day <= days_in_months[month as usize - 1]);
+
+ day_of_year + day - 1
+ }
+
+    // Construct from a duration added to the start of a 400-year cycle.
+ fn from_cycle_start_add_duration(mut cycle_start: i64, add: Duration) -> TmUtc {
+ debug_assert!(cycle_start % 400 == 0);
+
+ // Split duration to days and duration within day
+
+ let days = add.as_secs() / SECONDS_IN_DAY;
+ let duration_of_day = add - Duration::from_secs(days * SECONDS_IN_DAY);
+
+ let cycles = days / CYCLE_DAYS as u64;
+ cycle_start += cycles as i64 * 400;
+ let day_of_cycle = days % CYCLE_DAYS as u64;
+
+ let (year_mod_400, day_of_year) =
+ TmUtc::day_of_cycle_to_year_day_of_year(day_of_cycle as u32);
+
+        let year = cycle_start + year_mod_400;
+ let (month, day) = TmUtc::day_of_year_to_month_day(year, day_of_year);
+ let (hour, minute, second) =
+ TmUtc::second_of_day_to_h_m_s(duration_of_day.as_secs() as u32);
+
+ TmUtc {
+ year,
+ month,
+ day,
+ hour,
+ minute,
+ second,
+ nanos: duration_of_day.subsec_nanos(),
+ }
+ }
+
+    // Protobuf timestamp: seconds since the Unix epoch, with nanos in 0..=999_999_999 always counting forward.
+ pub fn from_protobuf_timestamp(seconds: i64, nanos: u32) -> TmUtc {
+ assert!(nanos <= 999_999_999);
+
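+        // Reduce everything to a non-negative number of seconds counted from
+        // the start of a 400-year cycle: for negative timestamps, step back
+        // whole cycles first; the `year -= 370` below then shifts 1970 to the
+        // cycle start (1600, 1200, ...).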
+ let (mut year, mut seconds) = if seconds >= 0 {
+ (1970, seconds as u64)
+ } else {
+ let minus_seconds = if seconds == i64::MIN {
+ i64::MIN as u64
+ } else {
+ -seconds as u64
+ };
+
+ let cycles = (minus_seconds + CYCLE_SECONDS) / CYCLE_SECONDS;
+
+ (
+ 1970 - 400 * cycles as i64,
+ cycles * CYCLE_SECONDS - minus_seconds,
+ )
+ };
+
+ year -= 370;
+ seconds += YEARS_1600_1970_SECONDS;
+
+ TmUtc::from_cycle_start_add_duration(year, Duration::new(seconds, nanos))
+ }
+
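+    // Convert back to a protobuf timestamp `(seconds, nanos)`.
+    // Only years 0..=9999 are supported (enforced by the asserts below).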
+ pub fn to_protobuf_timestamp(&self) -> (i64, u32) {
+ assert!(self.year >= 0);
+ assert!(self.year <= 9999);
+
+ let year_mod_400 = ((self.year % 400 + 400) % 400) as u32;
+ let cycle_start = self.year - year_mod_400 as i64;
+
+ let day_of_year = TmUtc::month_day_to_day_of_year(self.year, self.month, self.day);
+ let day_of_cycle = TmUtc::year_day_of_year_to_day_of_cycle(year_mod_400, day_of_year);
+ let second_of_day = TmUtc::h_m_s_to_second_of_day(self.hour, self.minute, self.second);
+
+ let second_of_cycle = day_of_cycle as u64 * SECONDS_IN_DAY + second_of_day as u64;
+
+ let epoch_seconds = (cycle_start - 1600) / 400 * CYCLE_SECONDS as i64
+ - YEARS_1600_1970_SECONDS as i64
+ + second_of_cycle as i64;
+
+ (epoch_seconds, self.nanos)
+ }
+
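+    // Parse an RFC 3339 date-time into a protobuf timestamp `(seconds, nanos)`.
+    // For example, "1970-01-01T00:00:01.5Z" parses to (1, 500_000_000) and
+    // "1970-01-01T03:00:00+03:00" parses to (0, 0).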
+ pub fn parse_rfc_3339(s: &str) -> Rfc3339ParseResult<(i64, u32)> {
+ struct Parser<'a> {
+ s: &'a [u8],
+ pos: usize,
+ }
+
+ impl<'a> Parser<'a> {
+ fn next_number(&mut self, len: usize) -> Rfc3339ParseResult<u32> {
+ let end_pos = self.pos + len;
+ if end_pos > self.s.len() {
+ return Err(Rfc3339ParseError::UnexpectedEof);
+ }
+ let mut r = 0;
+ for i in 0..len {
+ let c = self.s[self.pos + i];
+ if c >= b'0' && c <= b'9' {
+ r = r * 10 + (c - b'0') as u32;
+ } else {
+ return Err(Rfc3339ParseError::ExpectingDigits);
+ }
+ }
+ self.pos += len;
+ Ok(r)
+ }
+
+ fn lookahead_char(&self) -> Rfc3339ParseResult<u8> {
+ if self.pos == self.s.len() {
+ return Err(Rfc3339ParseError::UnexpectedEof);
+ }
+ Ok(self.s[self.pos])
+ }
+
+ fn next_char(&mut self, expect: u8) -> Rfc3339ParseResult<()> {
+ assert!(expect < 0x80);
+ let c = self.lookahead_char()?;
+ if c != expect {
+ return Err(Rfc3339ParseError::ExpectingChar(expect as char));
+ }
+ self.pos += 1;
+ Ok(())
+ }
+ }
+
+ let mut parser = Parser {
+ s: s.as_bytes(),
+ pos: 0,
+ };
+
+ let year = parser.next_number(4)? as i64;
+ parser.next_char(b'-')?;
+ let month = parser.next_number(2)?;
+ parser.next_char(b'-')?;
+ let day = parser.next_number(2)?;
+
+ if month < 1 || month > 12 {
+ return Err(Rfc3339ParseError::DateTimeFieldOutOfRange);
+ }
+
+ if day < 1 || day > TmUtc::days_in_months(year as i64)[month as usize - 1] {
+ return Err(Rfc3339ParseError::DateTimeFieldOutOfRange);
+ }
+
+ match parser.lookahead_char()? {
+ b'T' | b't' | b' ' => parser.pos += 1,
+ _ => return Err(Rfc3339ParseError::ExpectingDateTimeSeparator),
+ }
+
+ let hour = parser.next_number(2)?;
+ parser.next_char(b':')?;
+ let minute = parser.next_number(2)?;
+ parser.next_char(b':')?;
+ let second = parser.next_number(2)?;
+
+ if hour > 23 || minute > 59 || second > 60 {
+ return Err(Rfc3339ParseError::DateTimeFieldOutOfRange);
+ }
+
+ // round down leap second
+ let second = if second == 60 { 59 } else { second };
+
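+        // Optional fractional seconds: keep at most 9 significant digits,
+        // consume (and ignore) any further digits, then scale up to nanoseconds.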
+ let nanos = if parser.lookahead_char()? == b'.' {
+ parser.pos += 1;
+ let mut digits = 0;
+ let mut nanos = 0;
+ while parser.lookahead_char()? >= b'0' && parser.lookahead_char()? <= b'9' {
+ let digit = (parser.lookahead_char()? - b'0') as u32;
+ parser.pos += 1;
+ if digits == 9 {
+ continue;
+ }
+ digits += 1;
+ nanos = nanos * 10 + digit;
+ }
+
+ if digits == 0 {
+ return Err(Rfc3339ParseError::NoDigitsAfterDot);
+ }
+
+ for _ in digits..9 {
+ nanos *= 10;
+ }
+ nanos
+ } else {
+ 0
+ };
+
+ let offset_seconds = if parser.lookahead_char()? == b'Z' || parser.lookahead_char()? == b'z'
+ {
+ parser.pos += 1;
+ 0
+ } else {
+ let sign = if parser.lookahead_char()? == b'+' {
+ 1
+ } else if parser.lookahead_char()? == b'-' {
+ -1
+ } else {
+ return Err(Rfc3339ParseError::ExpectingTimezone);
+ };
+
+ parser.pos += 1;
+
+ let hour_offset = parser.next_number(2)?;
+ parser.next_char(b':')?;
+ let minute_offset = parser.next_number(2)?;
+
+ (hour_offset * 3600 + 60 * minute_offset) as i64 * sign
+ };
+
+ if parser.pos != parser.s.len() {
+            return Err(Rfc3339ParseError::TrailingCharacters);
+ }
+
+ let (seconds, nanos) = TmUtc {
+ year,
+ month,
+ day,
+ hour,
+ minute,
+ second,
+ nanos,
+ }
+ .to_protobuf_timestamp();
+
+ Ok((seconds - offset_seconds, nanos))
+ }
+}
+
+impl fmt::Display for TmUtc {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ if self.year > 9999 {
+ write!(f, "+{}", self.year)?;
+ } else if self.year < 0 {
+ write!(f, "{:05}", self.year)?;
+ } else {
+ write!(f, "{:04}", self.year)?;
+ }
+ write!(
+ f,
+ "-{:02}-{:02}T{:02}:{:02}:{:02}",
+ self.month, self.day, self.hour, self.minute, self.second
+ )?;
+
+ // if precision is not specified, print nanoseconds
+ let subsec_digits = f.precision().unwrap_or(9);
+ if subsec_digits != 0 {
+ let mut subsec_digits = subsec_digits;
+
+ let width = if subsec_digits > 9 { 9 } else { subsec_digits };
+
+            // "Truncated" nanoseconds.
+ let mut subsec = self.nanos;
+
+            // Performs 8 iterations when precision=1,
+            // but that's probably not an issue compared to other computations.
+ for _ in width..9 {
+ subsec /= 10;
+ }
+
+ write!(f, ".{:0width$}", subsec, width = width as usize)?;
+
+ // Adding more than 9 digits is meaningless,
+            // but if the user requests it, we should print zeros.
+ for _ in 9..subsec_digits {
+ write!(f, "0")?;
+ subsec_digits -= 1;
+ }
+ }
+
+ write!(f, "Z")
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn test_fmt() {
+ fn test_impl(expected: &str, secs: i64, nanos: u32, subsec_digits: u32) {
+ let tm_utc = TmUtc::from_protobuf_timestamp(secs, nanos);
+
+ assert_eq!(
+ expected,
+ format!("{:.prec$}", tm_utc, prec = subsec_digits as usize)
+ );
+ }
+
+        // Tests can be validated with GNU date:
+ // `TZ=UTC gdate --date @1535585179 --iso-8601=seconds`
+
+ test_impl("1970-01-01T00:00:00Z", 0, 0, 0);
+ test_impl("2018-08-29T23:26:19Z", 1535585179, 0, 0);
+ test_impl("2018-08-29T23:26:19.123Z", 1535585179, 123456789, 3);
+ test_impl("1646-04-01T03:45:44Z", -10216613656, 0, 0);
+ test_impl("1970-01-01T00:00:00.000000001000Z", 0, 1, 12);
+ test_impl("5138-11-16T09:46:40Z", 100000000000, 0, 0);
+ test_impl("+33658-09-27T01:46:41Z", 1000000000001, 0, 0);
+ // Leading zero
+ test_impl("0000-12-31T00:00:00Z", -62135683200, 0, 0);
+        // Negative year padded with zeros
+ test_impl("-0003-10-30T14:13:20Z", -62235683200, 0, 0);
+ // More than 4 digits
+ // Largest value GNU date can handle
+ test_impl("+2147485547-12-31T23:59:59Z", 67768036191676799, 0, 0);
+ // Negative dates
+ test_impl("1969-12-31T23:59:59Z", -1, 0, 0);
+ test_impl("1969-12-31T23:59:00Z", -60, 0, 0);
+ test_impl("1969-12-31T23:59:58.900Z", -2, 900_000_000, 3);
+ test_impl("1966-10-31T14:13:20Z", -100000000, 0, 0);
+ test_impl("-29719-04-05T22:13:19Z", -1000000000001, 0, 0);
+ // Smallest value GNU date can handle
+ test_impl("-2147481748-01-01T00:00:00Z", -67768040609740800, 0, 0);
+ }
+
+ #[test]
+ fn test_parse_fmt() {
+ fn test_impl(s: &str, width: usize) {
+ let (seconds, nanos) = TmUtc::parse_rfc_3339(s).unwrap();
+ let formatted = format!(
+ "{:.width$}",
+ TmUtc::from_protobuf_timestamp(seconds, nanos),
+ width = width
+ );
+ assert_eq!(formatted, s);
+ }
+
+ test_impl("1970-01-01T00:00:00Z", 0);
+ test_impl("1970-01-01T00:00:00.000Z", 3);
+ test_impl("1970-01-01T00:00:00.000000000Z", 9);
+ test_impl("1970-01-02T00:00:00Z", 0);
+ test_impl("1970-03-01T00:00:00Z", 0);
+ test_impl("1974-01-01T00:00:00Z", 0);
+ test_impl("2018-01-01T00:00:00Z", 0);
+ test_impl("2018-09-02T05:49:10.123456789Z", 9);
+ test_impl("0001-01-01T00:00:00.000000000Z", 9);
+ test_impl("9999-12-31T23:59:59.999999999Z", 9);
+ }
+
+ #[test]
+ fn test_parse_alt() {
+ fn test_impl(alt: &str, parse: &str) {
+ let reference = TmUtc::parse_rfc_3339(alt).unwrap();
+ let parsed = TmUtc::parse_rfc_3339(parse).unwrap();
+ assert_eq!(reference, parsed, "{} - {}", alt, parse);
+ }
+
+ // alternative spelling
+ test_impl("1970-01-01 00:00:00Z", "1970-01-01T00:00:00Z");
+ test_impl("1970-01-01 00:00:00Z", "1970-01-01t00:00:00Z");
+ test_impl("1970-01-01 00:00:00Z", "1970-01-01 00:00:00z");
+ // leap second is rounded down
+ test_impl("2016-12-31 23:59:59Z", "2016-12-31 23:59:60Z");
+ // TZ offset
+ test_impl("1970-01-01 00:00:00Z", "1970-01-01T03:00:00+03:00");
+ test_impl("1970-01-01 00:00:00Z", "1969-12-31 22:15:00-01:45");
+ }
+
+ #[test]
+ fn test_parse_incorrect_inputs() {
+ fn test_impl(s: &str) {
+ assert!(TmUtc::parse_rfc_3339(s).is_err(), "{}", s);
+ }
+
+ test_impl("1970-01-01T00:00:61Z");
+ test_impl("1970-01-01T00:60:61Z");
+ test_impl("1970-01-01T24:00:61Z");
+ test_impl("1970-01-01T00:00:00.Z");
+ test_impl("1970-01-32T00:00:00Z");
+ test_impl("1970-02-29T00:00:00Z");
+ test_impl("1980-02-30T00:00:00Z");
+ test_impl("1980-13-01T00:00:00Z");
+ test_impl("1970-01-01T00:00:00");
+ test_impl("1970-01-01T00:00Z");
+ }
+
+ #[test]
+ fn test_fmt_max_duration() {
+ // Simply check that there are no integer overflows.
+        // I didn't check that the resulting strings are correct.
+ assert_eq!(
+ "-292277022657-01-27T08:29:52.000000000Z",
+ format!("{}", TmUtc::from_protobuf_timestamp(i64::MIN, 0))
+ );
+ assert_eq!(
+ "+292277026596-12-04T15:30:07.999999999Z",
+ format!("{}", TmUtc::from_protobuf_timestamp(i64::MAX, 999_999_999))
+ );
+ }
+}
diff --git a/src/well_known_wrapper.rs b/src/well_known_wrapper.rs
new file mode 100644
index 0000000..071630c
--- /dev/null
+++ b/src/well_known_wrapper.rs
@@ -0,0 +1,128 @@
+//! Trait for well-known wrapper types
+
+use protobuf::well_known_types::wrappers::BoolValue;
+use protobuf::well_known_types::wrappers::BytesValue;
+use protobuf::well_known_types::wrappers::DoubleValue;
+use protobuf::well_known_types::wrappers::FloatValue;
+use protobuf::well_known_types::wrappers::Int32Value;
+use protobuf::well_known_types::wrappers::Int64Value;
+use protobuf::well_known_types::wrappers::StringValue;
+use protobuf::well_known_types::wrappers::UInt32Value;
+use protobuf::well_known_types::wrappers::UInt64Value;
+
+/// Well-known wrapper types have a single field.
+/// This trait's operations return references to that field.
+pub(crate) trait WellKnownWrapper {
+ type Underlying;
+
+ fn get_ref(&self) -> &Self::Underlying;
+ fn get_mut(&mut self) -> &mut Self::Underlying;
+}
+
+impl WellKnownWrapper for DoubleValue {
+ type Underlying = f64;
+
+ fn get_ref(&self) -> &f64 {
+ &self.value
+ }
+
+ fn get_mut(&mut self) -> &mut f64 {
+ &mut self.value
+ }
+}
+
+impl WellKnownWrapper for FloatValue {
+ type Underlying = f32;
+
+ fn get_ref(&self) -> &f32 {
+ &self.value
+ }
+
+ fn get_mut(&mut self) -> &mut f32 {
+ &mut self.value
+ }
+}
+
+impl WellKnownWrapper for Int64Value {
+ type Underlying = i64;
+
+ fn get_ref(&self) -> &i64 {
+ &self.value
+ }
+
+ fn get_mut(&mut self) -> &mut i64 {
+ &mut self.value
+ }
+}
+
+impl WellKnownWrapper for UInt64Value {
+ type Underlying = u64;
+
+ fn get_ref(&self) -> &u64 {
+ &self.value
+ }
+
+ fn get_mut(&mut self) -> &mut u64 {
+ &mut self.value
+ }
+}
+
+impl WellKnownWrapper for Int32Value {
+ type Underlying = i32;
+
+ fn get_ref(&self) -> &i32 {
+ &self.value
+ }
+
+ fn get_mut(&mut self) -> &mut i32 {
+ &mut self.value
+ }
+}
+
+impl WellKnownWrapper for UInt32Value {
+ type Underlying = u32;
+
+ fn get_ref(&self) -> &u32 {
+ &self.value
+ }
+
+ fn get_mut(&mut self) -> &mut u32 {
+ &mut self.value
+ }
+}
+
+impl WellKnownWrapper for BoolValue {
+ type Underlying = bool;
+
+ fn get_ref(&self) -> &bool {
+ &self.value
+ }
+
+ fn get_mut(&mut self) -> &mut bool {
+ &mut self.value
+ }
+}
+
+impl WellKnownWrapper for StringValue {
+ type Underlying = String;
+
+ fn get_ref(&self) -> &String {
+ &self.value
+ }
+
+ fn get_mut(&mut self) -> &mut String {
+ &mut self.value
+ }
+}
+
+impl WellKnownWrapper for BytesValue {
+ type Underlying = Vec<u8>;
+
+ fn get_ref(&self) -> &Vec<u8> {
+ &self.value
+ }
+
+ fn get_mut(&mut self) -> &mut Vec<u8> {
+ &mut self.value
+ }
+}