diff --git a/.config/dictionaries/project.dic b/.config/dictionaries/project.dic index 856942f6b77..9c74cf67813 100644 --- a/.config/dictionaries/project.dic +++ b/.config/dictionaries/project.dic @@ -48,7 +48,6 @@ ciphertexts Coap codegen codepoints -collabs coti coverallsapp cpus diff --git a/rust/cardano-blockchain-types/Cargo.toml b/rust/cardano-blockchain-types/Cargo.toml index b345d70fdf0..580c7f9a9f1 100644 --- a/rust/cardano-blockchain-types/Cargo.toml +++ b/rust/cardano-blockchain-types/Cargo.toml @@ -20,8 +20,8 @@ workspace = true [dependencies] pallas = { version = "0.33.0" } # pallas-hardano = { version = "0.33.0" } -cbork-utils = { version = "0.0.1", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "r20250724-01" } -catalyst-types = { version = "0.0.4", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "r20250724-01" } +cbork-utils = { version = "0.0.1", path = "../cbork-utils" } +catalyst-types = { version = "0.0.4", path = "../catalyst-types" } ouroboros = "0.18.4" tracing = "0.1.41" diff --git a/rust/cardano-chain-follower/Cargo.toml b/rust/cardano-chain-follower/Cargo.toml index aa415517206..478fcc3abf6 100644 --- a/rust/cardano-chain-follower/Cargo.toml +++ b/rust/cardano-chain-follower/Cargo.toml @@ -19,8 +19,9 @@ mithril-client = { version = "0.12.2", default-features = false, features = [ "full", "num-integer-backend", ] } -cardano-blockchain-types = { version = "0.0.5", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "cardano-blockchain-types-v0.0.5" } -catalyst-types = { version = "0.0.4", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "r20250724-01" } + +cardano-blockchain-types = { version = "0.0.5", path = "../cardano-blockchain-types" } +catalyst-types = { version = "0.0.4", path = "../catalyst-types" } thiserror = "1.0.69" @@ -64,7 +65,7 @@ test-log = { version = "0.2.16", default-features = false, features = [ "trace", ] } clap = "4.5.23" -rbac-registration = { version = "0.0.5", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "r20250724-01" } +rbac-registration = { version = "0.0.5", path = "../rbac-registration" } # Note, these features are for support of features exposed by dependencies. [features] diff --git a/rust/catalyst-types/Cargo.toml b/rust/catalyst-types/Cargo.toml index 0b550d8ec68..29c48bc05cf 100644 --- a/rust/catalyst-types/Cargo.toml +++ b/rust/catalyst-types/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "catalyst-types" -version = "0.0.3" +version = "0.0.4" edition.workspace = true license.workspace = true authors.workspace = true diff --git a/rust/catalyst-types/src/catalyst_id/mod.rs b/rust/catalyst-types/src/catalyst_id/mod.rs index 8557e34ef6c..b9cfc86a8ca 100644 --- a/rust/catalyst-types/src/catalyst_id/mod.rs +++ b/rust/catalyst-types/src/catalyst_id/mod.rs @@ -709,6 +709,12 @@ impl TryFrom<&[u8]> for CatalystId { } } +impl From<&CatalystId> for Vec<u8> { + fn from(value: &CatalystId) -> Self { + value.to_string().into_bytes() + } +} + #[cfg(test)] mod tests { use chrono::{DateTime, Utc}; diff --git a/rust/catalyst-types/src/uuid/mod.rs b/rust/catalyst-types/src/uuid/mod.rs index 3e25737e1de..572f2ff6b39 100644 --- a/rust/catalyst-types/src/uuid/mod.rs +++ b/rust/catalyst-types/src/uuid/mod.rs @@ -1,10 +1,10 @@ //! `UUID` types.
-pub use uuid::Uuid; +pub use uuid::{uuid, Uuid}; #[allow(clippy::module_name_repetitions)] -pub use uuid_v4::UuidV4; +pub use uuid_v4::{InvalidUuidV4, ParsingError as UuidV4ParsingError, UuidV4}; #[allow(clippy::module_name_repetitions)] -pub use uuid_v7::UuidV7; +pub use uuid_v7::{InvalidUuidV7, ParsingError as UuidV7ParsingError, UuidV7}; mod uuid_v4; mod uuid_v7; @@ -15,21 +15,7 @@ use minicbor::data::Tag; pub const INVALID_UUID: uuid::Uuid = uuid::Uuid::from_bytes([0x00; 16]); /// UUID CBOR tag . -#[allow(dead_code)] -const UUID_CBOR_TAG: u64 = 37; - -/// Uuid validation errors, which could occur during decoding or converting to -/// `UuidV4` or `UuidV7` types. -#[derive(Debug, Clone, thiserror::Error)] -#[allow(clippy::module_name_repetitions)] -pub enum UuidError { - /// `UUIDv4` invalid error - #[error("'{0}' is not a valid UUIDv4")] - InvalidUuidV4(uuid::Uuid), - /// `UUIDv7` invalid error - #[error("'{0}' is not a valid UUIDv7")] - InvalidUuidV7(uuid::Uuid), -} +pub const UUID_CBOR_TAG: u64 = 37; /// Context for `CBOR` encoding and decoding pub enum CborContext { diff --git a/rust/catalyst-types/src/uuid/uuid_v4.rs b/rust/catalyst-types/src/uuid/uuid_v4.rs index c7e2dbb8149..6645f284120 100644 --- a/rust/catalyst-types/src/uuid/uuid_v4.rs +++ b/rust/catalyst-types/src/uuid/uuid_v4.rs @@ -1,15 +1,23 @@ //! `UUIDv4` Type. -use std::fmt::{Display, Formatter}; +use std::{ + fmt::{Display, Formatter}, + str::FromStr, +}; use minicbor::{Decode, Decoder, Encode}; use uuid::Uuid; -use super::{decode_cbor_uuid, encode_cbor_uuid, CborContext, UuidError, INVALID_UUID}; +use super::{decode_cbor_uuid, encode_cbor_uuid, CborContext}; /// Type representing a `UUIDv4`. -#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, serde::Serialize)] +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, serde::Serialize)] pub struct UuidV4(Uuid); +/// `UUIDv4` invalid error +#[derive(Debug, Clone, thiserror::Error)] +#[error("'{0}' is not a valid UUIDv4")] +pub struct InvalidUuidV4(uuid::Uuid); + impl UuidV4 { /// Version for `UUIDv4`. const UUID_VERSION_NUMBER: usize = 4; @@ -21,28 +29,28 @@ impl UuidV4 { Self(Uuid::new_v4()) } - /// Generates a zeroed out `UUIDv4` that can never be valid. - #[must_use] - pub fn invalid() -> Self { - Self(INVALID_UUID) - } - - /// Check if this is a valid `UUIDv4`. - #[must_use] - pub fn is_valid(&self) -> bool { - is_valid(&self.uuid()) - } - /// Returns the `uuid::Uuid` type. #[must_use] pub fn uuid(&self) -> Uuid { self.0 } + + /// A const alternative impl of `TryFrom` + /// + /// # Errors + /// - `InvalidUuidV4` + pub const fn try_from_uuid(uuid: Uuid) -> Result { + if is_valid(&uuid) { + Ok(Self(uuid)) + } else { + Err(InvalidUuidV4(uuid)) + } + } } /// Check if this is a valid `UUIDv4`. -fn is_valid(uuid: &Uuid) -> bool { - uuid != &INVALID_UUID && uuid.get_version_num() == UuidV4::UUID_VERSION_NUMBER +const fn is_valid(uuid: &Uuid) -> bool { + uuid.get_version_num() == UuidV4::UUID_VERSION_NUMBER } impl Display for UuidV4 { @@ -54,13 +62,7 @@ impl Display for UuidV4 { impl Decode<'_, CborContext> for UuidV4 { fn decode(d: &mut Decoder<'_>, ctx: &mut CborContext) -> Result { let uuid = decode_cbor_uuid(d, ctx)?; - if is_valid(&uuid) { - Ok(Self(uuid)) - } else { - Err(minicbor::decode::Error::message(UuidError::InvalidUuidV4( - uuid, - ))) - } + Self::try_from_uuid(uuid).map_err(minicbor::decode::Error::message) } } @@ -74,14 +76,10 @@ impl Encode for UuidV4 { /// Returns a `UUIDv4` from `uuid::Uuid`. 
impl TryFrom for UuidV4 { - type Error = UuidError; + type Error = InvalidUuidV4; fn try_from(uuid: Uuid) -> Result { - if is_valid(&uuid) { - Ok(Self(uuid)) - } else { - Err(UuidError::InvalidUuidV4(uuid)) - } + Self::try_from_uuid(uuid) } } @@ -98,43 +96,47 @@ impl<'de> serde::Deserialize<'de> for UuidV4 { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de> { let uuid = Uuid::deserialize(deserializer)?; - if is_valid(&uuid) { - Ok(Self(uuid)) - } else { - Err(serde::de::Error::custom(UuidError::InvalidUuidV4(uuid))) - } + Self::try_from_uuid(uuid).map_err(serde::de::Error::custom) + } +} + +/// `FromStr` invalid error +#[derive(Debug, Clone, thiserror::Error)] +pub enum ParsingError { + /// `UUIDv4` invalid error + #[error(transparent)] + InvalidUuidV4(#[from] InvalidUuidV4), + /// Invalid string conversion + #[error("Invalid string conversion: {0}")] + StringConversion(String), +} + +impl FromStr for UuidV4 { + type Err = ParsingError; + + fn from_str(s: &str) -> Result { + let uuid = Uuid::parse_str(s).map_err(|_| ParsingError::StringConversion(s.to_string()))?; + Ok(Self::try_from_uuid(uuid)?) } } #[cfg(test)] mod tests { use super::*; + use crate::uuid::INVALID_UUID; #[test] fn test_invalid_uuid() { - let invalid_uuid = UuidV4::invalid(); - assert!(!invalid_uuid.is_valid(), "Invalid UUID should not be valid"); - assert_eq!( - invalid_uuid.uuid(), - INVALID_UUID, - "Invalid UUID should match INVALID_UUID" - ); - } + assert!(UuidV4::try_from(Uuid::now_v7()).is_err()); - #[test] - fn test_valid_uuid() { - let valid_uuid = UuidV4::try_from(Uuid::new_v4()).unwrap(); - assert!(valid_uuid.is_valid(), "Valid UUID should be valid"); - - let valid_uuid = UuidV4::new(); - assert!(valid_uuid.is_valid(), "Valid UUID should be valid"); - } - - #[test] - fn test_invalid_version_uuid() { assert!( UuidV4::try_from(INVALID_UUID).is_err(), "Zero UUID should not be valid" ); } + + #[test] + fn test_valid_uuid() { + assert!(UuidV4::try_from(Uuid::new_v4()).is_ok()); + } } diff --git a/rust/catalyst-types/src/uuid/uuid_v7.rs b/rust/catalyst-types/src/uuid/uuid_v7.rs index 98fbd8cda6e..a87bc22ba0d 100644 --- a/rust/catalyst-types/src/uuid/uuid_v7.rs +++ b/rust/catalyst-types/src/uuid/uuid_v7.rs @@ -1,15 +1,23 @@ //! `UUIDv7` Type. -use std::fmt::{Display, Formatter}; +use std::{ + fmt::{Display, Formatter}, + str::FromStr, +}; use minicbor::{Decode, Decoder, Encode}; use uuid::Uuid; -use super::{decode_cbor_uuid, encode_cbor_uuid, CborContext, UuidError, INVALID_UUID}; +use super::{decode_cbor_uuid, encode_cbor_uuid, CborContext}; /// Type representing a `UUIDv7`. -#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, serde::Serialize)] +#[derive(Copy, Clone, Debug, PartialEq, PartialOrd, Eq, Hash, serde::Serialize)] pub struct UuidV7(Uuid); +/// `UUIDv7` invalid error +#[derive(Debug, Clone, thiserror::Error)] +#[error("'{0}' is not a valid UUIDv7")] +pub struct InvalidUuidV7(uuid::Uuid); + impl UuidV7 { /// Version for `UUIDv7`. const UUID_VERSION_NUMBER: usize = 7; @@ -21,28 +29,28 @@ impl UuidV7 { Self(Uuid::now_v7()) } - /// Generates a zeroed out `UUIDv7` that can never be valid. - #[must_use] - pub fn invalid() -> Self { - Self(INVALID_UUID) - } - - /// Check if this is a valid `UUIDv7`. - #[must_use] - pub fn is_valid(&self) -> bool { - is_valid(&self.0) - } - /// Returns the `uuid::Uuid` type. 
#[must_use] pub fn uuid(&self) -> Uuid { self.0 } + + /// A const alternative impl of `TryFrom` + /// + /// # Errors + /// - `InvalidUuidV7` + pub const fn try_from_uuid(uuid: Uuid) -> Result { + if is_valid(&uuid) { + Ok(Self(uuid)) + } else { + Err(InvalidUuidV7(uuid)) + } + } } /// Check if this is a valid `UUIDv7`. -fn is_valid(uuid: &Uuid) -> bool { - uuid != &INVALID_UUID && uuid.get_version_num() == UuidV7::UUID_VERSION_NUMBER +const fn is_valid(uuid: &Uuid) -> bool { + uuid.get_version_num() == UuidV7::UUID_VERSION_NUMBER } impl Display for UuidV7 { @@ -54,13 +62,7 @@ impl Display for UuidV7 { impl Decode<'_, CborContext> for UuidV7 { fn decode(d: &mut Decoder<'_>, ctx: &mut CborContext) -> Result { let uuid = decode_cbor_uuid(d, ctx)?; - if is_valid(&uuid) { - Ok(Self(uuid)) - } else { - Err(minicbor::decode::Error::message(UuidError::InvalidUuidV7( - uuid, - ))) - } + Self::try_from_uuid(uuid).map_err(minicbor::decode::Error::message) } } @@ -74,14 +76,10 @@ impl Encode for UuidV7 { /// Returns a `UUIDv7` from `uuid::Uuid`. impl TryFrom for UuidV7 { - type Error = UuidError; + type Error = InvalidUuidV7; fn try_from(uuid: Uuid) -> Result { - if is_valid(&uuid) { - Ok(Self(uuid)) - } else { - Err(UuidError::InvalidUuidV7(uuid)) - } + Self::try_from_uuid(uuid) } } @@ -98,11 +96,27 @@ impl<'de> serde::Deserialize<'de> for UuidV7 { fn deserialize(deserializer: D) -> Result where D: serde::Deserializer<'de> { let uuid = Uuid::deserialize(deserializer)?; - if is_valid(&uuid) { - Ok(Self(uuid)) - } else { - Err(serde::de::Error::custom(UuidError::InvalidUuidV7(uuid))) - } + Self::try_from_uuid(uuid).map_err(serde::de::Error::custom) + } +} + +/// `FromStr` invalid error +#[derive(Debug, Clone, thiserror::Error)] +pub enum ParsingError { + /// `UUIDv7` invalid error + #[error(transparent)] + InvalidUuidV7(#[from] InvalidUuidV7), + /// Invalid string conversion + #[error("Invalid string conversion: {0}")] + StringConversion(String), +} + +impl FromStr for UuidV7 { + type Err = ParsingError; + + fn from_str(s: &str) -> Result { + let uuid = Uuid::parse_str(s).map_err(|_| ParsingError::StringConversion(s.to_string()))?; + Ok(Self::try_from_uuid(uuid)?) } } @@ -111,32 +125,20 @@ mod tests { use uuid::Uuid; use super::*; + use crate::uuid::INVALID_UUID; #[test] fn test_invalid_uuid() { - let invalid_uuid = UuidV7::invalid(); - assert!(!invalid_uuid.is_valid(), "Invalid UUID should not be valid"); - assert_eq!( - invalid_uuid.uuid(), - INVALID_UUID, - "Invalid UUID should match INVALID_UUID" - ); - } + assert!(UuidV7::try_from(Uuid::new_v4()).is_err()); - #[test] - fn test_valid_uuid() { - let valid_uuid = UuidV7::try_from(Uuid::now_v7()).unwrap(); - assert!(valid_uuid.is_valid(), "Valid UUID should be valid"); - - let valid_uuid = UuidV7::new(); - assert!(valid_uuid.is_valid(), "Valid UUID should be valid"); - } - - #[test] - fn test_invalid_version_uuid() { assert!( UuidV7::try_from(INVALID_UUID).is_err(), "Zero UUID should not be valid" ); } + + #[test] + fn test_valid_uuid() { + assert!(UuidV7::try_from(Uuid::now_v7()).is_ok()); + } } diff --git a/rust/cbork-utils/src/array.rs b/rust/cbork-utils/src/array.rs new file mode 100644 index 00000000000..92d1fbc66aa --- /dev/null +++ b/rust/cbork-utils/src/array.rs @@ -0,0 +1,319 @@ +//! CBOR array (CBOR major type 4) structure with CBOR decoding and encoding +//! functionality. Supports deterministically encoded rules (RFC 8949 Section 4.2) if +//! corresponding option is enabled. 
+ +use std::{ops::Deref, vec::IntoIter}; + +use crate::{ + decode_context::DecodeCtx, decode_helper::get_bytes, deterministic_helper::CBOR_MAX_TINY_VALUE, +}; + +/// Represents a CBOR array, preserving original decoding order of values. +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Array(Vec<Vec<u8>>); + +impl Deref for Array { + type Target = Vec<Vec<u8>>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl IntoIterator for Array { + type IntoIter = IntoIter<Vec<u8>>; + type Item = Vec<u8>; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +/// Major type indicator for CBOR arrays (major type 4: 100 in top 3 bits) +/// As per RFC 8949 Section 4.2, arrays in deterministic encoding must: +/// - Have lengths encoded minimally (Section 4.2.1) +/// - Use definite-length encoding only (Section 4.2.2) +/// - Have all elements themselves deterministically encoded +const CBOR_MAJOR_TYPE_ARRAY: u8 = 4 << 5; + +/// Initial byte for a CBOR array whose length is encoded as an 8-bit unsigned integer +/// (uint8). +/// +/// This value combines the array major type (4) with the additional information value +/// (24) that indicates a uint8 length follows. The resulting byte is: +/// - High 3 bits: 100 (major type 4 for array) +/// - Low 5 bits: 24 (indicates uint8 length follows) +/// +/// Used when encoding CBOR arrays with lengths between 24 and 255 elements. +const CBOR_ARRAY_LENGTH_UINT8: u8 = CBOR_MAJOR_TYPE_ARRAY | 24; // For uint8 length encoding + +/// Decodes a CBOR array with deterministic encoding validation (RFC 8949 Section 4.2) +/// Returns the raw bytes of the array elements if it passes all deterministic validation +/// rules. +/// +/// From RFC 8949 Section 4.2: +/// Arrays must follow these deterministic encoding rules: +/// - Array lengths must use minimal encoding (Section 4.2.1) +/// - Indefinite-length arrays are not allowed (Section 4.2.2) +/// - All array elements must themselves be deterministically encoded +/// +/// # Errors +/// +/// Returns `DeterministicError` if: +/// - Input is empty (`UnexpectedEof`) +/// - Array uses indefinite-length encoding (`IndefiniteLength`) +/// - Array length is not encoded minimally (`NonMinimalInt`) +/// - Array element decoding fails (`DecoderError`) +/// - Array elements are not deterministically encoded +impl minicbor::Decode<'_, DecodeCtx> for Array { + fn decode( + d: &mut minicbor::Decoder<'_>, ctx: &mut DecodeCtx, + ) -> Result<Self, minicbor::decode::Error> { + // Capture position before reading the array header + let header_start_pos = d.position(); + + // Handle both definite and indefinite-length arrays + let length = d.array()?.ok_or_else(|| { + minicbor::decode::Error::message( + "Indefinite-length items must be made definite-length items", + ) + })?; + + ctx.try_check(|| check_array_minimal_length(d, header_start_pos, length))?; + + decode_array_elements(d, length, ctx).map(Self) + } +} + +/// Validates that a CBOR array's length is encoded using the minimal number of bytes as +/// required by RFC 8949's deterministic encoding rules.
+/// +/// According to the deterministic encoding requirements: +/// - The length of an array MUST be encoded using the smallest possible CBOR additional +/// information value +/// - For values 0 through 23, the additional info byte is used directly +/// - For values that fit in 8, 16, 32, or 64 bits, the appropriate multi-byte encoding +/// must be used +/// +/// # Specification Reference +/// This implementation follows RFC 8949 Section 4.2.1 which requires that: +/// "The length of arrays, maps, and strings MUST be encoded using the smallest possible +/// CBOR additional information value." +fn check_array_minimal_length( + decoder: &minicbor::Decoder, header_start_pos: usize, value: u64, +) -> Result<(), minicbor::decode::Error> { + // For zero length, 0x80 is always the minimal encoding + if value == 0 { + return Ok(()); + } + + let initial_byte = decoder + .input() + .get(header_start_pos) + .copied() + .ok_or_else(|| { + minicbor::decode::Error::message("Cannot read initial byte for minimality check") + })?; + + // Only check minimality for array length encodings using uint8 + // Immediate values (0-23) are already minimal by definition + if initial_byte == CBOR_ARRAY_LENGTH_UINT8 && value <= CBOR_MAX_TINY_VALUE { + return Err(minicbor::decode::Error::message( + "array minimal length failure", + )); + } + + Ok(()) +} + +/// Decodes all elements in the array +fn decode_array_elements( + d: &mut minicbor::Decoder, length: u64, _ctx: &mut DecodeCtx, +) -> Result>, minicbor::decode::Error> { + let capacity = usize::try_from(length).map_err(|_| { + minicbor::decode::Error::message("Array length too large for current platform") + })?; + let mut elements = Vec::with_capacity(capacity); + + // Decode each array element + for _ in 0..length { + // Record the starting position of the element + let element_start = d.position(); + + // Skip over the element to find its end position + d.skip()?; + let element_end = d.position(); + + // The elements themselves must be deterministically encoded (4.2.1) + let element_bytes = get_bytes(d, element_start, element_end)?.to_vec(); + + elements.push(element_bytes); + } + + Ok(elements) +} + +#[cfg(test)] +mod tests { + use minicbor::{Decode, Decoder}; + + use super::*; + + /// Ensures that encoding and decoding an array preserves: + /// - The exact byte representation of elements + /// - The definite length encoding format + /// - The order of elements + #[test] + fn test_array_bytes_roundtrip() { + // Create a valid deterministic array encoding + let mut decoder = Decoder::new(&[ + 0x82, // 2 elements + 0x41, 0x01, // h'01' + 0x42, 0x01, 0x02, // h'0102' + ]); + let result = Array::decode(&mut decoder, &mut DecodeCtx::Deterministic).unwrap(); + + // Verify we got back exactly the same bytes + assert_eq!( + result, + Array(vec![ + vec![0x41, 0x01], // h'01' + vec![0x42, 0x01, 0x02], // h'0102' + ]) + ); + } + + /// Test empty array handling - special case mentioned in RFC 8949. + /// An empty array is valid and must still follow length encoding rules + /// from Section 4.2.1. 
+ #[test] + fn test_empty_array() { + let mut decoder = Decoder::new(&[ + 0x80, // Array with 0 elements - encoded with immediate value as per Section 4.2.1 + ]); + assert!(Array::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + } + + /// Test minimal length encoding rules for arrays as specified in RFC 8949 Section + /// 4.2.1 + /// + /// From RFC 8949 Section 4.2.1: + /// "The length of arrays, maps, strings, and byte strings must be encoded in the + /// smallest possible way. For arrays (major type 4), lengths 0-23 must be encoded + /// in the initial byte." + #[test] + fn test_array_minimal_length_encoding() { + // Test case 1: Valid minimal encoding (length = 1) + let mut decoder = Decoder::new(&[ + 0x81, // Array, length 1 (major type 4 with immediate value 1) + 0x01, // Element: unsigned int 1 + ]); + assert!(Array::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + + // Test case 2: Invalid non-minimal encoding (using additional info 24 for length 1) + let mut decoder = Decoder::new(&[ + 0x98, // Array with additional info = 24 (0x80 | 0x18) + 0x01, // Length encoded as uint8 = 1 + 0x01, // Element: unsigned int 1 + ]); + assert!(Array::decode(&mut decoder.clone(), &mut DecodeCtx::Deterministic).is_err()); + assert!(Array::decode(&mut decoder, &mut DecodeCtx::non_deterministic()).is_ok()); + } + + /// Test handling of complex element structures while maintaining deterministic + /// encoding + /// + /// RFC 8949 Section 4.2 requires that all elements be deterministically encoded: + /// "All contained items must also follow the same rules." + #[test] + fn test_array_complex_elements() { + let mut decoder = Decoder::new(&[ + 0x84, // Array with 4 elements + 0x41, 0x01, // Element 1: simple 1-byte string + 0x42, 0x01, 0x02, // Element 2: 2-byte string + 0x62, 0x68, 0x69, // Element 3: "hi" + 0xF9, 0x00, 0x00, // Element 4: float 0.0 half-precision canonical encoding + ]); + assert!(Array::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + } + + /// Test edge cases for array encoding while maintaining compliance with RFC 8949 + /// + /// These cases test boundary conditions that must still follow all rules from + /// Section 4.2: + /// - Minimal length encoding (4.2.1) + /// - No indefinite lengths (4.2.2) + /// - Deterministic element encoding + #[test] + fn test_array_edge_cases() { + // Single element array - must still follow minimal length encoding rules + let mut decoder = Decoder::new(&[ + 0x81, // Array with 1 element (using immediate value as per Section 4.2.1) + 0x41, 0x01, // Element: 1-byte string + ]); + assert!(Array::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + + // Array with zero-length string element - tests smallest possible element case + let mut decoder = Decoder::new(&[ + 0x81, // Array with 1 element + 0x40, // Element: 0-byte string (smallest possible element) + ]); + assert!(Array::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + } + + /// Test array with multiple elements of different types + #[test] + fn test_array_mixed_elements() { + // Array with integer, string, and nested array elements + let mut decoder = Decoder::new(&[ + 0x83, // Array with 3 elements + 0x01, // Element 1: unsigned int 1 + 0x41, 0x48, // Element 2: 1-byte string "H" + 0x81, 0x02, // Element 3: nested array with one element (unsigned int 2) + ]); + assert!(Array::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + } + + /// Test array with multiple elements + #[allow(clippy::indexing_slicing)] + 
#[test] + fn test_array_larger_size() { + // Test with a simple array of 5 single-byte strings + let mut decoder = Decoder::new(&[ + 0x85, // Array with 5 elements + 0x41, 0x01, // Element 1: 1-byte string with value 0x01 + 0x41, 0x02, // Element 2: 1-byte string with value 0x02 + 0x41, 0x03, // Element 3: 1-byte string with value 0x03 + 0x41, 0x04, // Element 4: 1-byte string with value 0x04 + 0x41, 0x05, // Element 5: 1-byte string with value 0x05 + ]); + let result = Array::decode(&mut decoder, &mut DecodeCtx::Deterministic); + assert!(result.is_ok()); + + let array = result.unwrap(); + assert_eq!(array.len(), 5); + + // Verify the elements are correctly decoded + assert_eq!(array[0], vec![0x41, 0x01]); + assert_eq!(array[1], vec![0x41, 0x02]); + assert_eq!(array[2], vec![0x41, 0x03]); + assert_eq!(array[3], vec![0x41, 0x04]); + assert_eq!(array[4], vec![0x41, 0x05]); + } + + /// Test indefinite-length array rejection in both deterministic + /// and non-deterministic modes + #[test] + fn test_array_with_indefinite_length() { + // Indefinite-length array (not allowed in deterministic encoding) + let decoder = Decoder::new(&[ + 0x9F, // Array with indefinite length + 0x01, // Element 1 + 0x02, // Element 2 + 0xFF, // Break code + ]); + assert!(Array::decode(&mut decoder.clone(), &mut DecodeCtx::Deterministic).is_err()); + // Even in non-deterministic mode this should fail, as definite lengths are always enforced. + assert!(Array::decode(&mut decoder.clone(), &mut DecodeCtx::non_deterministic()).is_err()); + } +} diff --git a/rust/cbork-utils/src/decode_context.rs b/rust/cbork-utils/src/decode_context.rs new file mode 100644 index 00000000000..c206c8891a8 --- /dev/null +++ b/rust/cbork-utils/src/decode_context.rs @@ -0,0 +1,60 @@ +//! CBOR decode context which can be used as an argument for the `minicbor::Decode` logic + +/// A type alias for the deterministic error handler function +pub type DeterministicErrorHandler = + Box<dyn FnMut(minicbor::decode::Error) -> Result<(), minicbor::decode::Error>>; + +/// CBOR `minicbor::Decode` context struct. +pub enum DecodeCtx { + /// Decode a CBOR object applying deterministic decoding rules (RFC 8949 + /// Section 4.2). + Deterministic, + /// Decode a CBOR object **NOT** applying deterministic decoding rules (RFC 8949 + /// Section 4.2). + /// + /// Optionally it can carry a deterministic decoding error handler: if one is provided, + /// the deterministic decoding rules are still checked and any resulting error message + /// is passed to the handler function + NonDeterministic(Option<DeterministicErrorHandler>), +} + +impl DecodeCtx { + /// Returns `DecodeCtx::NonDeterministic` variant + /// Decode a CBOR object **NOT** applying deterministic decoding rules (RFC 8949 + /// Section 4.2). + #[must_use] + pub fn non_deterministic() -> Self { + Self::NonDeterministic(None) + } + + /// Returns `DecodeCtx::NonDeterministic` variant + /// Decode a CBOR object **NOT** applying deterministic decoding rules (RFC 8949 + /// Section 4.2).
+ /// + /// When deterministic decoding rule is applied, the error message passed to + /// the provided `handler` + #[must_use] + pub fn non_deterministic_with_handler( + handler: impl FnMut(minicbor::decode::Error) -> Result<(), minicbor::decode::Error> + 'static, + ) -> Self { + Self::NonDeterministic(Some(Box::new(handler))) + } + + /// Depends on the set `DecodeCtx` variant applies the provided deterministic + /// validation + pub(crate) fn try_check( + &mut self, f: impl FnOnce() -> Result<(), minicbor::decode::Error>, + ) -> Result<(), minicbor::decode::Error> { + match self { + Self::Deterministic => f(), + Self::NonDeterministic(None) => Ok(()), + Self::NonDeterministic(Some(h)) => { + if let Err(err) = f() { + h(err) + } else { + Ok(()) + } + }, + } + } +} diff --git a/rust/cbork-utils/src/decode_helper.rs b/rust/cbork-utils/src/decode_helper.rs index 1a8ab480f1e..d1e218311e0 100644 --- a/rust/cbork-utils/src/decode_helper.rs +++ b/rust/cbork-utils/src/decode_helper.rs @@ -92,6 +92,20 @@ pub fn decode_any<'d>(d: &mut Decoder<'d>, from: &str) -> Result<&'d [u8], decod Ok(bytes) } +/// Extracts the raw bytes of a CBOR map from a decoder based on specified positions. +/// This function retrieves the raw byte representation of a CBOR map between the given +/// start and end positions from the decoder's underlying buffer. +/// +/// # Errors +/// - Invalid map byte range: indices out of bounds +pub fn get_bytes<'a>( + d: &Decoder<'a>, map_start: usize, map_end: usize, +) -> Result<&'a [u8], decode::Error> { + d.input() + .get(map_start..map_end) + .ok_or_else(|| decode::Error::message("Invalid map byte range: indices out of bounds")) +} + #[cfg(test)] mod tests { use minicbor::Encoder; diff --git a/rust/cbork-utils/src/deterministic_helper.rs b/rust/cbork-utils/src/deterministic_helper.rs new file mode 100644 index 00000000000..c35229a4125 --- /dev/null +++ b/rust/cbork-utils/src/deterministic_helper.rs @@ -0,0 +1,238 @@ +//! CBOR decoding helper functions with deterministic encoding validation. +//! +//! Based on RFC 8949 Section 4.2 "Deterministically Encoded CBOR" +//! Rules for deterministic encoding: +//! 1. Integers must use the smallest possible encoding +//! 2. Lengths of arrays, maps, strings must use the smallest possible encoding +//! 3. Indefinite-length items are not allowed +//! 4. Keys in every map must be sorted in lexicographic order +//! 5. Duplicate keys in maps are not allowed +//! 6. Floating point values must use smallest possible encoding +//! 7. Non-finite floating point values are not allowed (NaN, infinite) + +/// Maximum value that can be encoded in a 5-bit additional info field +/// RFC 8949 Section 4.2.1: "0 to 23 must be expressed in the same byte as the major type" +/// Values 0-23 are encoded directly in the additional info field of the initial byte +pub(crate) const CBOR_MAX_TINY_VALUE: u64 = 23; + +/// Extracts the declared length from a CBOR data item according to RFC 8949 encoding +/// rules. +/// +/// This function analyzes the major type and additional information in the CBOR initial +/// byte to determine if the data item has a declared length and what that length is. 
+/// +/// ## CBOR Major Types and Length Semantics (RFC 8949 Section 3): +/// +/// - **Major Type 0/1 (Unsigned/Negative Integers)**: No length concept - the value IS +/// the data +/// - **Major Type 2 (Byte String)**: Length indicates number of bytes in the string +/// - **Major Type 3 (Text String)**: Length indicates number of bytes in UTF-8 encoding +/// - **Major Type 4 (Array)**: Length indicates number of data items (elements) in the +/// array +/// - **Major Type 5 (Map)**: Length indicates number of key-value pairs in the map +/// - **Major Type 6 (Semantic Tag)**: Tags the following data item, length from tagged +/// content +/// - **Major Type 7 (Primitives)**: No length for simple values, floats, etc. +/// +/// ## Errors +pub fn get_declared_length(bytes: &[u8]) -> Result, minicbor::decode::Error> { + let mut decoder = minicbor::Decoder::new(bytes); + + // Extract major type from high 3 bits of initial byte (RFC 8949 Section 3.1) + match bytes.first().map(|&b| b >> 5) { + Some(7 | 0 | 1 | 4 | 5 | 6) => Ok(None), + Some(2) => { + // Read length for byte string header + let len = decoder.bytes()?; + Ok(Some(len.len())) + }, + Some(3) => { + // Read length for text string header + let len = decoder.str()?; + Ok(Some(len.len())) + }, + + _ => Err(minicbor::decode::Error::message("Invalid type")), + } +} + +/// Returns the size of the CBOR header in bytes, based on RFC 8949 encoding rules. +/// +/// CBOR encodes data items with a header that consists of: +/// 1. An initial byte containing: +/// - Major type (3 most significant bits) +/// - Additional information (5 least significant bits) +/// 2. Optional following bytes based on the additional information value +/// +/// This function calculates only the size of the header itself, not including +/// any data that follows the header. It works with all CBOR major types: +/// - 0: Unsigned integer +/// - 1: Negative integer +/// - 2: Byte string +/// - 3: Text string +/// - 4: Array +/// - 5: Map +/// - 6: Tag +/// - 7: Simple/floating-point values +/// +/// For deterministically encoded CBOR (as specified in RFC 8949 Section 4.2), +/// indefinite length items are not allowed, so this function will return an error +/// when encountering additional information value 31. 
+/// # Arguments +/// * `bytes` - A byte slice containing CBOR-encoded data +/// +/// # Returns +/// * `Ok(usize)` - The size of the CBOR header in bytes +/// * `Err(DeterministicError)` - If the input is invalid or uses indefinite length +/// encoding +/// +/// # Errors +/// Returns an error if: +/// - The input is empty +/// - The input uses indefinite length encoding (additional info = 31) +/// - The additional information value is invalid +pub fn get_cbor_header_size(bytes: &[u8]) -> Result<usize, minicbor::decode::Error> { + // Extract the first byte which contains both major type and additional info + let first_byte = bytes + .first() + .copied() + .ok_or_else(|| minicbor::decode::Error::message("Empty cbor data"))?; + // Major type is in the high 3 bits (not used in this function but noted for clarity) + // let major_type = first_byte >> 5; + // Additional info is in the low 5 bits and determines header size + let additional_info = first_byte & 0b0001_1111; + + // Calculate header size based on additional info value + match additional_info { + // Values 0-23 are encoded directly in the additional info bits + // Header is just the initial byte + 0..=23 => Ok(1), + + // Value 24 means the actual value is in the next 1 byte + // Header is 2 bytes (initial byte + 1 byte) + 24 => Ok(2), + + // Value 25 means the actual value is in the next 2 bytes + // Header is 3 bytes (initial byte + 2 bytes) + 25 => Ok(3), + + // Value 26 means the actual value is in the next 4 bytes + // Header is 5 bytes (initial byte + 4 bytes) + 26 => Ok(5), + + // Value 27 means the actual value is in the next 8 bytes + // Header is 9 bytes (initial byte + 8 bytes) + 27 => Ok(9), + // Value 31 indicates indefinite length, which is not allowed in + // deterministic encoding per RFC 8949 section 4.2.1 + 31 => { + Err(minicbor::decode::Error::message( + "Cannot determine size of indefinite length item", + )) + }, + + // Values 28-30 are reserved in RFC 8949 and not valid in current CBOR + _ => { + Err(minicbor::decode::Error::message( + "Invalid additional info in CBOR header", + )) + }, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + /// Test `get_declared_length` for all CBOR major types per RFC 8949 + #[test] + fn test_get_declared_length() { + // Example 1: Empty byte string + // Encoding: [0x40] + // - 0x40 = 0b010_00000 (major type 2, additional info 0) + // - Length: 0 bytes + // - Content: none + let empty_bytes = vec![0x40]; + + let declared_length = get_declared_length(&empty_bytes).unwrap().unwrap(); + + assert_eq!(declared_length, 0); + + // Example 2: 2-byte string with immediate length encoding + // Encoding: [0x42, 0x01, 0x02] + // - 0x42 = 0b010_00010 (major type 2, additional info 2) + // - Length: 2 bytes (encoded immediately in additional info) + // - Content: [0x01, 0x02] + let short_bytes = vec![0x42, 0x01, 0x02]; + + let declared_length = get_declared_length(&short_bytes).unwrap().unwrap(); + + assert_eq!(declared_length, 2); + + // Example 3: 24-byte string requiring uint8 length encoding + // Encoding: [0x58, 0x18, 0x01, 0x02, ..., 0x18] + // - 0x58 = 0b010_11000 (major type 2, additional info 24) + // - Length: 24 (encoded as uint8 in next byte: 0x18 = 24) + // - Content: 24 bytes [0x01, 0x02, ..., 0x18] + let mut medium_bytes = vec![0x58, 0x18]; // Header: byte string, uint8 length 24 + medium_bytes.extend((1..=24).collect::<Vec<u8>>()); // Content: 24 bytes + + let declared_length = get_declared_length(&medium_bytes).unwrap().unwrap(); + assert_eq!(declared_length, 24); + + // Example 4: 256-byte string requiring
uint16 length encoding + // Encoding: [0x59, 0x01, 0x00, 0x00, 0x00, ..., 0xFF] + // - 0x59 = 0b010_11001 (major type 2, additional info 25) + // - Length: 256 (encoded as uint16 big-endian: [0x01, 0x00]) + // - Content: 256 bytes [0x00, 0x00, ..., 0xFF] + let mut large_bytes = vec![0x59, 0x01, 0x00]; // Header: byte string, uint16 length 256 + large_bytes.extend(vec![0x00; 256]); // Content: 256 zero bytes + + let declared_length = get_declared_length(&large_bytes).unwrap().unwrap(); + assert_eq!(declared_length, 256); + } + + #[test] + fn test_get_cbor_header_size() { + // Test direct values (additional info 0-23) + assert_eq!(get_cbor_header_size(&[0b000_00000]).unwrap(), 1); // Major type 0, value 0 + assert_eq!(get_cbor_header_size(&[0b001_10111]).unwrap(), 1); // Major type 1, value 23 + + // Test 1-byte uint (additional info 24) + assert_eq!(get_cbor_header_size(&[0b010_11000, 0x42]).unwrap(), 2); // Major type 2 + + // Test 2-byte uint (additional info 25) + assert_eq!(get_cbor_header_size(&[0b011_11001, 0x12, 0x34]).unwrap(), 3); // Major type 3 + + // Test 4-byte uint (additional info 26) + assert_eq!( + get_cbor_header_size(&[0b100_11010, 0x12, 0x34, 0x56, 0x78]).unwrap(), + 5 + ); // Major type 4 + + // Test 8-byte uint (additional info 27) + assert_eq!( + get_cbor_header_size(&[0b101_11011, 0x01, 0x23, 0x45, 0x67, 0x89, 0xAB, 0xCD, 0xEF]) + .unwrap(), + 9 + ); // Major type 5 + + // Error cases + // Empty input + assert!(get_cbor_header_size(&[]).is_err()); + + // Indefinite length (additional info 31) + let result = get_cbor_header_size(&[0b110_11111]); + assert!(result.is_err()); + + // Small map (size 1) - additional info 1 + assert_eq!(get_cbor_header_size(&[0b101_00001]).unwrap(), 1); // Map with 1 pair + + // Large map (size 65535) - additional info 25 (2-byte uint follows) + assert_eq!(get_cbor_header_size(&[0b101_11001, 0xFF, 0xFF]).unwrap(), 3); // Map with 65535 pairs + + // Reserved values (additional info 28-30) + assert!(get_cbor_header_size(&[0b111_11100]).is_err()); // Major type 7, value 28 + } +} diff --git a/rust/cbork-utils/src/lib.rs b/rust/cbork-utils/src/lib.rs index 1b29a48aaf0..b961569ca4e 100644 --- a/rust/cbork-utils/src/lib.rs +++ b/rust/cbork-utils/src/lib.rs @@ -1,3 +1,8 @@ //! CBOR utility modules. +pub mod array; +pub mod decode_context; pub mod decode_helper; +pub mod deterministic_helper; +pub mod map; +pub mod with_cbor_bytes; diff --git a/rust/cbork-utils/src/map.rs b/rust/cbork-utils/src/map.rs new file mode 100644 index 00000000000..ed72f54a32c --- /dev/null +++ b/rust/cbork-utils/src/map.rs @@ -0,0 +1,605 @@ +//! CBOR map (CBOR major type 5) structure with CBOR decoding and encoding functionality. +//! Supports deterministically encoded rules (RFC 8949 Section 4.2) if corresponding +//! option is enabled. + +use std::{cmp::Ordering, ops::Deref, vec::IntoIter}; + +use crate::{ + decode_context::DecodeCtx, + decode_helper::get_bytes, + deterministic_helper::{get_cbor_header_size, get_declared_length, CBOR_MAX_TINY_VALUE}, +}; + +/// Represents a CBOR map key-value pair, preserving original decoding order of values. 
+#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Map(Vec<MapEntry>); + +impl Deref for Map { + type Target = Vec<MapEntry>; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl IntoIterator for Map { + type IntoIter = IntoIter<MapEntry>; + type Item = MapEntry; + + fn into_iter(self) -> Self::IntoIter { + self.0.into_iter() + } +} + +/// Represents a CBOR map key-value pair where the key must be deterministically encoded +/// according to RFC 8949 Section 4.2.3. +/// +/// This type stores the raw bytes of both key and value to enable: +/// 1. Length-first ordering of keys (shorter keys before longer ones) +/// 2. Lexicographic comparison of equal-length keys +/// 3. Preservation of the original encoded form +#[derive(Clone, Eq, PartialEq, Debug)] +pub struct MapEntry { + /// Raw bytes of the encoded key, used for deterministic ordering + pub key_bytes: Vec<u8>, + /// Raw bytes of the encoded value + pub value: Vec<u8>, +} + +impl PartialOrd for MapEntry { + /// Compare map entries according to RFC 8949 Section 4.2.3 rules: + /// 1. Compare by length of encoded key + /// 2. If lengths equal, compare byte wise lexicographically + /// + /// Returns Some(ordering) since comparison is always defined for these types + fn partial_cmp(&self, other: &Self) -> Option<Ordering> { + Some(self.cmp(other)) + } +} + +impl Ord for MapEntry { + /// Compare map entries according to RFC 8949 Section 4.2.3 rules: + /// 1. Compare by length of encoded key + /// 2. If lengths equal, compare byte wise lexicographically + fn cmp(&self, other: &Self) -> Ordering { + self.key_bytes + .len() + .cmp(&other.key_bytes.len()) + .then_with(|| self.key_bytes.cmp(&other.key_bytes)) + } +} + +/// Major type indicator for CBOR maps (major type 5: 101 in top 3 bits) +/// As per RFC 8949 Section 4.2.3, maps in deterministic encoding must: +/// - Have keys sorted by length first, then byte wise lexicographically +/// - Contain no duplicate keys +const CBOR_MAJOR_TYPE_MAP: u8 = 5 << 5; + +/// Initial byte for a CBOR map whose length is encoded as an 8-bit unsigned integer +/// (uint8). +/// +/// This value combines the map major type (5) with the additional information value (24) +/// that indicates a uint8 length follows. The resulting byte is: +/// - High 3 bits: 101 (major type 5 for map) +/// - Low 5 bits: 24 (indicates uint8 length follows) +/// +/// Used when encoding CBOR maps with lengths between 24 and 255 elements. +const CBOR_MAP_LENGTH_UINT8: u8 = CBOR_MAJOR_TYPE_MAP | 24; // For uint8 length encoding + +/// Decodes a CBOR map with deterministic encoding validation (RFC 8949 Section 4.2.3) +/// Returns the raw bytes of the map if it passes all deterministic validation rules. +/// +/// From RFC 8949 Section 4.2.3: +/// "The keys in every map must be sorted in the following order: +/// 1. If two keys have different lengths, the shorter one sorts earlier; +/// 2. If two keys have the same length, the one with the lower value in (byte-wise) +/// lexical order sorts earlier."
+/// +/// Additionally: +/// - Map lengths must use minimal encoding (Section 4.2.1) +/// - Indefinite-length maps are not allowed (Section 4.2.2) +/// - No two keys may be equal (Section 4.2.3) +/// - The keys themselves must be deterministically encoded +/// +/// # Errors +/// +/// Returns `DeterministicError` if: +/// - Input is empty (`UnexpectedEof`) +/// - Map uses indefinite-length encoding (`IndefiniteLength`) +/// - Map length is not encoded minimally (`NonMinimalInt`) +/// - Map keys are not properly sorted (`UnorderedMapKeys`) +/// - Duplicate keys are found (`DuplicateMapKey`) +/// - Map key or value decoding fails (`DecoderError`) +impl minicbor::Decode<'_, DecodeCtx> for Map { + fn decode( + d: &mut minicbor::Decoder<'_>, ctx: &mut DecodeCtx, + ) -> Result { + // Capture position before reading the map header + let header_start_pos = d.position(); + + // From RFC 8949 Section 4.2.2: + // "Indefinite-length items must be made definite-length items." + // The specification explicitly prohibits indefinite-length items in + // deterministic encoding to ensure consistent representation. + let map_len = d.map()?.ok_or_else(|| { + minicbor::decode::Error::message( + "Indefinite-length items must be made definite-length items", + ) + })?; + + ctx.try_check(|| check_map_minimal_length(d, header_start_pos, map_len))?; + + // Decode entries to validate them + let entries = decode_map_entries(d, map_len, ctx)?; + + ctx.try_check(|| validate_map_ordering(&entries))?; + + Ok(Self(entries)) + } +} + +/// Validates that a CBOR map's length is encoded using the minimal number of bytes as +/// required by RFC 8949's deterministic encoding rules. +/// +/// According to the deterministic encoding requirements: +/// - The length of a map MUST be encoded using the smallest possible CBOR additional +/// information value +/// - For values 0 through 23, the additional info byte is used directly +/// - For values that fit in 8, 16, 32, or 64 bits, the appropriate multi-byte encoding +/// must be used +/// +/// # Specification Reference +/// This implementation follows RFC 8949 Section 4.2.1 which requires that: +/// "The length of arrays, maps, and strings MUST be encoded using the smallest possible +/// CBOR additional information value." 
+fn check_map_minimal_length( + decoder: &minicbor::Decoder, header_start_pos: usize, value: u64, +) -> Result<(), minicbor::decode::Error> { + // For zero length, 0xA0 is always the minimal encoding + if value == 0 { + return Ok(()); + } + + let initial_byte = decoder + .input() + .get(header_start_pos) + .copied() + .ok_or_else(|| { + minicbor::decode::Error::message("Cannot read initial byte for minimality check") + })?; + + // Only check minimality for map length encodings using uint8 + // Immediate values (0-23) are already minimal by definition + if initial_byte == CBOR_MAP_LENGTH_UINT8 && value <= CBOR_MAX_TINY_VALUE { + return Err(minicbor::decode::Error::message( + "map minimal length failure", + )); + } + + Ok(()) +} + +/// Decodes all key-value pairs in the map +fn decode_map_entries( + d: &mut minicbor::Decoder, length: u64, ctx: &mut DecodeCtx, +) -> Result, minicbor::decode::Error> { + let capacity = usize::try_from(length).map_err(|_| { + minicbor::decode::Error::message("Map length too large for current platform") + })?; + let mut entries = Vec::with_capacity(capacity); + + // Decode each key-value pair + for _ in 0..length { + // Record the starting position of the key + let key_start = d.position(); + + // Skip over the key to find its end position + d.skip()?; + let key_end = d.position(); + + // Record the starting position of the value + let value_start = d.position(); + + // Skip over the value to find its end position + d.skip()?; + let value_end = d.position(); + + // The keys themselves must be deterministically encoded (4.2.1) + let key_bytes = get_bytes(d, key_start, key_end)?.to_vec(); + + ctx.try_check(|| map_keys_are_deterministic(&key_bytes))?; + + let value = get_bytes(d, value_start, value_end)?.to_vec(); + + entries.push(MapEntry { key_bytes, value }); + } + + Ok(entries) +} + +/// Validates that a CBOR map key follows the deterministic encoding rules as specified in +/// RFC 8949. In this case, it validates that the keys themselves must be +/// deterministically encoded (4.2.1). +fn map_keys_are_deterministic(key_bytes: &[u8]) -> Result<(), minicbor::decode::Error> { + // if the map keys are not a txt string or byte string we cannot get a declared length + if let Some(key_declared_length) = get_declared_length(key_bytes)? { + let header_size = get_cbor_header_size(key_bytes)?; + let actual_content_size = key_bytes.len().checked_sub(header_size).ok_or_else(|| { + minicbor::decode::Error::message("Integer overflow in content size calculation") + })?; + + if key_declared_length != actual_content_size { + return Err(minicbor::decode::Error::message( + "Declared length does not match the actual length. 
Non deterministic map key.", + )); + } + } + Ok(()) +} + +/// Validates map keys are properly ordered according to RFC 8949 Section 4.2.3 +/// and checks for duplicate keys +fn validate_map_ordering(entries: &[MapEntry]) -> Result<(), minicbor::decode::Error> { + let mut iter = entries.iter(); + + // Get the first element if it exists + let Some(mut current) = iter.next() else { + // Empty slice is valid + return Ok(()); + }; + + // Compare each adjacent pair + for next in iter { + check_pair_ordering(current, next)?; + current = next; + } + + Ok(()) +} + +/// Checks if two adjacent map entries are in the correct order: +/// - Keys must be in ascending order (current < next) +/// - Duplicate keys are not allowed (current != next) +fn check_pair_ordering(current: &MapEntry, next: &MapEntry) -> Result<(), minicbor::decode::Error> { + match current.cmp(next) { + Ordering::Less => Ok(()), // Valid: keys are in ascending order + Ordering::Equal => Err(minicbor::decode::Error::message("Duplicate map key found")), + Ordering::Greater => { + Err(minicbor::decode::Error::message( + "Map keys not in canonical order", + )) + }, + } +} + +#[cfg(test)] +mod tests { + use minicbor::{Decode, Decoder}; + + use super::*; + + /// Ensures that encoding and decoding a map preserves: + /// - The byte wise lexicographic ordering of keys + /// - The exact byte representation of values + /// - The definite length encoding format + #[test] + fn test_map_bytes_roundtrip() { + // Create a valid deterministic map encoding + let valid_map = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x02, // Key 1: 2-byte string + 0x41, 0x01, // Value 1: 1-byte string + 0x43, 0x01, 0x02, 0x03, // Key 2: 3-byte string + 0x41, 0x02, // Value 2: 1-byte string + ]; + + let mut decoder = Decoder::new(&valid_map); + let result = Map::decode(&mut decoder, &mut DecodeCtx::Deterministic).unwrap(); + + // Verify we got back exactly the same bytes + + assert_eq!( + result, + Map(vec![ + MapEntry { + // Key 1: 2-byte string + key_bytes: vec![0x42, 0x01, 0x02], + // Value 1: 1-byte string + value: vec![0x41, 0x01] + }, + MapEntry { + // Key 2: 3-byte string + key_bytes: vec![0x43, 0x01, 0x02, 0x03,], + // Value 2: 1-byte string + value: vec![0x41, 0x02,] + } + ]) + ); + } + + /// Test cases for lexicographic ordering of map keys as specified in RFC 8949 Section + /// 4.2.3. + /// + /// From RFC 8949 Section 4.2.3: + /// "The keys in every map must be sorted in the following order: + /// 1. If two keys have different lengths, the shorter one sorts earlier; + /// 2. If two keys have the same length, the one with the lower value in (byte-wise) + /// lexical order sorts earlier." 
+ #[test] + fn test_map_lexicographic_ordering() { + // Test case: Equal length keys must be sorted lexicographically + // This follows rule 2 from RFC 8949 Section 4.2.3 for same-length keys + let valid_map = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x02, // Key 1: [0x01, 0x02] + 0x41, 0x01, // Value 1 + 0x42, 0x01, 0x03, // Key 2: [0x01, 0x03] (lexicographically larger) + 0x41, 0x02, // Value 2 + ]; + let mut decoder = Decoder::new(&valid_map); + assert!(Map::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + + // Invalid ordering - violates RFC 8949 Section 4.2.3 rule 2: + // "If two keys have the same length, the one with the lower value in + // (byte-wise) lexical order sorts earlier" + let invalid_map = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x03, // Key 1: [0x01, 0x03] + 0x41, 0x01, // Value 1 + 0x42, 0x01, 0x02, // Key 2: [0x01, 0x02] (should come first) + 0x41, 0x02, // Value 2 + ]; + let mut decoder = Decoder::new(&invalid_map); + assert!(Map::decode(&mut decoder.clone(), &mut DecodeCtx::Deterministic).is_err()); + assert!(Map::decode(&mut decoder, &mut DecodeCtx::non_deterministic()).is_ok()); + } + + /// Test empty map handling - special case mentioned in RFC 8949. + /// An empty map is valid and must still follow length encoding rules + /// from Section 4.2.1. + #[test] + fn test_empty_map() { + let empty_map = vec![ + 0xA0, // Map with 0 pairs - encoded with immediate value as per Section 4.2.1 + ]; + let mut decoder = Decoder::new(&empty_map); + assert!(Map::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + } + + /// Test minimal length encoding rules for maps as specified in RFC 8949 Section 4.2.1 + /// + /// From RFC 8949 Section 4.2.1 "Integer Encoding": + /// "The following must be encoded only with the shortest form that can represent + /// the value: + /// 1. Integer values in items that use integer encoding + /// 2. The length of arrays, maps, strings, and byte strings + /// + /// Specifically for integers: + /// * 0 to 23 must be expressed in the same byte as the major type + /// * 24 to 255 must be expressed only with an additional `uint8_t` + /// * 256 to 65535 must be expressed only with an additional `uint16_t` + /// * 65536 to 4294967295 must be expressed only with an additional `uint32_t`" + /// + /// For maps (major type 5), the length must follow these rules. This ensures + /// a deterministic encoding where the same length is always encoded the same way. + /// Test minimal length encoding rules for maps as specified in RFC 8949 Section 4.2.1 + /// + /// From RFC 8949 Section 4.2.1: + /// "The length of arrays, maps, strings, and byte strings must be encoded in the + /// smallest possible way. For maps (major type 5), lengths 0-23 must be encoded + /// in the initial byte." 
+ #[test] + fn test_map_minimal_length_encoding() { + // Test case 1: Valid minimal encoding (length = 1) + let valid_small = vec![ + 0xA1, // Map, length 1 (major type 5 with immediate value 1) + 0x01, // Key: unsigned int 1 + 0x02, // Value: unsigned int 2 + ]; + let mut decoder = Decoder::new(&valid_small); + + assert!(Map::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + + // Test case 2: Invalid non-minimal encoding (using additional info 24 for length 1) + let invalid_small = vec![ + 0xB8, // Map with additional info = 24 (0xa0 | 0x18) + 0x01, // Length encoded as uint8 = 1 + 0x01, // Key: unsigned int 1 + 0x02, // Value: unsigned int 2 + ]; + let mut decoder = Decoder::new(&invalid_small); + assert!(Map::decode(&mut decoder.clone(), &mut DecodeCtx::Deterministic).is_err()); + assert!(Map::decode(&mut decoder, &mut DecodeCtx::non_deterministic()).is_ok()); + } + + /// Test handling of complex key structures while maintaining canonical ordering + /// + /// RFC 8949 Section 4.2.3 requires correct ordering regardless of key complexity: + /// "The keys in every map must be sorted [...] Note that this rule allows maps + /// to be deterministically ordered regardless of the specific data model of + /// the key values." + #[test] + fn test_map_complex_keys() { + // Test nested structures in keys while maintaining order + // Following RFC 8949 Section 4.2.3 length-first rule + let valid_complex = vec![ + 0xA2, // Map with 2 pairs + 0x42, 0x01, 0x02, // Key 1: simple 2-byte string (shorter, so comes first) + 0x41, 0x01, // Value 1 + 0x44, 0x01, 0x02, 0x03, 0x04, // Key 2: 4-byte string (longer, so comes second) + 0x41, 0x02, // Value 2 + ]; + let mut decoder = Decoder::new(&valid_complex); + assert!(Map::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + } + + /// Test edge cases for map encoding while maintaining compliance with RFC 8949 + /// + /// These cases test boundary conditions that must still follow all rules from + /// Section 4.2: + /// - Minimal length encoding (4.2.1) + /// - No indefinite lengths (4.2.2) + /// - Canonical ordering (4.2.3) + #[test] + fn test_map_edge_cases() { + // Single entry map - must still follow minimal length encoding rules + let single_entry = vec![ + 0xA1, // Map with 1 pair (using immediate value as per Section 4.2.1) + 0x41, 0x01, // Key: 1-byte string + 0x41, 0x02, // Value: 1-byte string + ]; + let mut decoder = Decoder::new(&single_entry); + assert!(Map::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + + // Map with zero-length string key - tests smallest possible key case + // Still must follow sorting rules from Section 4.2.3 + let zero_length_key = vec![ + 0xA1, // Map with 1 pair + 0x40, // Key: 0-byte string (smallest possible key length) + 0x41, 0x01, // Value: 1-byte string + ]; + let mut decoder = Decoder::new(&zero_length_key); + assert!(Map::decode(&mut decoder, &mut DecodeCtx::Deterministic).is_ok()); + } + + /// Test duplicate key detection as required by RFC 8949 Section 4.2.3 + /// + /// From RFC 8949 Section 4.2.3: + /// "The keys in every map must be sorted [...] Note that this rule + /// automatically implies that no two keys in a map can be equal (have + /// the same length and the same value)." 
+ #[test] + fn test_duplicate_keys() { + let map_with_duplicates = vec![ + 0xA2, // Map with 2 pairs + 0x41, 0x01, // Key 1: 1-byte string [0x01] + 0x41, 0x02, // Value 1 + 0x41, 0x01, // Key 2: same as Key 1 (duplicate - invalid) + 0x41, 0x03, // Value 2 + ]; + let mut decoder = Decoder::new(&map_with_duplicates); + assert!(Map::decode(&mut decoder.clone(), &mut DecodeCtx::Deterministic).is_err()); + assert!(Map::decode(&mut decoder, &mut DecodeCtx::non_deterministic()).is_ok()); + } + + #[test] + fn test_map_entry_ord_comprehensive() { + // Test 1: Length-first ordering + // According to RFC 8949, shorter keys must come before longer keys + // regardless of their actual byte values + let short_key = MapEntry { + key_bytes: vec![0x41], // Single byte key + value: vec![0x01], + }; + let long_key = MapEntry { + key_bytes: vec![0x41, 0x42, 0x43], // Three byte key (longer) + value: vec![0x01], + }; + // Even though both start with 0x41, the shorter one comes first + assert!(short_key < long_key); + assert!(long_key > short_key); + + // Test 2: Lexicographic ordering for equal-length keys + // When keys have the same length, they are compared byte by byte + // lexicographically (like dictionary ordering) + let key_a = MapEntry { + key_bytes: vec![0x41, 0x41], // Represents "AA" in ASCII + value: vec![0x01], + }; + let key_b = MapEntry { + key_bytes: vec![0x41, 0x42], // Represents "AB" in ASCII + value: vec![0x01], + }; + // "AA" comes before "AB" lexicographically + assert!(key_a < key_b); + assert!(key_b > key_a); + assert!(key_a == key_a); + + // Test 3: Identical entries (same key AND value) + // Complete MapEntry equality requires both key and value to be identical + let entry1 = MapEntry { + key_bytes: vec![0x41, 0x42], + value: vec![0x01], + }; + let entry2 = MapEntry { + key_bytes: vec![0x41, 0x42], + value: vec![0x01], // Same value as entry1 + }; + // These are truly identical entries + assert_eq!(entry1, entry2); + + // Test 4: Same key, different values - these are NOT equal + // In CBOR maps, this would represent duplicate keys (invalid) + let entry_v1 = MapEntry { + key_bytes: vec![0x41, 0x42], + value: vec![0x01], + }; + let entry_v2 = MapEntry { + key_bytes: vec![0x41, 0x42], + value: vec![0x02], // Different value + }; + // These entries are NOT equal (different values) + assert_ne!(entry_v1, entry_v2); + // But they have the same ordering position (same key) + assert_eq!(entry_v1.cmp(&entry_v2), std::cmp::Ordering::Equal); + + // Test 5: Empty key vs non-empty key + // Empty keys should come before any non-empty key (shortest length rule) + let empty_key = MapEntry { + key_bytes: vec![], // Empty key (length 0) + value: vec![0x01], + }; + let non_empty_key = MapEntry { + key_bytes: vec![0x00], // Single null byte (length 1) + value: vec![0x01], + }; + // Empty key (length 0) comes before single byte key (length 1) + assert!(empty_key < non_empty_key); + + // Test 6: Numerical byte value ordering + // Test that individual byte values are compared correctly (0x00 < 0xFF) + let key_0 = MapEntry { + key_bytes: vec![0x00], // Null byte + value: vec![0x01], + }; + let key_255 = MapEntry { + key_bytes: vec![0xFF], // Maximum byte value + value: vec![0x01], + }; + // 0x00 is numerically less than 0xFF + assert!(key_0 < key_255); + + // Test 7: Complex multi-byte lexicographic comparison + // Test lexicographic ordering when keys differ in later bytes + let key_complex1 = MapEntry { + key_bytes: vec![0x01, 0x02, 0x03], // Differs in last byte (0x03) + value: vec![0x01], + }; + let 
key_complex2 = MapEntry { + key_bytes: vec![0x01, 0x02, 0x04], // Differs in last byte (0x04) + value: vec![0x01], + }; + // First two bytes are identical (0x01, 0x02), so compare third byte: 0x03 < 0x04 + assert!(key_complex1 < key_complex2); + } + /// An edge case where slice [`Ord`] isn't minimal length byte-wise lexicographic. + #[test] + fn test_map_entry_ord_len_edge_case() { + // Shorter length key with greater first byte. + let lhs = MapEntry { + key_bytes: minicbor::to_vec("a").unwrap(), + value: vec![], + }; + assert_eq!(lhs.key_bytes, &[97, 97]); + + // Longer length key with lesser first byte. + let rhs = MapEntry { + key_bytes: minicbor::to_vec(65535u32).unwrap(), + value: vec![], + }; + assert_eq!(rhs.key_bytes, &[25, 255, 255]); + + // Shorter must go first. + assert!(lhs < rhs); + } +} diff --git a/rust/cbork-utils/src/with_cbor_bytes.rs b/rust/cbork-utils/src/with_cbor_bytes.rs new file mode 100644 index 00000000000..cdb63e6a3ef --- /dev/null +++ b/rust/cbork-utils/src/with_cbor_bytes.rs @@ -0,0 +1,64 @@ +//! A helper struct which preserves original CBOR bytes of the decoded object + +use std::{convert::Infallible, ops::Deref}; + +/// A helper immutable data structure, which holds original CBOR bytes of the object with +/// the object themselves. +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct WithCborBytes { + /// original CBOR bytes of the `obj` + cbor_bytes: Vec, + /// underlying `T` instance + obj: T, +} + +impl Deref for WithCborBytes { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.obj + } +} + +impl WithCborBytes { + /// Creates a new instance of the `WithCborBytes` from the provided `obj` by encoding + /// it and storing resulted `cbor_bytes`. + /// + /// # Errors + /// - Infallible + pub fn new(obj: T, ctx: &mut C) -> Result> + where T: minicbor::Encode { + let cbor_bytes = minicbor::to_vec_with(&obj, ctx)?; + Ok(Self { cbor_bytes, obj }) + } + + /// Return inner `obj` instance + pub fn inner(self) -> T { + self.obj + } +} + +impl minicbor::Encode<()> for WithCborBytes { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.writer_mut() + .write_all(&self.cbor_bytes) + .map_err(minicbor::encode::Error::write)?; + Ok(()) + } +} + +impl<'a, C, T: minicbor::Decode<'a, C>> minicbor::Decode<'a, C> for WithCborBytes { + fn decode(d: &mut minicbor::Decoder<'a>, ctx: &mut C) -> Result { + let start = d.position(); + let obj = d.decode_with(ctx)?; + let end = d.position(); + let cbor_bytes = d + .input() + .get(start..end) + .ok_or(minicbor::decode::Error::end_of_input())? 
+ .to_vec(); + Ok(Self { cbor_bytes, obj }) + } +} diff --git a/rust/rbac-registration/Cargo.toml b/rust/rbac-registration/Cargo.toml index c610369ddf2..cb5e0821406 100644 --- a/rust/rbac-registration/Cargo.toml +++ b/rust/rbac-registration/Cargo.toml @@ -32,8 +32,8 @@ uuid = "1.11.0" oid-registry = "0.7.1" thiserror = "2.0.11" -c509-certificate = { version = "0.0.3", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "r20250724-01" } +c509-certificate = { version = "0.0.3", path = "../c509-certificate" } pallas = { version = "0.33.0" } -cbork-utils = { version = "0.0.1", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "r20250724-01" } -cardano-blockchain-types = { version = "0.0.4", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "r20250724-01" } -catalyst-types = { version = "0.0.4", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "r20250724-01" } +cbork-utils = { version = "0.0.1", path = "../cbork-utils" } +cardano-blockchain-types = { version = "0.0.5", path = "../cardano-blockchain-types" } +catalyst-types = { version = "0.0.4", path = "../catalyst-types" } diff --git a/rust/signed_doc/Cargo.toml b/rust/signed_doc/Cargo.toml index 18007272844..2fb110293fd 100644 --- a/rust/signed_doc/Cargo.toml +++ b/rust/signed_doc/Cargo.toml @@ -11,29 +11,31 @@ license.workspace = true workspace = true [dependencies] -catalyst-types = { version = "0.0.4", git = "https://github.com/input-output-hk/catalyst-libs.git", tag = "r20250724-01" } +catalyst-types = { version = "0.0.4", path = "../catalyst-types" } +cbork-utils = { version = "0.0.1", path = "../cbork-utils" } anyhow = "1.0.95" serde = { version = "1.0.217", features = ["derive"] } -serde_json = "1.0.134" -coset = "0.3.8" +serde_json = { version = "1.0.134", features = ["raw_value"] } minicbor = { version = "0.25.1", features = ["half"] } brotli = "7.0.0" ed25519-dalek = { version = "2.1.1", features = ["rand_core", "pem"] } hex = "0.4.3" -strum = { version = "0.26.3", features = ["derive"] } +strum = { version = "0.27.1", features = ["derive"] } clap = { version = "4.5.23", features = ["derive", "env"] } jsonschema = "0.28.3" jsonpath-rust = "0.7.5" futures = "0.3.31" ed25519-bip32 = "0.4.1" # used by the `mk_signed_doc` cli tool - +tracing = "0.1.40" +thiserror = "2.0.11" [dev-dependencies] base64-url = "3.0.0" rand = "0.8.5" uuid = { version = "1.12.0", features = ["v7"] } tokio = { version = "1.42.0", features = [ "macros" ] } +test-case = "3.3.1" [[bin]] name = "mk_signed_doc" diff --git a/rust/signed_doc/README.md b/rust/signed_doc/README.md index 1a8a6670b60..fd8acd87c01 100644 --- a/rust/signed_doc/README.md +++ b/rust/signed_doc/README.md @@ -1,5 +1,3 @@ - - # Catalyst signed document Catalyst signed document crate implementation based on this diff --git a/rust/signed_doc/bins/mk_signed_doc.rs b/rust/signed_doc/bins/mk_signed_doc.rs index 6912a9397e4..66f7dff023f 100644 --- a/rust/signed_doc/bins/mk_signed_doc.rs +++ b/rust/signed_doc/bins/mk_signed_doc.rs @@ -63,13 +63,11 @@ impl Cli { println!("{metadata}"); // Load Document from JSON file let json_doc: serde_json::Value = load_json_from_file(&doc)?; - // Possibly encode if Metadata has an encoding set. - let payload = serde_json::to_vec(&json_doc)?; // Start with no signatures. let signed_doc = Builder::new() - .with_decoded_content(payload) .with_json_metadata(metadata)? - .build(); + .with_json_content(&json_doc)? + .build()?; println!( "report {}", serde_json::to_string(&signed_doc.problem_report())? 
@@ -82,9 +80,12 @@ impl Cli { let signed_doc = signed_doc_from_bytes(cose_bytes.as_slice())?; let new_signed_doc = signed_doc - .into_builder() - .add_signature(|message| sk.sign::<()>(&message).to_bytes().to_vec(), &kid)? - .build(); + .into_builder()? + .add_signature( + |message| sk.sign::<()>(&message).to_bytes().to_vec(), + kid.clone(), + )? + .build()?; save_signed_doc(new_signed_doc, &doc)?; }, Self::Inspect { path } => { @@ -128,7 +129,7 @@ fn save_signed_doc(signed_doc: CatalystSignedDocument, path: &PathBuf) -> anyhow } fn signed_doc_from_bytes(cose_bytes: &[u8]) -> anyhow::Result { - minicbor::decode(cose_bytes).context("Invalid Catalyst Document") + cose_bytes.try_into().context("Invalid Catalyst Document") } fn load_json_from_file(path: &PathBuf) -> anyhow::Result diff --git a/rust/signed_doc/src/builder.rs b/rust/signed_doc/src/builder.rs index 6efdca778dd..9a6e752fe78 100644 --- a/rust/signed_doc/src/builder.rs +++ b/rust/signed_doc/src/builder.rs @@ -1,33 +1,52 @@ //! Catalyst Signed Document Builder. -use catalyst_types::{catalyst_id::CatalystId, problem_report::ProblemReport}; +use std::io::Write; + +use catalyst_types::catalyst_id::CatalystId; +use cbork_utils::with_cbor_bytes::WithCborBytes; use crate::{ - signature::Signature, CatalystSignedDocument, Content, InnerCatalystSignedDocument, Metadata, - Signatures, PROBLEM_REPORT_CTX, + signature::{tbs_data, Signature}, + CatalystSignedDocument, Content, ContentType, Metadata, Signatures, }; /// Catalyst Signed Document Builder. -#[derive(Debug)] -pub struct Builder(InnerCatalystSignedDocument); +/// It's a type-safe state machine which transitions type safely through the different stages of +/// the Catalyst Signed Document build process: +/// Setting Metadata -> Setting Content -> Setting Signatures +pub type Builder = MetadataBuilder; -impl Default for Builder { - fn default() -> Self { - Self::new() - } +/// Only `metadata` builder part +pub struct MetadataBuilder { + /// metadata + metadata: Metadata, +} + +/// Only `content` builder part +pub struct ContentBuilder { + /// metadata + metadata: Metadata, + /// content + content: Content, } -impl Builder { +/// Only `Signatures` builder part +pub struct SignaturesBuilder { + /// metadata + metadata: WithCborBytes, + /// content + content: WithCborBytes, + /// signatures + signatures: Signatures, +} + +impl MetadataBuilder { /// Start building a signed document #[must_use] + #[allow(clippy::new_without_default)] pub fn new() -> Self { - let report = ProblemReport::new(PROBLEM_REPORT_CTX); - Self(InnerCatalystSignedDocument { - report, + Self { metadata: Metadata::default(), - content: Content::default(), - signatures: Signatures::default(), - raw_bytes: None, - }) + } } /// Set document metadata in JSON format @@ -35,19 +54,56 @@ impl Builder { /// /// # Errors /// - Fails if it is invalid metadata fields JSON object.
- pub fn with_json_metadata(mut self, json: serde_json::Value) -> anyhow::Result { - let metadata = serde_json::from_value(json)?; - self.0.metadata = Metadata::from_metadata_fields(metadata, &self.0.report); - Ok(self) + pub fn with_json_metadata(mut self, json: serde_json::Value) -> anyhow::Result { + self.metadata = Metadata::from_json(json)?; + Ok(ContentBuilder { + metadata: self.metadata, + content: Content::default(), + }) } +} - /// Set decoded (original) document content bytes - #[must_use] - pub fn with_decoded_content(mut self, content: Vec) -> Self { - self.0.content = Content::from_decoded(content); - self +impl ContentBuilder { + /// Prepares a `SignaturesBuilder` from the current `ContentBuilder` + fn into_signatures_builder(self) -> anyhow::Result { + Ok(SignaturesBuilder { + metadata: WithCborBytes::new(self.metadata, &mut ())?, + content: WithCborBytes::new(self.content, &mut ())?, + signatures: Signatures::default(), + }) + } + + /// Sets an empty content + pub fn empty_content(self) -> anyhow::Result { + self.into_signatures_builder() + } + + /// Set the provided JSON content, applying already set `content-encoding`. + /// + /// # Errors + /// - Verifies that `content-type` field is set to JSON + /// - Cannot serialize provided JSON + /// - Compression failure + pub fn with_json_content( + mut self, json: &serde_json::Value, + ) -> anyhow::Result { + anyhow::ensure!( + self.metadata.content_type()? == ContentType::Json, + "Already set metadata field `content-type` is not JSON value" + ); + + let content = serde_json::to_vec(&json)?; + if let Some(encoding) = self.metadata.content_encoding() { + self.content = encoding.encode(&content)?.into(); + } else { + self.content = content.into(); + } + + self.into_signatures_builder() } +} +impl SignaturesBuilder { /// Add a signature to the document /// /// # Errors @@ -56,43 +112,139 @@ impl Builder { /// content, due to malformed data, or when the signed document cannot be /// converted into `coset::CoseSign`. pub fn add_signature( - mut self, sign_fn: impl FnOnce(Vec) -> Vec, kid: &CatalystId, + mut self, sign_fn: impl FnOnce(Vec) -> Vec, kid: CatalystId, ) -> anyhow::Result { - let cose_sign = self - .0 - .as_cose_sign() - .map_err(|e| anyhow::anyhow!("Failed to sign: {e}"))?; - - let protected_header = coset::HeaderBuilder::new().key_id(kid.to_string().into_bytes()); - - let mut signature = coset::CoseSignatureBuilder::new() - .protected(protected_header.build()) - .build(); - let data_to_sign = cose_sign.tbs_data(&[], &signature); - signature.signature = sign_fn(data_to_sign); - if let Some(sign) = Signature::from_cose_sig(signature, &self.0.report) { - self.0.signatures.push(sign); + if kid.is_id() { + anyhow::bail!("Provided kid should be in a uri format, kid: {kid}"); } + self.signatures.push(build_signature( + sign_fn, + kid, + &self.metadata, + &self.content, + )?); + Ok(self) } - /// Build a signed document with the collected error report. - /// Could provide an invalid document. - #[must_use] - pub fn build(self) -> CatalystSignedDocument { - self.0.into() + /// Builds a document from the set `metadata`, `content` and `signatures`. 
+ /// + /// # Errors: + /// - CBOR encoding/decoding failures + pub fn build(self) -> anyhow::Result { + let metadata_bytes = minicbor::to_vec(&self.metadata)?; + let content_bytes = minicbor::to_vec(&self.content)?; + let signature_bytes = minicbor::to_vec(&self.signatures)?; + let doc = build_document(&metadata_bytes, &content_bytes, &signature_bytes)?; + Ok(doc) } } -impl From<&CatalystSignedDocument> for Builder { - fn from(value: &CatalystSignedDocument) -> Self { - Self(InnerCatalystSignedDocument { +/// Build document from the provided **CBOR encoded** `metadata`, `content` and +/// `signatures`. +fn build_document( + metadata_bytes: &[u8], content_bytes: &[u8], signatures_bytes: &[u8], +) -> anyhow::Result { + let mut e = minicbor::Encoder::new(Vec::new()); + // COSE_Sign tag + // + e.tag(minicbor::data::Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + e.bytes(metadata_bytes)?; + // empty unprotected headers + e.map(0)?; + // content + e.writer_mut().write_all(content_bytes)?; + // signatures + e.writer_mut().write_all(signatures_bytes)?; + CatalystSignedDocument::try_from(e.into_writer().as_slice()) +} + +/// Builds a `Signature` object by signing provided `metadata_bytes`, `content_bytes` and +/// `kid` params. +fn build_signature( + sign_fn: impl FnOnce(Vec) -> Vec, kid: CatalystId, metadata: &WithCborBytes, + content: &WithCborBytes, +) -> anyhow::Result { + let data_to_sign = tbs_data(&kid, metadata, content)?; + let sign_bytes = sign_fn(data_to_sign); + Ok(Signature::new(kid, sign_bytes)) +} + +impl TryFrom<&CatalystSignedDocument> for SignaturesBuilder { + type Error = anyhow::Error; + + fn try_from(value: &CatalystSignedDocument) -> Result { + Ok(Self { metadata: value.inner.metadata.clone(), content: value.inner.content.clone(), signatures: value.inner.signatures.clone(), - report: value.inner.report.clone(), - raw_bytes: None, }) } } + +#[cfg(test)] +pub(crate) mod tests { + use cbork_utils::with_cbor_bytes::WithCborBytes; + + /// A test version of the builder, which allows to build a not fully valid catalyst + /// signed document + #[derive(Default)] + pub(crate) struct Builder { + /// metadata + metadata: super::Metadata, + /// content + content: super::Content, + /// signatures + signatures: super::Signatures, + } + + impl Builder { + /// Start building a signed document + #[must_use] + pub(crate) fn new() -> Self { + Self::default() + } + + /// Add provided `SupportedField` into the `Metadata`. + pub(crate) fn with_metadata_field( + mut self, field: crate::metadata::SupportedField, + ) -> Self { + self.metadata.add_field(field); + self + } + + /// Set the content (COSE payload) to the document builder. + /// It will set the content as its provided, make sure by yourself that + /// `content-type` and `content-encoding` fields are aligned with the + /// provided content bytes. + pub(crate) fn with_content(mut self, content: Vec) -> Self { + self.content = content.into(); + self + } + + /// Add a signature to the document + pub(crate) fn add_signature( + mut self, sign_fn: impl FnOnce(Vec) -> Vec, kid: super::CatalystId, + ) -> anyhow::Result { + let metadata = WithCborBytes::new(self.metadata, &mut ())?; + let content = WithCborBytes::new(self.content, &mut ())?; + self.signatures + .push(super::build_signature(sign_fn, kid, &metadata, &content)?); + self.metadata = metadata.inner(); + self.content = content.inner(); + Ok(self) + } + + /// Build a signed document with the collected error report. + /// Could provide an invalid document. 
+ pub(crate) fn build(self) -> super::CatalystSignedDocument { + let metadata_bytes = minicbor::to_vec(self.metadata).unwrap(); + let content_bytes = minicbor::to_vec(self.content).unwrap(); + let signature_bytes = minicbor::to_vec(self.signatures).unwrap(); + super::build_document(&metadata_bytes, &content_bytes, &signature_bytes).unwrap() + } + } +} diff --git a/rust/signed_doc/src/content.rs b/rust/signed_doc/src/content.rs index 506ba1f7daa..1126b0711fb 100644 --- a/rust/signed_doc/src/content.rs +++ b/rust/signed_doc/src/content.rs @@ -1,78 +1,53 @@ //! Catalyst Signed Document Content Payload -use anyhow::Context; -use catalyst_types::problem_report::ProblemReport; - -use crate::metadata::ContentEncoding; - -/// Decompressed Document Content type bytes. +/// Document Content bytes (COSE payload). #[derive(Debug, Clone, PartialEq, Default)] -pub struct Content { - /// Original Decompressed Document's data bytes - data: Option>, -} +pub struct Content(Vec); impl Content { - /// Creates a new `Content` value, from the encoded data. - /// verifies a Document's content, that it is correctly encoded and it corresponds and - /// parsed to the specified type - pub(crate) fn from_encoded( - mut data: Vec, content_encoding: Option, report: &ProblemReport, - ) -> Self { - if let Some(content_encoding) = content_encoding { - if let Ok(decoded_data) = content_encoding.decode(&data) { - data = decoded_data; - } else { - report.invalid_value( - "payload", - &hex::encode(&data), - &format!("Invalid Document content, should {content_encoding} encodable"), - "Invalid Document content type.", - ); - return Self::default(); - } - } - Self::from_decoded(data) + /// Return content bytes. + #[must_use] + pub fn bytes(&self) -> &[u8] { + self.0.as_slice() } - /// Creates a new `Content` value, from the decoded (original) data. - pub(crate) fn from_decoded(data: Vec) -> Self { - Self { data: Some(data) } + /// Return content byte size. + #[must_use] + pub fn size(&self) -> usize { + self.0.len() } +} - /// Return an decoded (original) content bytes. - /// - /// # Errors - /// - Missing Document content - pub fn decoded_bytes(&self) -> anyhow::Result<&[u8]> { - self.data - .as_deref() - .ok_or(anyhow::anyhow!("Missing Document content")) +impl From> for Content { + fn from(value: Vec) -> Self { + Self(value) } +} - /// Return an encoded content bytes, - /// by the provided `content_encoding` provided field. - /// - /// # Errors - /// - Missing Document content - /// - Failed to encode content. - pub(crate) fn encoded_bytes( - &self, content_encoding: Option, - ) -> anyhow::Result> { - let content = self.decoded_bytes()?; - if let Some(content_encoding) = content_encoding { - content_encoding - .encode(content) - .context(format!("Failed to encode {content_encoding} content")) +impl minicbor::Encode<()> for Content { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + if self.0.is_empty() { + e.null()?; } else { - Ok(content.to_vec()) + e.bytes(self.0.as_slice())?; } + Ok(()) } +} - /// Return content byte size. - /// If content is empty returns `0`. 
- #[must_use] - pub fn size(&self) -> usize { - self.data.as_ref().map(Vec::len).unwrap_or_default() +impl minicbor::Decode<'_, ()> for Content { + fn decode( + d: &mut minicbor::Decoder<'_>, _ctx: &mut (), + ) -> Result { + let p = d.position(); + d.null() + .map(|()| Self(Vec::new())) + // important to use `or_else` so it will be lazily evaluated only at the time when it is needed + .or_else(|_| { + d.set_position(p); + d.bytes().map(Vec::from).map(Self) + }) } } diff --git a/rust/signed_doc/src/decode_context.rs b/rust/signed_doc/src/decode_context.rs new file mode 100644 index 00000000000..7d14953ca17 --- /dev/null +++ b/rust/signed_doc/src/decode_context.rs @@ -0,0 +1,50 @@ +//! Context passed into the decoder to provide additional information. + +use catalyst_types::problem_report::ProblemReport; + +/// Compatibility policy +#[derive(Copy, Clone)] +pub enum CompatibilityPolicy { + /// Silently allow obsoleted type conversions or non-deterministic encoding. + Accept, + /// Allow but log warnings for all obsoleted type conversions or non-deterministic + /// encoding. + Warn, + /// Fail and update problem report when an obsolete type is encountered or the data is + /// not deterministically encoded. + Fail, +} + +/// A context used to pass to the decoder. +pub(crate) struct DecodeContext { + /// Compatibility policy. + compatibility_policy: CompatibilityPolicy, + /// Problem report. + report: ProblemReport, +} + +impl DecodeContext { + /// Creates a new instance of the `DecodeContext` + pub(crate) fn new(compatibility_policy: CompatibilityPolicy, report: ProblemReport) -> Self { + Self { + compatibility_policy, + report, + } + } + + /// Returns `CompatibilityPolicy` + pub(crate) fn policy(&self) -> &CompatibilityPolicy { + &self.compatibility_policy + } + + /// Returns `ProblemReport` + pub(crate) fn report(&mut self) -> &mut ProblemReport { + &mut self.report + } + + /// Consumes the current `DecodeContext`, returning the underlying `ProblemReport` + /// instance + pub(crate) fn into_report(self) -> ProblemReport { + self.report + } +} diff --git a/rust/signed_doc/src/doc_types/mod.rs b/rust/signed_doc/src/doc_types/mod.rs index 683e36db2f2..902f3222941 100644 --- a/rust/signed_doc/src/doc_types/mod.rs +++ b/rust/signed_doc/src/doc_types/mod.rs @@ -1,53 +1,42 @@ //! An implementation of different defined document types //! -use catalyst_types::uuid::Uuid; - -/// Proposal document `UuidV4` type. -pub const PROPOSAL_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x7808_D2BA_D511_40AF_84E8_C0D1_625F_DFDC); -/// Proposal template `UuidV4` type. -pub const PROPOSAL_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0x0CE8_AB38_9258_4FBC_A62E_7FAA_6E58_318F); -/// Comment document `UuidV4` type. -pub const COMMENT_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0xB679_DED3_0E7C_41BA_89F8_DA62_A178_98EA); -/// Comment template `UuidV4` type. -pub const COMMENT_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0x0B84_24D4_EBFD_46E3_9577_1775_A69D_290C); -/// Review document `UuidV4` type. -pub const REVIEW_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0xE4CA_F5F0_098B_45FD_94F3_0702_A457_3DB5); -/// Review template `UuidV4` type. -pub const REVIEW_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0xEBE5_D0BF_5D86_4577_AF4D_008F_DDBE_2EDC); -/// Category document `UuidV4` type. -pub const CATEGORY_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x48C2_0109_362A_4D32_9BBA_E0A9_CF8B_45BE); -/// Category template `UuidV4` type.
-pub const CATEGORY_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0x65B1_E8B0_51F1_46A5_9970_72CD_F268_84BE); -/// Campaign parameters document `UuidV4` type. -pub const CAMPAIGN_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x0110_EA96_A555_47CE_8408_36EF_E6ED_6F7C); -/// Campaign parameters template `UuidV4` type. -pub const CAMPAIGN_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0x7E8F_5FA2_44CE_49C8_BFD5_02AF_42C1_79A3); -/// Brand parameters document `UuidV4` type. -pub const BRAND_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x3E48_08CC_C86E_467B_9702_D60B_AA9D_1FCA); -/// Brand parameters template `UuidV4` type. -pub const BRAND_TEMPLATE_UUID_TYPE: Uuid = - Uuid::from_u128(0xFD3C_1735_80B1_4EEA_8D63_5F43_6D97_EA31); -/// Proposal action document `UuidV4` type. -pub const PROPOSAL_ACTION_DOCUMENT_UUID_TYPE: Uuid = - Uuid::from_u128(0x5E60_E623_AD02_4A1B_A1AC_406D_B978_EE48); -/// Public vote transaction v2 `UuidV4` type. -pub const PUBLIC_VOTE_TX_V2_UUID_TYPE: Uuid = - Uuid::from_u128(0x8DE5_586C_E998_4B95_8742_7BE3_C859_2803); -/// Private vote transaction v2 `UuidV4` type. -pub const PRIVATE_VOTE_TX_V2_UUID_TYPE: Uuid = - Uuid::from_u128(0xE78E_E18D_F380_44C1_A852_80AA_6ECB_07FE); -/// Immutable ledger block `UuidV4` type. -pub const IMMUTABLE_LEDGER_BLOCK_UUID_TYPE: Uuid = - Uuid::from_u128(0xD9E7_E6CE_2401_4D7D_9492_F4F7_C642_41C3); +use crate::DocType; + +/// helper macro by evaluating `DocType::try_from_uuid(catalyst_types::uuid::uuid!())` +/// expression +macro_rules! doc_type_init { + ($s:literal) => { + match DocType::try_from_uuid(catalyst_types::uuid::uuid!($s)) { + Ok(v) => v, + Err(_) => panic!("invalid uuid v4 value"), + } + }; +} + +/// -------------- Document Types -------------- +/// Brand document type. +pub const BRAND_PARAMETERS: DocType = doc_type_init!("3e4808cc-c86e-467b-9702-d60baa9d1fca"); + +/// Campaign Parameters document type. +pub const CAMPAIGN_PARAMETERS: DocType = doc_type_init!("0110ea96-a555-47ce-8408-36efe6ed6f7c"); + +/// Category Parameters document type. +pub const CATEGORY_PARAMETERS: DocType = doc_type_init!("48c20109-362a-4d32-9bba-e0a9cf8b45be"); + +/// Proposal document type. +pub const PROPOSAL: DocType = doc_type_init!("7808d2ba-d511-40af-84e8-c0d1625fdfdc"); + +/// Proposal comment document type. +pub const PROPOSAL_COMMENT: DocType = doc_type_init!("b679ded3-0e7c-41ba-89f8-da62a17898ea"); + +/// Proposal action document type. +pub const PROPOSAL_SUBMISSION_ACTION: DocType = + doc_type_init!("5e60e623-ad02-4a1b-a1ac-406db978ee48"); + +/// Proposal Comment Template document type. +pub const PROPOSAL_COMMENT_FORM_TEMPLATE: DocType = + doc_type_init!("0b8424d4-ebfd-46e3-9577-1775a69d290c"); + +/// Proposal Template document type. 
+pub const PROPOSAL_FORM_TEMPLATE: DocType = doc_type_init!("0ce8ab38-9258-4fbc-a62e-7faa6e58318f"); diff --git a/rust/signed_doc/src/lib.rs b/rust/signed_doc/src/lib.rs index c5d5779733a..0c645611c1a 100644 --- a/rust/signed_doc/src/lib.rs +++ b/rust/signed_doc/src/lib.rs @@ -2,6 +2,7 @@ mod builder; mod content; +pub mod decode_context; pub mod doc_types; mod metadata; pub mod providers; @@ -14,38 +15,37 @@ use std::{ sync::Arc, }; -use anyhow::Context; pub use builder::Builder; pub use catalyst_types::{ problem_report::ProblemReport, uuid::{Uuid, UuidV4, UuidV7}, }; +use cbork_utils::{array::Array, decode_context::DecodeCtx, with_cbor_bytes::WithCborBytes}; pub use content::Content; -use coset::{CborSerializable, Header, TaggedCborSerializable}; -pub use metadata::{ContentEncoding, ContentType, DocumentRef, ExtraFields, Metadata, Section}; +use decode_context::{CompatibilityPolicy, DecodeContext}; +pub use metadata::{ + ContentEncoding, ContentType, DocLocator, DocType, DocumentRef, DocumentRefs, Metadata, Section, +}; use minicbor::{decode, encode, Decode, Decoder, Encode}; pub use signature::{CatalystId, Signatures}; -/// A problem report content string -const PROBLEM_REPORT_CTX: &str = "Catalyst Signed Document"; +use crate::{builder::SignaturesBuilder, metadata::SupportedLabel}; + +/// `COSE_Sign` object CBOR tag +const COSE_SIGN_CBOR_TAG: minicbor::data::Tag = minicbor::data::Tag::new(98); /// Inner type that holds the Catalyst Signed Document with parsing errors. #[derive(Debug)] struct InnerCatalystSignedDocument { /// Document Metadata - metadata: Metadata, + metadata: WithCborBytes, /// Document Content - content: Content, + content: WithCborBytes, /// Signatures signatures: Signatures, /// A comprehensive problem report, which could include a decoding errors along with /// the other validation errors report: ProblemReport, - - /// raw CBOR bytes of the `CatalystSignedDocument` object. - /// It is important to keep them to have a consistency what comes from the decoding - /// process, so we would return the same data again - raw_bytes: Option>, } /// Keep all the contents private. @@ -60,13 +60,13 @@ pub struct CatalystSignedDocument { impl Display for CatalystSignedDocument { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { - writeln!(f, "{}", self.inner.metadata)?; + self.inner.metadata.fmt(f)?; writeln!(f, "Payload Size: {} bytes", self.inner.content.size())?; writeln!(f, "Signature Information")?; if self.inner.signatures.is_empty() { writeln!(f, " This document is unsigned.")?; } else { - for kid in &self.inner.signatures.kids() { + for kid in &self.kids() { writeln!(f, " Signature Key ID: {kid}")?; } } @@ -85,11 +85,11 @@ impl From for CatalystSignedDocument { impl CatalystSignedDocument { // A bunch of getters to access the contents, or reason through the document, such as. - /// Return Document Type `UUIDv4`. + /// Return Document Type `DocType` - List of `UUIDv4`. /// /// # Errors /// - Missing 'type' field. - pub fn doc_type(&self) -> anyhow::Result { + pub fn doc_type(&self) -> anyhow::Result<&DocType> { self.inner.metadata.doc_type() } @@ -109,12 +109,30 @@ impl CatalystSignedDocument { self.inner.metadata.doc_ver() } - /// Return document `Content`. + /// Return document content object. #[must_use] - pub fn doc_content(&self) -> &Content { + pub(crate) fn content(&self) -> &WithCborBytes { &self.inner.content } + /// Return document decoded (original/non compressed) content bytes. 
+ /// + /// # Errors + /// - Decompression failure + pub fn decoded_content(&self) -> anyhow::Result> { + if let Some(encoding) = self.doc_content_encoding() { + encoding.decode(self.encoded_content()) + } else { + Ok(self.encoded_content().to_vec()) + } + } + + /// Return document encoded (compressed) content bytes. + #[must_use] + pub fn encoded_content(&self) -> &[u8] { + self.content().bytes() + } + /// Return document `ContentType`. /// /// # Errors @@ -130,9 +148,10 @@ impl CatalystSignedDocument { } /// Return document metadata content. + // TODO: remove this and provide getters from metadata like the rest of its fields have. #[must_use] - pub fn doc_meta(&self) -> &ExtraFields { - self.inner.metadata.extra() + pub fn doc_meta(&self) -> &WithCborBytes { + &self.inner.metadata } /// Return a Document's signatures @@ -144,13 +163,53 @@ impl CatalystSignedDocument { /// Return a list of Document's Catalyst IDs. #[must_use] pub fn kids(&self) -> Vec { - self.inner.signatures.kids() + self.inner + .signatures + .iter() + .map(|s| s.kid().clone()) + .collect() } /// Return a list of Document's author IDs (short form of Catalyst IDs). #[must_use] pub fn authors(&self) -> Vec { - self.inner.signatures.authors() + self.inner + .signatures + .iter() + .map(|s| s.kid().as_short_id()) + .collect() + } + + /// Checks if the CBOR body of the signed doc is in the older version format before + /// v0.04. + /// + /// # Errors + /// + /// Errors from CBOR decoding. + pub fn is_deprecated(&self) -> anyhow::Result { + let mut e = minicbor::Encoder::new(Vec::new()); + + let e = e.encode(self.inner.metadata.clone())?; + let e = e.to_owned().into_writer(); + + for entry in cbork_utils::map::Map::decode( + &mut minicbor::Decoder::new(e.as_slice()), + &mut cbork_utils::decode_context::DecodeCtx::non_deterministic(), + )? { + match minicbor::Decoder::new(&entry.key_bytes).decode::()? { + SupportedLabel::Template + | SupportedLabel::Ref + | SupportedLabel::Reply + | SupportedLabel::Parameters => { + if DocumentRefs::is_deprecated_cbor(&entry.value)? { + return Ok(true); + } + }, + _ => {}, + } + } + + Ok(false) } /// Returns a collected problem report for the document. @@ -170,105 +229,108 @@ impl CatalystSignedDocument { &self.inner.report } - /// Convert Catalyst Signed Document into `coset::CoseSign` - /// - /// # Errors - /// Could fails if the `CatalystSignedDocument` object is not valid. - pub(crate) fn as_cose_sign(&self) -> anyhow::Result { - self.inner.as_cose_sign() - } - /// Returns a signed document `Builder` pre-loaded with the current signed document's /// data. - #[must_use] - pub fn into_builder(&self) -> Builder { - self.into() - } -} - -impl InnerCatalystSignedDocument { - /// Convert Catalyst Signed Document into `coset::CoseSign` /// /// # Errors - /// Could fails if the `CatalystSignedDocument` object is not valid. 
- fn as_cose_sign(&self) -> anyhow::Result { - if let Some(raw_bytes) = self.raw_bytes.clone() { - let cose_sign = coset::CoseSign::from_tagged_slice(raw_bytes.as_slice()) - .or_else(|_| coset::CoseSign::from_slice(raw_bytes.as_slice())) - .map_err(|e| { - minicbor::decode::Error::message(format!("Invalid COSE Sign document: {e}")) - })?; - Ok(cose_sign) - } else { - let protected_header = - Header::try_from(&self.metadata).context("Failed to encode Document Metadata")?; - - let content = self - .content - .encoded_bytes(self.metadata.content_encoding())?; - - let mut builder = coset::CoseSignBuilder::new() - .protected(protected_header) - .payload(content); - - for signature in self.signatures.cose_signatures() { - builder = builder.add_signature(signature); - } - Ok(builder.build()) - } + /// - If error returned its probably a bug. `CatalystSignedDocument` must be a valid + /// COSE structure. + pub fn into_builder(&self) -> anyhow::Result { + self.try_into() } } -impl Decode<'_, ()> for CatalystSignedDocument { - fn decode(d: &mut Decoder<'_>, _ctx: &mut ()) -> Result { - let start = d.position(); - d.skip()?; - let end = d.position(); - let cose_bytes = d - .input() - .get(start..end) - .ok_or(minicbor::decode::Error::end_of_input())?; - - let cose_sign = coset::CoseSign::from_tagged_slice(cose_bytes) - .or_else(|_| coset::CoseSign::from_slice(cose_bytes)) - .map_err(|e| { - minicbor::decode::Error::message(format!("Invalid COSE Sign document: {e}")) - })?; - - let report = ProblemReport::new(PROBLEM_REPORT_CTX); - let metadata = Metadata::from_protected_header(&cose_sign.protected, &report); - let signatures = Signatures::from_cose_sig_list(&cose_sign.signatures, &report); - - let content = if let Some(payload) = cose_sign.payload { - Content::from_encoded(payload, metadata.content_encoding(), &report) +impl Decode<'_, CompatibilityPolicy> for CatalystSignedDocument { + fn decode(d: &mut Decoder<'_>, ctx: &mut CompatibilityPolicy) -> Result { + let mut ctx = DecodeContext::new( + *ctx, + ProblemReport::new("Catalyst Signed Document Decoding"), + ); + + let p = d.position(); + if let Ok(tag) = d.tag() { + if tag != COSE_SIGN_CBOR_TAG { + return Err(minicbor::decode::Error::message(format!( + "Must be equal to the COSE_Sign tag value: {COSE_SIGN_CBOR_TAG}" + ))); + } } else { - report.missing_field("COSE Sign Payload", "Missing document content (payload)"); - Content::default() + d.set_position(p); + } + + let arr = Array::decode(d, &mut DecodeCtx::Deterministic)?; + + let signed_doc = match arr.as_slice() { + [metadata_bytes, headers_bytes, content_bytes, signatures_bytes] => { + let metadata_bytes = minicbor::Decoder::new(metadata_bytes).bytes()?; + let metadata = WithCborBytes::::decode( + &mut minicbor::Decoder::new(metadata_bytes), + &mut ctx, + )?; + + // empty unprotected headers + let mut map = cbork_utils::map::Map::decode( + &mut minicbor::Decoder::new(headers_bytes.as_slice()), + &mut cbork_utils::decode_context::DecodeCtx::Deterministic, + )? 
+ .into_iter(); + if map.next().is_some() { + ctx.report().unknown_field( + "unprotected headers", + "non empty unprotected headers", + "COSE unprotected headers must be empty", + ); + } + + let content = WithCborBytes::::decode( + &mut minicbor::Decoder::new(content_bytes.as_slice()), + &mut (), + )?; + + let signatures = Signatures::decode( + &mut minicbor::Decoder::new(signatures_bytes.as_slice()), + &mut ctx, + )?; + + InnerCatalystSignedDocument { + metadata, + content, + signatures, + report: ctx.into_report(), + } + }, + _ => { + return Err(minicbor::decode::Error::message( + "Must be a definite size array of 4 elements", + )); + }, }; - Ok(InnerCatalystSignedDocument { - metadata, - content, - signatures, - report, - raw_bytes: Some(cose_bytes.to_vec()), - } - .into()) + Ok(signed_doc.into()) } } -impl Encode<()> for CatalystSignedDocument { +impl Encode for CatalystSignedDocument { fn encode( - &self, e: &mut encode::Encoder, _ctx: &mut (), + &self, e: &mut encode::Encoder, _ctx: &mut C, ) -> Result<(), encode::Error> { - let cose_sign = self.as_cose_sign().map_err(encode::Error::message)?; - let cose_bytes = cose_sign.to_tagged_vec().map_err(|e| { - minicbor::encode::Error::message(format!("Failed to encode COSE Sign document: {e}")) - })?; - - e.writer_mut() - .write_all(&cose_bytes) - .map_err(|_| minicbor::encode::Error::message("Failed to encode to CBOR")) + // COSE_Sign tag + // + e.tag(COSE_SIGN_CBOR_TAG)?; + e.array(4)?; + // protected headers (metadata fields) + e.bytes( + minicbor::to_vec(&self.inner.metadata) + .map_err(minicbor::encode::Error::message)? + .as_slice(), + )?; + // empty unprotected headers + e.map(0)?; + // content + e.encode(&self.inner.content)?; + // signatures + e.encode(&self.inner.signatures)?; + Ok(()) } } @@ -276,7 +338,10 @@ impl TryFrom<&[u8]> for CatalystSignedDocument { type Error = anyhow::Error; fn try_from(value: &[u8]) -> Result { - Ok(minicbor::decode(value)?) + Ok(minicbor::decode_with( + value, + &mut CompatibilityPolicy::Accept, + )?) } } diff --git a/rust/signed_doc/src/metadata/collaborators.rs b/rust/signed_doc/src/metadata/collaborators.rs new file mode 100644 index 00000000000..33dba2c0a8a --- /dev/null +++ b/rust/signed_doc/src/metadata/collaborators.rs @@ -0,0 +1,73 @@ +//! Catalyst Signed Document `collaborators` field type definition. + +use std::{ops::Deref, str::FromStr}; + +use catalyst_types::catalyst_id::CatalystId; + +use cbork_utils::{array::Array, decode_context::DecodeCtx}; + +/// 'collaborators' field type definition, which is a list of `CatalystId` values +#[derive(Clone, Debug, PartialEq)] +pub struct Collaborators(Vec); + +impl Deref for Collaborators { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl minicbor::Encode<()> for Collaborators { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + if !self.0.is_empty() { + e.array( + self.0 + .len() + .try_into() + .map_err(minicbor::encode::Error::message)?, + )?; + for c in &self.0 { + e.bytes(&c.to_string().into_bytes())?; + } + } + Ok(()) + } +} + +impl minicbor::Decode<'_, ()> for Collaborators { + fn decode( + d: &mut minicbor::Decoder<'_>, _ctx: &mut (), + ) -> Result { + Array::decode(d, &mut DecodeCtx::Deterministic)? + .iter() + .map(|item| minicbor::Decoder::new(item).bytes()) + .collect::, _>>()?
+ .into_iter() + .map(CatalystId::try_from) + .collect::>() + .map(Self) + .map_err(minicbor::decode::Error::custom) + } +} + +impl<'de> serde::Deserialize<'de> for Collaborators { + fn deserialize(deserializer: D) -> Result + where D: serde::Deserializer<'de> { + Vec::::deserialize(deserializer)? + .into_iter() + .map(|id| CatalystId::from_str(&id)) + .collect::>() + .map(Self) + .map_err(serde::de::Error::custom) + } +} + +impl serde::Serialize for Collaborators { + fn serialize(&self, serializer: S) -> Result + where S: serde::Serializer { + let iter = self.0.iter().map(ToString::to_string); + serializer.collect_seq(iter) + } +} diff --git a/rust/signed_doc/src/metadata/content_encoding.rs b/rust/signed_doc/src/metadata/content_encoding.rs index d47f696e7ff..96028941cdc 100644 --- a/rust/signed_doc/src/metadata/content_encoding.rs +++ b/rust/signed_doc/src/metadata/content_encoding.rs @@ -5,8 +5,6 @@ use std::{ str::FromStr, }; -use serde::{de, Deserialize, Deserializer}; - /// IANA `CoAP` Content Encoding. #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum ContentEncoding { @@ -64,23 +62,34 @@ impl FromStr for ContentEncoding { } } -impl<'de> Deserialize<'de> for ContentEncoding { +impl<'de> serde::Deserialize<'de> for ContentEncoding { fn deserialize(deserializer: D) -> Result - where D: Deserializer<'de> { + where D: serde::Deserializer<'de> { let s = String::deserialize(deserializer)?; - FromStr::from_str(&s).map_err(de::Error::custom) + FromStr::from_str(&s).map_err(serde::de::Error::custom) } } -impl TryFrom<&coset::cbor::Value> for ContentEncoding { - type Error = anyhow::Error; +impl serde::Serialize for ContentEncoding { + fn serialize(&self, serializer: S) -> Result + where S: serde::Serializer { + self.to_string().serialize(serializer) + } +} - fn try_from(val: &coset::cbor::Value) -> anyhow::Result { - match val.as_text() { - Some(encoding) => encoding.parse(), - None => { - anyhow::bail!("Expected Content Encoding to be a string"); - }, - } +impl minicbor::Encode<()> for ContentEncoding { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.str(self.to_string().as_str())?; + Ok(()) + } +} + +impl minicbor::Decode<'_, ()> for ContentEncoding { + fn decode( + d: &mut minicbor::Decoder<'_>, _ctx: &mut (), + ) -> Result { + d.str()?.parse().map_err(minicbor::decode::Error::message) } } diff --git a/rust/signed_doc/src/metadata/content_type.rs b/rust/signed_doc/src/metadata/content_type.rs index b72cb4b9c22..6f30c67e3b4 100644 --- a/rust/signed_doc/src/metadata/content_type.rs +++ b/rust/signed_doc/src/metadata/content_type.rs @@ -5,43 +5,28 @@ use std::{ str::FromStr, }; -use coset::iana::CoapContentFormat; -use serde::{de, Deserialize, Deserializer}; use strum::VariantArray; /// Payload Content Type. #[derive(Debug, Copy, Clone, PartialEq, Eq, VariantArray)] pub enum ContentType { - /// 'application/cbor' + /// `application/cbor` Cbor, - /// 'application/json' + /// `application/cddl` + Cddl, + /// `application/json` Json, -} - -impl ContentType { - /// Validates the provided `content` bytes to be a defined `ContentType`. 
- pub(crate) fn validate(self, content: &[u8]) -> anyhow::Result<()> { - match self { - Self::Json => { - if let Err(e) = serde_json::from_slice::(content) { - anyhow::bail!("Invalid {self} content: {e}") - } - }, - Self::Cbor => { - if let Err(e) = minicbor::decode::(content) { - anyhow::bail!("Invalid {self} content: {e}") - } - }, - } - Ok(()) - } + /// `application/json+schema` + JsonSchema, } impl Display for ContentType { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { match self { Self::Cbor => write!(f, "application/cbor"), + Self::Cddl => write!(f, "application/cddl"), Self::Json => write!(f, "application/json"), + Self::JsonSchema => write!(f, "application/json+schema"), } } } @@ -52,7 +37,9 @@ impl FromStr for ContentType { fn from_str(s: &str) -> Result { match s { "application/cbor" => Ok(Self::Cbor), + "application/cddl" => Ok(Self::Cddl), "application/json" => Ok(Self::Json), + "application/json+schema" => Ok(Self::JsonSchema), _ => { anyhow::bail!( "Unsupported Content Type: {s:?}, Supported only: {:?}", @@ -66,41 +53,51 @@ impl FromStr for ContentType { } } -impl<'de> Deserialize<'de> for ContentType { +impl<'de> serde::Deserialize<'de> for ContentType { fn deserialize(deserializer: D) -> Result - where D: Deserializer<'de> { + where D: serde::Deserializer<'de> { let s = String::deserialize(deserializer)?; - FromStr::from_str(&s).map_err(de::Error::custom) + FromStr::from_str(&s).map_err(serde::de::Error::custom) } } -impl From for CoapContentFormat { - fn from(value: ContentType) -> Self { - match value { - ContentType::Cbor => Self::Cbor, - ContentType::Json => Self::Json, - } +impl serde::Serialize for ContentType { + fn serialize(&self, serializer: S) -> Result + where S: serde::Serializer { + self.to_string().serialize(serializer) } } -impl TryFrom<&coset::ContentType> for ContentType { - type Error = anyhow::Error; +impl minicbor::Encode<()> for ContentType { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + // encode as media types, not in CoAP Content-Formats + e.str(self.to_string().as_str())?; + Ok(()) + } +} - fn try_from(value: &coset::ContentType) -> Result { - let content_type = match value { - coset::ContentType::Assigned(CoapContentFormat::Json) => ContentType::Json, - coset::ContentType::Assigned(CoapContentFormat::Cbor) => ContentType::Cbor, - _ => { - anyhow::bail!( - "Unsupported Content Type {value:?}, Supported only: {:?}", - ContentType::VARIANTS - .iter() - .map(ToString::to_string) - .collect::>() - ) +impl minicbor::Decode<'_, ()> for ContentType { + fn decode( + d: &mut minicbor::Decoder<'_>, _ctx: &mut (), + ) -> Result { + let p = d.position(); + match d.int() { + // CoAP Content Format JSON + Ok(val) if val == minicbor::data::Int::from(50_u8) => Ok(Self::Json), + // CoAP Content Format CBOR + Ok(val) if val == minicbor::data::Int::from(60_u8) => Ok(Self::Cbor), + Ok(val) => { + Err(minicbor::decode::Error::message(format!( + "unsupported CoAP Content Formats value: {val}" + ))) }, - }; - Ok(content_type) + Err(_) => { + d.set_position(p); + d.str()?.parse().map_err(minicbor::decode::Error::message) + }, + } } } @@ -108,34 +105,39 @@ impl TryFrom<&coset::ContentType> for ContentType { mod tests { use super::*; - #[test] - fn content_type_validate_test() { - let json_bytes = serde_json::to_vec(&serde_json::Value::Null).unwrap(); - assert!(ContentType::Json.validate(&json_bytes).is_ok()); - assert!(ContentType::Cbor.validate(&json_bytes).is_err()); - - let 
cbor_bytes = minicbor::to_vec(minicbor::data::Token::Null).unwrap(); - assert!(ContentType::Json.validate(&cbor_bytes).is_err()); - assert!(ContentType::Cbor.validate(&cbor_bytes).is_ok()); - } - #[test] fn content_type_string_test() { assert_eq!( ContentType::from_str("application/cbor").unwrap(), ContentType::Cbor ); + assert_eq!( + ContentType::from_str("application/cddl").unwrap(), + ContentType::Cddl + ); assert_eq!( ContentType::from_str("application/json").unwrap(), ContentType::Json ); + assert_eq!( + ContentType::from_str("application/json+schema").unwrap(), + ContentType::JsonSchema + ); assert_eq!( "application/cbor".parse::().unwrap(), ContentType::Cbor ); + assert_eq!( + "application/cddl".parse::().unwrap(), + ContentType::Cddl + ); assert_eq!( "application/json".parse::().unwrap(), ContentType::Json ); + assert_eq!( + "application/json+schema".parse::().unwrap(), + ContentType::JsonSchema + ); } } diff --git a/rust/signed_doc/src/metadata/doc_type.rs b/rust/signed_doc/src/metadata/doc_type.rs new file mode 100644 index 00000000000..efaaf3c5a16 --- /dev/null +++ b/rust/signed_doc/src/metadata/doc_type.rs @@ -0,0 +1,154 @@ +//! Document Type. + +use std::{ + fmt::{Display, Formatter}, + hash::Hash, + ops::Deref, + str::FromStr, +}; + +use catalyst_types::uuid::{CborContext, Uuid, UuidV4}; +use minicbor::{Decode, Decoder, Encode}; + +/// Document type - `UUIDv4`. +#[derive(Clone, Debug, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)] +pub struct DocType(UuidV4); + +impl Deref for DocType { + type Target = UuidV4; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl DocType { + /// A const alternative impl of `TryFrom` + /// + /// # Errors + /// - `catalyst_types::uuid::InvalidUuidV4` + pub const fn try_from_uuid(uuid: Uuid) -> Result { + match UuidV4::try_from_uuid(uuid) { + Ok(v) => Ok(Self(v)), + Err(err) => Err(err), + } + } +} + +impl From for DocType { + fn from(value: UuidV4) -> Self { + DocType(value) + } +} + +impl TryFrom for DocType { + type Error = catalyst_types::uuid::InvalidUuidV4; + + fn try_from(value: Uuid) -> Result { + UuidV4::try_from(value).map(Into::into) + } +} + +impl FromStr for DocType { + type Err = catalyst_types::uuid::UuidV4ParsingError; + + fn from_str(s: &str) -> Result { + s.parse::().map(Self) + } +} + +impl TryFrom for DocType { + type Error = catalyst_types::uuid::UuidV4ParsingError; + + fn try_from(s: String) -> Result { + s.parse() + } +} + +impl Display for DocType { + fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { + write!(f, "{}", self.0) + } +} + +impl Decode<'_, ()> for DocType { + fn decode(d: &mut Decoder, _ctx: &mut ()) -> Result { + UuidV4::decode(d, &mut CborContext::Tagged) + .map_err(|e| { + minicbor::decode::Error::message(format!( + "DocType decoding Cannot decode single UUIDv4: {e}" + )) + }) + .map(Self) + } +} + +impl Encode for DocType { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut C, + ) -> Result<(), minicbor::encode::Error> { + self.0.encode(e, &mut CborContext::Tagged) + } +} + +#[cfg(test)] +mod tests { + use catalyst_types::uuid::UuidV7; + use minicbor::Encoder; + use test_case::test_case; + + use super::*; + + #[test_case( + { + Encoder::new(Vec::new()) + } ; + "Invalid empty CBOR bytes" + )] + #[test_case( + { + let mut e = Encoder::new(Vec::new()); + e.encode_with(UuidV4::new(), &mut CborContext::Untagged).unwrap(); + e + } ; + "Invalid untagged uuid v4" + )] + #[test_case( + { + let mut e = Encoder::new(Vec::new()); + e.encode_with(UuidV7::new(), &mut 
CborContext::Tagged).unwrap(); + e + } ; + "Invalid tagged uuid v7" + )] + fn test_invalid_cbor_decode(e: Encoder>) { + assert!(DocType::decode(&mut Decoder::new(e.into_writer().as_slice()), &mut ()).is_err()); + } + + #[test_case( + |uuid: UuidV4| { + let mut e = Encoder::new(Vec::new()); + e.encode_with(uuid, &mut CborContext::Tagged).unwrap(); + e + } ; + "Valid uuid v4" + )] + fn test_valid_cbor_decode(e_gen: impl FnOnce(UuidV4) -> Encoder>) { + let uuid = UuidV4::new(); + let e = e_gen(uuid); + + let doc_type = + DocType::decode(&mut Decoder::new(e.into_writer().as_slice()), &mut ()).unwrap(); + assert_eq!(doc_type.0, uuid); + } + + #[test_case( + serde_json::json!(UuidV4::new()) ; + "Document type old format" + )] + fn test_json_valid_serde(json: serde_json::Value) { + let refs: DocType = serde_json::from_value(json).unwrap(); + let json_from_refs = serde_json::to_value(&refs).unwrap(); + assert_eq!(refs, serde_json::from_value(json_from_refs).unwrap()); + } +} diff --git a/rust/signed_doc/src/metadata/document_ref.rs b/rust/signed_doc/src/metadata/document_ref.rs deleted file mode 100644 index 00e0bba2416..00000000000 --- a/rust/signed_doc/src/metadata/document_ref.rs +++ /dev/null @@ -1,55 +0,0 @@ -//! Catalyst Signed Document Metadata. - -use std::fmt::Display; - -use coset::cbor::Value; - -use super::{utils::CborUuidV7, UuidV7}; - -/// Reference to a Document. -#[derive(Copy, Clone, Debug, PartialEq, serde::Serialize, serde::Deserialize)] -pub struct DocumentRef { - /// Reference to the Document Id - pub id: UuidV7, - /// Reference to the Document Ver - pub ver: UuidV7, -} - -impl Display for DocumentRef { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "id: {}, ver: {}", self.id, self.ver) - } -} - -impl TryFrom for Value { - type Error = anyhow::Error; - - fn try_from(value: DocumentRef) -> Result { - Ok(Value::Array(vec![ - Value::try_from(CborUuidV7(value.id))?, - Value::try_from(CborUuidV7(value.ver))?, - ])) - } -} - -impl TryFrom<&Value> for DocumentRef { - type Error = anyhow::Error; - - #[allow(clippy::indexing_slicing)] - fn try_from(val: &Value) -> anyhow::Result { - let Some(array) = val.as_array() else { - anyhow::bail!("Document Reference must be either a single UUID or an array of two"); - }; - anyhow::ensure!( - array.len() == 2, - "Document Reference array of two UUIDs was expected" - ); - let CborUuidV7(id) = CborUuidV7::try_from(&array[0])?; - let CborUuidV7(ver) = CborUuidV7::try_from(&array[1])?; - anyhow::ensure!( - ver >= id, - "Document Reference Version can never be smaller than its ID" - ); - Ok(DocumentRef { id, ver }) - } -} diff --git a/rust/signed_doc/src/metadata/document_refs/doc_locator.rs b/rust/signed_doc/src/metadata/document_refs/doc_locator.rs new file mode 100644 index 00000000000..1375234db70 --- /dev/null +++ b/rust/signed_doc/src/metadata/document_refs/doc_locator.rs @@ -0,0 +1,138 @@ +//! Document Locator, where a document can be located. +//! A [CBOR Encoded IPLD Content Identifier](https://github.com/ipld/cid-cbor/) +//! or also known as [IPFS CID](https://docs.ipfs.tech/concepts/content-addressing/#what-is-a-cid). + +use std::fmt::Display; + +use cbork_utils::{decode_context::DecodeCtx, map::Map}; +use minicbor::{Decode, Decoder, Encode}; + +/// CBOR tag of IPLD content identifiers (CIDs). +const CID_TAG: u64 = 42; + +/// CID map key. +const CID_MAP_KEY: &str = "cid"; + +/// Document locator number of map item. +const DOC_LOC_MAP_ITEM: u64 = 1; + +/// Document locator, no size limit. 
+#[derive(Clone, Debug, Default, PartialEq, Hash, Eq)] +pub struct DocLocator(Vec); + +impl DocLocator { + #[must_use] + /// Length of the document locator. + pub fn len(&self) -> usize { + self.0.len() + } + + #[must_use] + /// Is the document locator empty. + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +impl From> for DocLocator { + fn from(value: Vec) -> Self { + DocLocator(value) + } +} + +impl Display for DocLocator { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "0x{}", hex::encode(self.0.as_slice())) + } +} + +// document_locator = { "cid" => cid } +impl Decode<'_, ()> for DocLocator { + fn decode(d: &mut Decoder, _ctx: &mut ()) -> Result { + const CONTEXT: &str = "DocLocator decoding"; + + let entries = Map::decode(d, &mut DecodeCtx::Deterministic)?; + + match entries.as_slice() { + [entry] => { + let key = minicbor::Decoder::new(&entry.key_bytes) + .str() + .map_err(|e| e.with_message(format!("{CONTEXT}: expected string")))?; + + if key != "cid" { + return Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: expected key 'cid', found '{key}'" + ))); + } + + let mut value_decoder = minicbor::Decoder::new(&entry.value); + + let tag = value_decoder + .tag() + .map_err(|e| e.with_message(format!("{CONTEXT}: expected tag")))?; + + if tag.as_u64() != CID_TAG { + return Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: expected tag {CID_TAG}, found {tag}", + ))); + } + + // No length limit + let cid_bytes = value_decoder + .bytes() + .map_err(|e| e.with_message(format!("{CONTEXT}: expected bytes")))?; + + Ok(DocLocator(cid_bytes.to_vec())) + }, + _ => { + Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: expected map length {DOC_LOC_MAP_ITEM}, found {}", + entries.len() + ))) + }, + } + } +} + +impl Encode<()> for DocLocator { + fn encode( + &self, e: &mut minicbor::Encoder, (): &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.map(DOC_LOC_MAP_ITEM)?; + e.str(CID_MAP_KEY)?; + e.tag(minicbor::data::Tag::new(CID_TAG))?; + e.bytes(&self.0)?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + + use minicbor::{Decoder, Encoder}; + + use super::*; + + #[test] + fn test_doc_locator_encode_decode() { + let locator = DocLocator(vec![1, 2, 3, 4]); + let mut buffer = Vec::new(); + let mut encoder = Encoder::new(&mut buffer); + locator.encode(&mut encoder, &mut ()).unwrap(); + let mut decoder = Decoder::new(&buffer); + let decoded_doc_loc = DocLocator::decode(&mut decoder, &mut ()).unwrap(); + assert_eq!(locator, decoded_doc_loc); + } + + // Empty doc locator should not fail + #[test] + fn test_doc_locator_encode_decode_empty() { + let locator = DocLocator(vec![]); + let mut buffer = Vec::new(); + let mut encoder = Encoder::new(&mut buffer); + locator.encode(&mut encoder, &mut ()).unwrap(); + let mut decoder = Decoder::new(&buffer); + let decoded_doc_loc = DocLocator::decode(&mut decoder, &mut ()).unwrap(); + assert_eq!(locator, decoded_doc_loc); + } +} diff --git a/rust/signed_doc/src/metadata/document_refs/doc_ref.rs b/rust/signed_doc/src/metadata/document_refs/doc_ref.rs new file mode 100644 index 00000000000..efa36f699e2 --- /dev/null +++ b/rust/signed_doc/src/metadata/document_refs/doc_ref.rs @@ -0,0 +1,120 @@ +//! Document reference. 
+ +use std::fmt::Display; + +use catalyst_types::uuid::{CborContext, UuidV7}; +use cbork_utils::{array::Array, decode_context::DecodeCtx}; +use minicbor::{Decode, Encode}; + +use super::doc_locator::DocLocator; + +/// Number of item that should be in each document reference instance. +const DOC_REF_ARR_ITEM: u64 = 3; + +/// Reference to a Document. +#[derive(Clone, Debug, PartialEq, Hash, Eq)] +pub struct DocumentRef { + /// Reference to the Document Id + id: UuidV7, + /// Reference to the Document Ver + ver: UuidV7, + /// Document locator + doc_locator: DocLocator, +} + +impl DocumentRef { + /// Create a new instance of document reference. + #[must_use] + pub fn new(id: UuidV7, ver: UuidV7, doc_locator: DocLocator) -> Self { + Self { + id, + ver, + doc_locator, + } + } + + /// Get Document Id. + #[must_use] + pub fn id(&self) -> &UuidV7 { + &self.id + } + + /// Get Document Ver. + #[must_use] + pub fn ver(&self) -> &UuidV7 { + &self.ver + } + + /// Get Document Locator. + #[must_use] + pub fn doc_locator(&self) -> &DocLocator { + &self.doc_locator + } +} + +impl Display for DocumentRef { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "id: {}, ver: {}, document_locator: {}", + self.id, self.ver, self.doc_locator + ) + } +} + +impl Decode<'_, ()> for DocumentRef { + fn decode( + d: &mut minicbor::Decoder<'_>, _ctx: &mut (), + ) -> Result { + const CONTEXT: &str = "DocumentRef decoding"; + + let arr = Array::decode(d, &mut DecodeCtx::Deterministic) + .map_err(|e| minicbor::decode::Error::message(format!("{CONTEXT}: {e}")))?; + + let doc_ref = match arr.as_slice() { + [id_bytes, ver_bytes, locator_bytes] => { + let id = UuidV7::decode( + &mut minicbor::Decoder::new(id_bytes.as_slice()), + &mut CborContext::Tagged, + ) + .map_err(|e| e.with_message("Invalid ID UUIDv7"))?; + + let ver = UuidV7::decode( + &mut minicbor::Decoder::new(ver_bytes.as_slice()), + &mut CborContext::Tagged, + ) + .map_err(|e| e.with_message("Invalid Ver UUIDv7"))?; + + let doc_locator = minicbor::Decoder::new(locator_bytes.as_slice()) + .decode() + .map_err(|e| e.with_message("Failed to decode locator"))?; + + DocumentRef { + id, + ver, + doc_locator, + } + }, + _ => { + return Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: expected {DOC_REF_ARR_ITEM} items, found {}", + arr.len() + ))); + }, + }; + + Ok(doc_ref) + } +} + +impl Encode<()> for DocumentRef { + fn encode( + &self, e: &mut minicbor::Encoder, ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.array(DOC_REF_ARR_ITEM)?; + self.id.encode(e, &mut CborContext::Tagged)?; + self.ver.encode(e, &mut CborContext::Tagged)?; + self.doc_locator.encode(e, ctx)?; + Ok(()) + } +} diff --git a/rust/signed_doc/src/metadata/document_refs/mod.rs b/rust/signed_doc/src/metadata/document_refs/mod.rs new file mode 100644 index 00000000000..0bd1ded0b9f --- /dev/null +++ b/rust/signed_doc/src/metadata/document_refs/mod.rs @@ -0,0 +1,460 @@ +//! Document references. + +mod doc_locator; +mod doc_ref; +use std::fmt::Display; + +use catalyst_types::uuid::{CborContext, UuidV7}; +use cbork_utils::{array::Array, decode_context::DecodeCtx}; +pub use doc_locator::DocLocator; +pub use doc_ref::DocumentRef; +use minicbor::{Decode, Encode}; +use tracing::warn; + +use crate::CompatibilityPolicy; + +/// List of document reference instance. +#[derive(Clone, Debug, PartialEq, Hash, Eq)] +pub struct DocumentRefs(Vec); + +impl DocumentRefs { + /// Returns true if provided `cbor` bytes is a valid old format. 
+ /// ```cddl + /// old_format = [id, ver] + /// ``` + /// Returns false if provided `cbor` bytes is a valid new format. + /// ```cddl + /// new_format = [ +[id, ver, cid] ] + /// ``` + pub(crate) fn is_deprecated_cbor(cbor: &[u8]) -> Result { + let mut d = minicbor::Decoder::new(cbor); + d.array()?; + match d.datatype()? { + // new_format = [ +[id, ver, cid] ] + minicbor::data::Type::Array => Ok(false), + // old_format = [id, ver] + minicbor::data::Type::Tag => Ok(true), + ty => Err(minicbor::decode::Error::type_mismatch(ty)), + } + } +} + +/// Document reference error. +#[derive(Debug, Clone, thiserror::Error)] +pub enum DocRefError { + /// Invalid string conversion + #[error("Invalid string conversion: {0}")] + StringConversion(String), + /// Cannot decode hex. + #[error("Cannot decode hex: {0}")] + HexDecode(String), +} + +impl DocumentRefs { + /// Get a list of document reference instance. + #[must_use] + pub fn doc_refs(&self) -> &Vec { + &self.0 + } +} + +impl From> for DocumentRefs { + fn from(value: Vec) -> Self { + DocumentRefs(value) + } +} + +impl Display for DocumentRefs { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let items = self + .0 + .iter() + .map(|inner| format!("{inner}")) + .collect::>() + .join(", "); + write!(f, "[{items}]") + } +} + +impl Decode<'_, CompatibilityPolicy> for DocumentRefs { + fn decode( + d: &mut minicbor::Decoder<'_>, policy: &mut CompatibilityPolicy, + ) -> Result { + const CONTEXT: &str = "DocumentRefs decoding"; + + // Old: [id, ver] + // New: [ 1* [id, ver, locator] ] + let outer_arr = Array::decode(d, &mut DecodeCtx::Deterministic) + .map_err(|e| minicbor::decode::Error::message(format!("{CONTEXT}: {e}")))?; + + match outer_arr.as_slice() { + [first, rest @ ..] => { + match minicbor::Decoder::new(first).datatype()? { + // New structure inner part [id, ver, locator] + minicbor::data::Type::Array => { + let mut arr = vec![first]; + arr.extend(rest); + + let doc_refs = arr + .iter() + .map(|bytes| minicbor::Decoder::new(bytes).decode()) + .collect::>()?; + + Ok(DocumentRefs(doc_refs)) + }, + // Old structure (id, ver) + minicbor::data::Type::Tag => { + match policy { + CompatibilityPolicy::Accept | CompatibilityPolicy::Warn => { + if matches!(policy, CompatibilityPolicy::Warn) { + warn!("{CONTEXT}: Conversion of document reference, id and version, to list of document reference with doc locator"); + } + if rest.len() != 1 { + return Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: Must have exactly 2 elements inside array for document reference id and document reference version, found {}", + rest.len().overflowing_add(1).0 + ))); + } + + let id = UuidV7::decode(&mut minicbor::Decoder::new(first), &mut CborContext::Tagged).map_err(|e| { + e.with_message("Invalid ID UUIDv7") + })?; + let ver = rest + .first() + .map(|ver| UuidV7::decode(&mut minicbor::Decoder::new(ver), &mut CborContext::Tagged).map_err(|e| { + e.with_message("Invalid Ver UUIDv7") + })) + .transpose()? 
+ .ok_or_else(|| minicbor::decode::Error::message(format!("{CONTEXT}: Missing document reference version after document reference id")))?; + + Ok(DocumentRefs(vec![DocumentRef::new( + id, + ver, + // If old implementation is used, the locator will be empty + DocLocator::default(), + )])) + }, + CompatibilityPolicy::Fail => { + Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: Conversion of document reference id and version to list of document reference with doc locator is not allowed" + ))) + }, + } + }, + other => { + Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: Expected array of document reference, or tag of version and id, found {other}", + ))) + }, + } + }, + _ => { + Err(minicbor::decode::Error::message(format!( + "{CONTEXT}: Empty array", + ))) + }, + } + } +} + +impl Encode<()> for DocumentRefs { + fn encode( + &self, e: &mut minicbor::Encoder, ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + const CONTEXT: &str = "DocumentRefs encoding"; + if self.0.is_empty() { + return Err(minicbor::encode::Error::message(format!( + "{CONTEXT}: DocumentRefs cannot be empty" + ))); + } + e.array( + self.0 + .len() + .try_into() + .map_err(|e| minicbor::encode::Error::message(format!("{CONTEXT}, {e}")))?, + )?; + + for doc_ref in &self.0 { + doc_ref.encode(e, ctx)?; + } + Ok(()) + } +} + +mod serde_impl { + //! `serde::Deserialize` and `serde::Serialize` trait implementations + + use std::str::FromStr; + + use super::{DocLocator, DocRefError, DocumentRef, DocumentRefs, UuidV7}; + + /// Old structure deserialize as map {id, ver} + #[derive(serde::Deserialize)] + struct OldRef { + /// "id": "uuidv7 + id: String, + /// "ver": "uuidv7" + ver: String, + } + + /// New structure as deserialize as map {id, ver, cid} + #[derive(serde::Deserialize, serde::Serialize)] + struct NewRef { + /// "id": "uuidv7" + id: String, + /// "ver": "uuidv7" + ver: String, + /// "cid": "0x..." + cid: String, + } + + #[derive(serde::Deserialize)] + #[serde(untagged)] + enum DocRefSerde { + /// Old structure of document reference. + Old(OldRef), + /// New structure of document reference. 
+ New(Vec), + } + + impl serde::Serialize for DocumentRefs { + fn serialize(&self, serializer: S) -> Result + where S: serde::Serializer { + let iter = self.0.iter().map(|v| { + NewRef { + id: v.id().to_string(), + ver: v.ver().to_string(), + cid: v.doc_locator().to_string(), + } + }); + serializer.collect_seq(iter) + } + } + + impl<'de> serde::Deserialize<'de> for DocumentRefs { + fn deserialize(deserializer: D) -> Result + where D: serde::Deserializer<'de> { + let input = DocRefSerde::deserialize(deserializer)?; + match input { + DocRefSerde::Old(v) => { + let id = UuidV7::from_str(&v.id).map_err(|_| { + serde::de::Error::custom(DocRefError::StringConversion(v.id.clone())) + })?; + let ver = UuidV7::from_str(&v.ver).map_err(|_| { + serde::de::Error::custom(DocRefError::StringConversion(v.ver.clone())) + })?; + + Ok(DocumentRefs(vec![DocumentRef::new( + id, + ver, + DocLocator::default(), + )])) + }, + DocRefSerde::New(value) => { + let mut dr = vec![]; + for v in value { + let id = UuidV7::from_str(&v.id).map_err(|_| { + serde::de::Error::custom(DocRefError::StringConversion(v.id.clone())) + })?; + let ver = UuidV7::from_str(&v.ver).map_err(|_| { + serde::de::Error::custom(DocRefError::StringConversion(v.ver.clone())) + })?; + let cid = &v.cid.strip_prefix("0x").unwrap_or(&v.cid); + let locator = hex::decode(cid).map_err(|_| { + serde::de::Error::custom(DocRefError::HexDecode(v.cid.clone())) + })?; + dr.push(DocumentRef::new(id, ver, locator.into())); + } + Ok(DocumentRefs(dr)) + }, + } + } + } +} + +#[cfg(test)] +mod tests { + + use minicbor::{Decoder, Encoder}; + use test_case::test_case; + + use super::*; + + #[test_case( + CompatibilityPolicy::Accept, + { + Encoder::new(Vec::new()) + } ; + "Invalid empty CBOR bytes" + )] + #[test_case( + CompatibilityPolicy::Accept, + { + let mut e = Encoder::new(Vec::new()); + e.array(0).unwrap(); + e + } ; + "Invalid empty CBOR array" + )] + #[test_case( + CompatibilityPolicy::Fail, + { + let mut e = Encoder::new(Vec::new()); + e.array(2) + .unwrap() + .encode_with(UuidV7::new(), &mut CborContext::Tagged) + .unwrap() + .encode_with(UuidV7::new(), &mut CborContext::Tagged) + .unwrap(); + e + } ; + "Valid array of two uuid v7 (old format), fail policy" + )] + #[test_case( + CompatibilityPolicy::Accept, + { + let mut e = Encoder::new(Vec::new()); + e.array(2) + .unwrap() + .encode_with(UuidV7::new(), &mut CborContext::Untagged) + .unwrap() + .encode_with(UuidV7::new(), &mut CborContext::Untagged) + .unwrap(); + e + } ; + "Invalid untagged uuids v7 (old format)" + )] + #[test_case( + CompatibilityPolicy::Accept, + { + let mut e = Encoder::new(Vec::new()); + e.array(1) + .unwrap() + .array(3) + .unwrap() + .encode_with(UuidV7::new(), &mut CborContext::Untagged) + .unwrap() + .encode_with(UuidV7::new(), &mut CborContext::Untagged) + .unwrap() + .encode(DocLocator::default()) + .unwrap(); + e + } ; + "Invalid untagged uuid uuids v7 (new format)" + )] + fn test_invalid_cbor_decode(mut policy: CompatibilityPolicy, e: Encoder>) { + assert!( + DocumentRefs::decode(&mut Decoder::new(e.into_writer().as_slice()), &mut policy) + .is_err() + ); + } + + #[test_case( + CompatibilityPolicy::Accept, + |uuid: UuidV7, _: DocLocator| { + let mut e = Encoder::new(Vec::new()); + e.array(2) + .unwrap() + .encode_with(uuid, &mut CborContext::Tagged) + .unwrap() + .encode_with(uuid, &mut CborContext::Tagged) + .unwrap(); + e + } ; + "Valid single doc ref (old format)" + )] + #[test_case( + CompatibilityPolicy::Warn, + |uuid: UuidV7, _: DocLocator| { + let mut e = 
Encoder::new(Vec::new()); + e.array(2) + .unwrap() + .encode_with(uuid, &mut CborContext::Tagged) + .unwrap() + .encode_with(uuid, &mut CborContext::Tagged) + .unwrap(); + e + } ; + "Valid single doc ref (old format), warn policy" + )] + #[test_case( + CompatibilityPolicy::Accept, + |uuid: UuidV7, doc_loc: DocLocator| { + let mut e = Encoder::new(Vec::new()); + e.array(1) + .unwrap() + .array(3) + .unwrap() + .encode_with(uuid, &mut CborContext::Tagged) + .unwrap() + .encode_with(uuid, &mut CborContext::Tagged) + .unwrap() + .encode(doc_loc) + .unwrap(); + e + } ; + "Array of new doc ref (new format)" + )] + #[test_case( + CompatibilityPolicy::Fail, + |uuid: UuidV7, doc_loc: DocLocator| { + let mut e = Encoder::new(Vec::new()); + e.array(1) + .unwrap() + .array(3) + .unwrap() + .encode_with(uuid, &mut CborContext::Tagged) + .unwrap() + .encode_with(uuid, &mut CborContext::Tagged) + .unwrap() + .encode(doc_loc) + .unwrap(); + e + } ; + "Array of new doc ref (new format), fail policy" + )] + fn test_valid_cbor_decode( + mut policy: CompatibilityPolicy, e_gen: impl FnOnce(UuidV7, DocLocator) -> Encoder>, + ) { + let uuid = UuidV7::new(); + let doc_loc = DocLocator::default(); + let e = e_gen(uuid, doc_loc.clone()); + + let doc_refs = + DocumentRefs::decode(&mut Decoder::new(e.into_writer().as_slice()), &mut policy) + .unwrap(); + assert_eq!(doc_refs.0, vec![DocumentRef::new(uuid, uuid, doc_loc)]); + } + + #[test_case( + serde_json::json!( + { + "id": UuidV7::new(), + "ver": UuidV7::new(), + } + ) ; + "Document reference type old format" + )] + #[test_case( + serde_json::json!( + [ + { + "id": UuidV7::new(), + "ver": UuidV7::new(), + "cid": format!("0x{}", hex::encode([1, 2, 3])) + }, + { + "id": UuidV7::new(), + "ver": UuidV7::new(), + "cid": format!("0x{}", hex::encode([1, 2, 3])) + } + ] + ) ; + "Document reference type new format" + )] + fn test_json_valid_serde(json: serde_json::Value) { + let refs: DocumentRefs = serde_json::from_value(json).unwrap(); + let json_from_refs = serde_json::to_value(&refs).unwrap(); + assert_eq!(refs, serde_json::from_value(json_from_refs).unwrap()); + } +} diff --git a/rust/signed_doc/src/metadata/extra_fields.rs b/rust/signed_doc/src/metadata/extra_fields.rs deleted file mode 100644 index d002b422102..00000000000 --- a/rust/signed_doc/src/metadata/extra_fields.rs +++ /dev/null @@ -1,239 +0,0 @@ -//! Catalyst Signed Document Extra Fields. - -use catalyst_types::problem_report::ProblemReport; -use coset::{cbor::Value, Label, ProtectedHeader}; - -use super::{ - cose_protected_header_find, utils::decode_document_field_from_protected_header, DocumentRef, - Section, -}; - -/// `ref` field COSE key value -const REF_KEY: &str = "ref"; -/// `template` field COSE key value -const TEMPLATE_KEY: &str = "template"; -/// `reply` field COSE key value -const REPLY_KEY: &str = "reply"; -/// `section` field COSE key value -const SECTION_KEY: &str = "section"; -/// `collabs` field COSE key value -const COLLABS_KEY: &str = "collabs"; -/// `parameters` field COSE key value -const PARAMETERS_KEY: &str = "parameters"; -/// `brand_id` field COSE key value (alias of the `parameters` field) -const BRAND_ID_KEY: &str = "brand_id"; -/// `campaign_id` field COSE key value (alias of the `parameters` field) -const CAMPAIGN_ID_KEY: &str = "campaign_id"; -/// `category_id` field COSE key value (alias of the `parameters` field) -const CATEGORY_ID_KEY: &str = "category_id"; - -/// Extra Metadata Fields. -/// -/// These values are extracted from the COSE Sign protected header labels. 
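
For reference, a small illustrative sketch of the two JSON shapes handled by the new `DocumentRefs` (de)serialization above; the UUID strings are placeholders reused from this diff's tests, and the code assumes it runs inside the crate where `DocumentRefs` is in scope.

```rust
// Old shape: a single { id, ver } object; the locator defaults to empty.
let old: DocumentRefs = serde_json::from_value(serde_json::json!({
    "id":  "0197f398-9f43-7c23-a576-f765131b81f2",
    "ver": "0197f398-9f43-7c23-a576-f765131b81f2",
}))
.unwrap();

// New shape: an array of { id, ver, cid } objects, with cid as 0x-prefixed hex.
let new: DocumentRefs = serde_json::from_value(serde_json::json!([{
    "id":  "0197f398-9f43-7c23-a576-f765131b81f2",
    "ver": "0197f398-9f43-7c23-a576-f765131b81f2",
    "cid": "0x010203",
}]))
.unwrap();

// Serialization always emits the new (array) shape.
assert!(serde_json::to_value(&old).unwrap().is_array());
assert!(serde_json::to_value(&new).unwrap().is_array());
```
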
-#[derive(Clone, Default, Debug, PartialEq, serde::Serialize, serde::Deserialize)] -pub struct ExtraFields { - /// Reference to the latest document. - #[serde(rename = "ref", skip_serializing_if = "Option::is_none")] - doc_ref: Option, - /// Reference to the document template. - #[serde(skip_serializing_if = "Option::is_none")] - template: Option, - /// Reference to the document reply. - #[serde(skip_serializing_if = "Option::is_none")] - reply: Option, - /// Reference to the document section. - #[serde(skip_serializing_if = "Option::is_none")] - section: Option
, - /// Reference to the document collaborators. Collaborator type is TBD. - #[serde(default = "Vec::new", skip_serializing_if = "Vec::is_empty")] - collabs: Vec, - /// Reference to the parameters document. - #[serde(skip_serializing_if = "Option::is_none")] - parameters: Option, -} - -impl ExtraFields { - /// Return `ref` field. - #[must_use] - pub fn doc_ref(&self) -> Option { - self.doc_ref - } - - /// Return `template` field. - #[must_use] - pub fn template(&self) -> Option { - self.template - } - - /// Return `reply` field. - #[must_use] - pub fn reply(&self) -> Option { - self.reply - } - - /// Return `section` field. - #[must_use] - pub fn section(&self) -> Option<&Section> { - self.section.as_ref() - } - - /// Return `collabs` field. - #[must_use] - pub fn collabs(&self) -> &Vec { - &self.collabs - } - - /// Return `parameters` field. - #[must_use] - pub fn parameters(&self) -> Option { - self.parameters - } - - /// Fill the COSE header `ExtraFields` data into the header builder. - pub(super) fn fill_cose_header_fields( - &self, mut builder: coset::HeaderBuilder, - ) -> anyhow::Result { - if let Some(doc_ref) = &self.doc_ref { - builder = builder.text_value(REF_KEY.to_string(), Value::try_from(*doc_ref)?); - } - if let Some(template) = &self.template { - builder = builder.text_value(TEMPLATE_KEY.to_string(), Value::try_from(*template)?); - } - if let Some(reply) = &self.reply { - builder = builder.text_value(REPLY_KEY.to_string(), Value::try_from(*reply)?); - } - - if let Some(section) = &self.section { - builder = builder.text_value(SECTION_KEY.to_string(), Value::from(section.clone())); - } - - if !self.collabs.is_empty() { - builder = builder.text_value( - COLLABS_KEY.to_string(), - Value::Array(self.collabs.iter().cloned().map(Value::Text).collect()), - ); - } - - if let Some(parameters) = &self.parameters { - builder = builder.text_value(PARAMETERS_KEY.to_string(), Value::try_from(*parameters)?); - } - - Ok(builder) - } - - /// Converting COSE Protected Header to `ExtraFields`. - pub(crate) fn from_protected_header( - protected: &ProtectedHeader, error_report: &ProblemReport, - ) -> Self { - /// Context for problem report messages during decoding from COSE protected - /// header. 
- const COSE_DECODING_CONTEXT: &str = "COSE ProtectedHeader to ExtraFields"; - - let doc_ref = decode_document_field_from_protected_header( - protected, - REF_KEY, - COSE_DECODING_CONTEXT, - error_report, - ); - let template = decode_document_field_from_protected_header( - protected, - TEMPLATE_KEY, - COSE_DECODING_CONTEXT, - error_report, - ); - let reply = decode_document_field_from_protected_header( - protected, - REPLY_KEY, - COSE_DECODING_CONTEXT, - error_report, - ); - let section = decode_document_field_from_protected_header( - protected, - SECTION_KEY, - COSE_DECODING_CONTEXT, - error_report, - ); - - // process `parameters` field and all its aliases - let (parameters, has_multiple_fields) = [ - PARAMETERS_KEY, - BRAND_ID_KEY, - CAMPAIGN_ID_KEY, - CATEGORY_ID_KEY, - ] - .iter() - .filter_map(|field_name| -> Option { - decode_document_field_from_protected_header( - protected, - field_name, - COSE_DECODING_CONTEXT, - error_report, - ) - }) - .fold((None, false), |(res, _), v| (Some(v), res.is_some())); - if has_multiple_fields { - error_report.duplicate_field( - "brand_id, campaign_id, category_id", - "Only value at the same time is allowed parameters, brand_id, campaign_id, category_id", - "Validation of parameters field aliases" - ); - } - - let mut extra = ExtraFields { - doc_ref, - template, - reply, - section, - parameters, - ..Default::default() - }; - - if let Some(cbor_doc_collabs) = cose_protected_header_find(protected, |key| { - key == &Label::Text(COLLABS_KEY.to_string()) - }) { - if let Ok(collabs) = cbor_doc_collabs.clone().into_array() { - let mut c = Vec::new(); - for (ids, collaborator) in collabs.iter().cloned().enumerate() { - match collaborator.clone().into_text() { - Ok(collaborator) => { - c.push(collaborator); - }, - Err(_) => { - error_report.conversion_error( - &format!("COSE protected header collaborator index {ids}"), - &format!("{collaborator:?}"), - "Expected a CBOR String", - &format!( - "{COSE_DECODING_CONTEXT}, converting collaborator to String", - ), - ); - }, - } - } - extra.collabs = c; - } else { - error_report.conversion_error( - "CBOR COSE protected header collaborators", - &format!("{cbor_doc_collabs:?}"), - "Expected a CBOR Array", - &format!("{COSE_DECODING_CONTEXT}, converting collaborators to Array",), - ); - } - } - - extra - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn empty_extra_fields_json_serde_test() { - let extra = ExtraFields::default(); - - let json = serde_json::to_value(extra).unwrap(); - assert_eq!(json, serde_json::json!({})); - } -} diff --git a/rust/signed_doc/src/metadata/mod.rs b/rust/signed_doc/src/metadata/mod.rs index bbbdb1677d7..7c085dc5d7f 100644 --- a/rust/signed_doc/src/metadata/mod.rs +++ b/rust/signed_doc/src/metadata/mod.rs @@ -1,71 +1,44 @@ //! Catalyst Signed Document Metadata. 
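
As a brief orientation for the reworked `Metadata` API below, here is an illustrative sketch (not part of the diff) of building metadata from JSON and reading it back through the typed accessors; it assumes the `from_json` constructor and getters introduced in this change, running inside the crate.

```rust
let metadata = Metadata::from_json(serde_json::json!({
    "id":           "0197f398-9f43-7c23-a576-f765131b81f2",
    "ver":          "0197f398-9f43-7c23-a576-f765131b81f2",
    "type":         "ab7c2428-c353-4331-856e-385b2eb20546",
    "content-type": "application/json",
}))
.unwrap();

// The required fields come back through fallible getters...
assert!(metadata.doc_id().is_ok());
assert!(metadata.doc_ver().is_ok());
assert!(metadata.doc_type().is_ok());
assert!(metadata.content_type().is_ok());

// ...while optional fields such as `ref` are simply absent.
assert!(metadata.doc_ref().is_none());
```
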
-use std::fmt::{Display, Formatter}; +use std::{ + collections::HashMap, + fmt::{Display, Formatter}, +}; +mod collaborators; mod content_encoding; mod content_type; -mod document_ref; -mod extra_fields; +pub(crate) mod doc_type; +mod document_refs; mod section; -pub(crate) mod utils; +mod supported_field; -use catalyst_types::{ - problem_report::ProblemReport, - uuid::{UuidV4, UuidV7}, -}; +use catalyst_types::{catalyst_id::CatalystId, problem_report::ProblemReport, uuid::UuidV7}; pub use content_encoding::ContentEncoding; pub use content_type::ContentType; -use coset::{cbor::Value, iana::CoapContentFormat}; -pub use document_ref::DocumentRef; -pub use extra_fields::ExtraFields; +pub use doc_type::DocType; +pub use document_refs::{DocLocator, DocumentRef, DocumentRefs}; +use minicbor::Decoder; pub use section::Section; -use utils::{ - cose_protected_header_find, decode_document_field_from_protected_header, CborUuidV4, CborUuidV7, -}; +use strum::IntoDiscriminant as _; -/// `content_encoding` field COSE key value -const CONTENT_ENCODING_KEY: &str = "Content-Encoding"; -/// `doc_type` field COSE key value -const TYPE_KEY: &str = "type"; -/// `id` field COSE key value -const ID_KEY: &str = "id"; -/// `ver` field COSE key value -const VER_KEY: &str = "ver"; +use crate::decode_context::CompatibilityPolicy; +pub(crate) use crate::metadata::supported_field::{SupportedField, SupportedLabel}; /// Document Metadata. /// /// These values are extracted from the COSE Sign protected header. #[derive(Clone, Debug, PartialEq, Default)] -pub struct Metadata(InnerMetadata); - -/// An actual representation of all metadata fields. -#[derive(Clone, Debug, PartialEq, serde::Deserialize, Default)] -pub(crate) struct InnerMetadata { - /// Document Type `UUIDv4`. - #[serde(rename = "type")] - doc_type: Option, - /// Document ID `UUIDv7`. - id: Option, - /// Document Version `UUIDv7`. - ver: Option, - /// Document Payload Content Type. - #[serde(rename = "content-type")] - content_type: Option, - /// Document Payload Content Encoding. - #[serde(rename = "content-encoding")] - content_encoding: Option, - /// Additional Metadata Fields. - #[serde(flatten)] - extra: ExtraFields, -} +pub struct Metadata(HashMap); impl Metadata { - /// Return Document Type `UUIDv4`. + /// Return Document Type `DocType` - a list of `UUIDv4`. /// /// # Errors /// - Missing 'type' field. - pub fn doc_type(&self) -> anyhow::Result { + pub fn doc_type(&self) -> anyhow::Result<&DocType> { self.0 - .doc_type + .get(&SupportedLabel::Type) + .and_then(SupportedField::try_as_type_ref) .ok_or(anyhow::anyhow!("Missing 'type' field")) } @@ -74,7 +47,11 @@ impl Metadata { /// # Errors /// - Missing 'id' field. pub fn doc_id(&self) -> anyhow::Result { - self.0.id.ok_or(anyhow::anyhow!("Missing 'id' field")) + self.0 + .get(&SupportedLabel::Id) + .and_then(SupportedField::try_as_id_ref) + .copied() + .ok_or(anyhow::anyhow!("Missing 'id' field")) } /// Return Document Version `UUIDv7`. @@ -82,7 +59,11 @@ impl Metadata { /// # Errors /// - Missing 'ver' field. pub fn doc_ver(&self) -> anyhow::Result { - self.0.ver.ok_or(anyhow::anyhow!("Missing 'ver' field")) + self.0 + .get(&SupportedLabel::Ver) + .and_then(SupportedField::try_as_ver_ref) + .copied() + .ok_or(anyhow::anyhow!("Missing 'ver' field")) } /// Returns the Document Content Type, if any. @@ -91,171 +72,306 @@ impl Metadata { /// - Missing 'content-type' field. 
pub fn content_type(&self) -> anyhow::Result { self.0 - .content_type + .get(&SupportedLabel::ContentType) + .and_then(SupportedField::try_as_content_type_ref) + .copied() .ok_or(anyhow::anyhow!("Missing 'content-type' field")) } /// Returns the Document Content Encoding, if any. #[must_use] pub fn content_encoding(&self) -> Option { - self.0.content_encoding + self.0 + .get(&SupportedLabel::ContentEncoding) + .and_then(SupportedField::try_as_content_encoding_ref) + .copied() } - /// Return reference to additional metadata fields. + /// Return `ref` field. #[must_use] - pub fn extra(&self) -> &ExtraFields { - &self.0.extra + pub fn doc_ref(&self) -> Option<&DocumentRefs> { + self.0 + .get(&SupportedLabel::Ref) + .and_then(SupportedField::try_as_ref_ref) } - /// Build `Metadata` object from the metadata fields, doing all necessary validation. - pub(crate) fn from_metadata_fields(metadata: InnerMetadata, report: &ProblemReport) -> Self { - if metadata.doc_type.is_none() { - report.missing_field("type", "Missing type field in COSE protected header"); - } - if metadata.id.is_none() { - report.missing_field("id", "Missing id field in COSE protected header"); - } - if metadata.ver.is_none() { - report.missing_field("ver", "Missing ver field in COSE protected header"); - } + /// Return `template` field. + #[must_use] + pub fn template(&self) -> Option<&DocumentRefs> { + self.0 + .get(&SupportedLabel::Template) + .and_then(SupportedField::try_as_template_ref) + } - if metadata.content_type.is_none() { - report.missing_field( - "content type", - "Missing content_type field in COSE protected header", - ); - } + /// Return `reply` field. + #[must_use] + pub fn reply(&self) -> Option<&DocumentRefs> { + self.0 + .get(&SupportedLabel::Reply) + .and_then(SupportedField::try_as_reply_ref) + } - Self(metadata) + /// Return `section` field. + #[must_use] + pub fn section(&self) -> Option<&Section> { + self.0 + .get(&SupportedLabel::Section) + .and_then(SupportedField::try_as_section_ref) } - /// Converting COSE Protected Header to Metadata. - pub(crate) fn from_protected_header( - protected: &coset::ProtectedHeader, report: &ProblemReport, - ) -> Self { - let metadata = InnerMetadata::from_protected_header(protected, report); - Self::from_metadata_fields(metadata, report) + /// Return `collaborators` field. + #[must_use] + pub fn collaborators(&self) -> &[CatalystId] { + self.0 + .get(&SupportedLabel::Collaborators) + .and_then(SupportedField::try_as_collaborators_ref) + .map_or(&[], |v| &**v) } -} -impl InnerMetadata { - /// Converting COSE Protected Header to Metadata fields, collecting decoding report - /// issues. - pub(crate) fn from_protected_header( - protected: &coset::ProtectedHeader, report: &ProblemReport, - ) -> Self { - /// Context for problem report messages during decoding from COSE protected - /// header. - const COSE_DECODING_CONTEXT: &str = "COSE Protected Header to Metadata"; - - let extra = ExtraFields::from_protected_header(protected, report); - let mut metadata = Self { - extra, - ..Self::default() - }; + /// Return `parameters` field. 
+ #[must_use] + pub fn parameters(&self) -> Option<&DocumentRefs> { + self.0 + .get(&SupportedLabel::Parameters) + .and_then(SupportedField::try_as_parameters_ref) + } - if let Some(value) = protected.header.content_type.as_ref() { - match ContentType::try_from(value) { - Ok(ct) => metadata.content_type = Some(ct), - Err(e) => { - report.conversion_error( - "COSE protected header content type", - &format!("{value:?}"), - &format!("Expected ContentType: {e}"), - &format!("{COSE_DECODING_CONTEXT}, ContentType"), - ); - }, + /// Add `SupportedField` into the `Metadata`. + /// + /// # Warning + /// + /// Building metadata by-field with this function doesn't ensure the presence of + /// required fields. Use [`Self::from_fields`] or [`Self::from_json`] if it's + /// important for metadata to be valid. + #[cfg(test)] + pub(crate) fn add_field(&mut self, field: SupportedField) { + self.0.insert(field.discriminant(), field); + } + + /// Build `Metadata` object from the metadata fields, doing all necessary validation. + pub(crate) fn from_fields( + fields: impl Iterator>, report: &ProblemReport, + ) -> Result { + const REPORT_CONTEXT: &str = "Metadata building"; + + let mut metadata = Metadata(HashMap::new()); + for v in fields { + let v = v?; + let k = v.discriminant(); + if metadata.0.insert(k, v).is_some() { + report.duplicate_field( + &k.to_string(), + "Duplicate metadata fields are not allowed", + REPORT_CONTEXT, + ); } } - if let Some(value) = cose_protected_header_find( - protected, - |key| matches!(key, coset::Label::Text(label) if label.eq_ignore_ascii_case(CONTENT_ENCODING_KEY)), - ) { - match ContentEncoding::try_from(value) { - Ok(ce) => metadata.content_encoding = Some(ce), - Err(e) => { - report.conversion_error( - "COSE protected header content encoding", - &format!("{value:?}"), - &format!("Expected ContentEncoding: {e}"), - &format!("{COSE_DECODING_CONTEXT}, ContentEncoding"), - ); - }, - } + if metadata.doc_type().is_err() { + report.missing_field("type", REPORT_CONTEXT); } + if metadata.doc_id().is_err() { + report.missing_field("id", REPORT_CONTEXT); + } + if metadata.doc_ver().is_err() { + report.missing_field("ver", REPORT_CONTEXT); + } + if metadata.content_type().is_err() { + report.missing_field("content-type", REPORT_CONTEXT); + } + + Ok(metadata) + } - metadata.doc_type = decode_document_field_from_protected_header::( - protected, - TYPE_KEY, - COSE_DECODING_CONTEXT, - report, - ) - .map(|v| v.0); - - metadata.id = decode_document_field_from_protected_header::( - protected, - ID_KEY, - COSE_DECODING_CONTEXT, - report, - ) - .map(|v| v.0); - - metadata.ver = decode_document_field_from_protected_header::( - protected, - VER_KEY, - COSE_DECODING_CONTEXT, - report, - ) - .map(|v| v.0); - - metadata + /// Build `Metadata` object from the metadata fields, doing all necessary validation. + /// + /// # Errors + /// - Json deserialization failure. + /// - Duplicate fields. + /// - Missing mandatory fields like `id`, `ver`, `type`. + pub fn from_json(fields: serde_json::Value) -> anyhow::Result { + let fields = serde::Deserializer::deserialize_map(fields, MetadataDeserializeVisitor)?; + let report = ProblemReport::new("Deserializing metadata from json"); + let metadata = Self::from_fields(fields.into_iter().map(anyhow::Result::<_>::Ok), &report)?; + anyhow::ensure!(!report.is_problematic(), "{:?}", report); + Ok(metadata) + } + + /// Serializes the current `Metadata` object into the JSON object. + /// + /// # Errors + /// - Json serialization failure. 
+ pub fn to_json(&self) -> anyhow::Result { + let map = self + .0 + .iter() + .map(|(k, v)| Ok((k.to_string(), serde_json::to_value(v)?))) + .collect::>>()?; + Ok(serde_json::Value::Object(map)) } } impl Display for Metadata { fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { writeln!(f, "Metadata {{")?; - writeln!(f, " type: {:?},", self.0.doc_type)?; - writeln!(f, " id: {:?},", self.0.id)?; - writeln!(f, " ver: {:?},", self.0.ver)?; - writeln!(f, " content_type: {:?}", self.0.content_type)?; - writeln!(f, " content_encoding: {:?}", self.0.content_encoding)?; - writeln!(f, " additional_fields: {:?},", self.0.extra)?; + writeln!(f, " type: {:?},", self.doc_type().ok())?; + writeln!(f, " id: {:?},", self.doc_id().ok())?; + writeln!(f, " ver: {:?},", self.doc_ver().ok())?; + writeln!(f, " content_type: {:?},", self.content_type().ok())?; + writeln!(f, " content_encoding: {:?},", self.content_encoding())?; + writeln!(f, " additional_fields: {{")?; + writeln!(f, " ref: {:?}", self.doc_ref())?; + writeln!(f, " template: {:?},", self.template())?; + writeln!(f, " reply: {:?},", self.reply())?; + writeln!(f, " section: {:?},", self.section())?; + writeln!(f, " collaborators: {:?},", self.collaborators())?; + writeln!(f, " parameters: {:?},", self.parameters())?; + writeln!(f, " }},")?; writeln!(f, "}}") } } -impl TryFrom<&Metadata> for coset::Header { - type Error = anyhow::Error; +impl minicbor::Encode<()> for Metadata { + /// Encode as a CBOR map. + /// + /// Note that to put it in an [RFC 8152] protected header. + /// The header must be then encoded as a binary string. + /// + /// Also note that this won't check the presence of the required fields, + /// so the checks must be done elsewhere. + /// + /// [RFC 8152]: https://datatracker.ietf.org/doc/html/rfc8152#autoid-8 + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.map( + self.0 + .len() + .try_into() + .map_err(minicbor::encode::Error::message)?, + )?; + self.0 + .values() + .try_fold(e, |e, field| e.encode(field))? + .ok() + } +} + +impl minicbor::Decode<'_, crate::decode_context::DecodeContext> for Metadata { + /// Decode from a CBOR map. + /// + /// Note that this won't decode an [RFC 8152] protected header as is. + /// The header must be first decoded as a binary string. + /// + /// Also note that this won't check the absence of the required fields, + /// so the checks must be done elsewhere. + /// + /// [RFC 8152]: https://datatracker.ietf.org/doc/html/rfc8152#autoid-8 + fn decode( + d: &mut Decoder<'_>, ctx: &mut crate::decode_context::DecodeContext, + ) -> Result { + let mut map_ctx = match ctx.policy() { + CompatibilityPolicy::Accept => { + cbork_utils::decode_context::DecodeCtx::non_deterministic() + }, + CompatibilityPolicy::Warn => { + cbork_utils::decode_context::DecodeCtx::non_deterministic_with_handler(|error| { + tracing::warn!( + error = ?error, + "Catalyst Signed Document non deterministically encoded metadata field", + ); + Ok(()) + }) + }, + CompatibilityPolicy::Fail => cbork_utils::decode_context::DecodeCtx::Deterministic, + }; + + let report = ctx.report().clone(); + let fields = cbork_utils::map::Map::decode(d, &mut map_ctx)? 
+ .into_iter() + .map(|e| { + let mut bytes = e.key_bytes; + bytes.extend(e.value); + Option::::decode(&mut minicbor::Decoder::new(&bytes), ctx) + }) + .filter_map(Result::transpose); + + Self::from_fields(fields, &report) + } +} + +/// Implements [`serde::de::Visitor`], so that [`Metadata`] can be +/// deserialized by [`serde::Deserializer::deserialize_map`]. +struct MetadataDeserializeVisitor; - fn try_from(meta: &Metadata) -> Result { - let mut builder = coset::HeaderBuilder::new() - .content_format(CoapContentFormat::from(meta.content_type()?)); +impl<'de> serde::de::Visitor<'de> for MetadataDeserializeVisitor { + type Value = Vec; - if let Some(content_encoding) = meta.content_encoding() { - builder = builder.text_value( - CONTENT_ENCODING_KEY.to_string(), - format!("{content_encoding}").into(), - ); + fn expecting(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.write_str("Catalyst Signed Document metadata key-value pairs") + } + + fn visit_map>(self, mut d: A) -> Result { + let mut res = Vec::with_capacity(d.size_hint().unwrap_or(0)); + while let Some(k) = d.next_key::()? { + let v = d.next_value_seed(k)?; + res.push(v); } + Ok(res) + } +} + +#[cfg(test)] +mod tests { + use test_case::test_case; - builder = builder - .text_value( - TYPE_KEY.to_string(), - Value::try_from(CborUuidV4(meta.doc_type()?))?, - ) - .text_value( - ID_KEY.to_string(), - Value::try_from(CborUuidV7(meta.doc_id()?))?, - ) - .text_value( - VER_KEY.to_string(), - Value::try_from(CborUuidV7(meta.doc_ver()?))?, - ); - - builder = meta.0.extra.fill_cose_header_fields(builder)?; - - Ok(builder.build()) + use super::*; + + #[test_case( + serde_json::json!({ + "id": "0197f398-9f43-7c23-a576-f765131b81f2", + "ver": "0197f398-9f43-7c23-a576-f765131b81f2", + "type": "ab7c2428-c353-4331-856e-385b2eb20546", + "content-type": "application/json", + }) ; + "minimally valid JSON" + )] + #[test_case( + serde_json::json!( + { + "id": "0197f398-9f43-7c23-a576-f765131b81f2", + "ver": "0197f398-9f43-7c23-a576-f765131b81f2", + "type": "ab7c2428-c353-4331-856e-385b2eb20546", + "content-type": "application/json", + "ref": [ + { + "id": "0197f398-9f43-7c23-a576-f765131b81f2", + "ver": "0197f398-9f43-7c23-a576-f765131b81f2", + "cid": "0x", + }, + ] + } + ) ; + "minimally valid JSON, new format reference type" + )] + #[test_case( + serde_json::json!( + { + "id": "0197f398-9f43-7c23-a576-f765131b81f2", + "ver": "0197f398-9f43-7c23-a576-f765131b81f2", + "type": "ab7c2428-c353-4331-856e-385b2eb20546", + "content-type": "application/json", + "ref": { + "id": "0197f398-9f43-7c23-a576-f765131b81f2", + "ver": "0197f398-9f43-7c23-a576-f765131b81f2", + }, + } + ) ; + "minimally valid JSON, old format reference type" + )] + fn test_json_valid_serde(json: serde_json::Value) { + let metadata = Metadata::from_json(json).unwrap(); + let json_from_meta = metadata.to_json().unwrap(); + assert_eq!(metadata, Metadata::from_json(json_from_meta).unwrap()); } } diff --git a/rust/signed_doc/src/metadata/section.rs b/rust/signed_doc/src/metadata/section.rs index 01e6a02a1b8..cbe97fee3da 100644 --- a/rust/signed_doc/src/metadata/section.rs +++ b/rust/signed_doc/src/metadata/section.rs @@ -2,7 +2,6 @@ use std::{fmt::Display, str::FromStr}; -use coset::cbor::Value; use serde::{Deserialize, Serialize}; /// 'section' field type definition, which is a JSON path string @@ -40,19 +39,19 @@ impl FromStr for Section { } } -impl From
for Value { - fn from(value: Section) -> Self { - Value::Text(value.to_string()) +impl minicbor::Encode<()> for Section { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.str(self.to_string().as_str())?; + Ok(()) } } -impl TryFrom<&Value> for Section { - type Error = anyhow::Error; - - fn try_from(val: &Value) -> anyhow::Result { - let str = val - .as_text() - .ok_or(anyhow::anyhow!("Not a cbor string type"))?; - Self::from_str(str) +impl minicbor::Decode<'_, ()> for Section { + fn decode( + d: &mut minicbor::Decoder<'_>, _ctx: &mut (), + ) -> Result { + d.str()?.parse().map_err(minicbor::decode::Error::message) } } diff --git a/rust/signed_doc/src/metadata/supported_field.rs b/rust/signed_doc/src/metadata/supported_field.rs new file mode 100644 index 00000000000..9fcc3725280 --- /dev/null +++ b/rust/signed_doc/src/metadata/supported_field.rs @@ -0,0 +1,299 @@ +//! Catalyst Signed Document unified metadata field. + +use std::fmt; + +use catalyst_types::uuid::UuidV7; +use serde::Deserialize; +use strum::{EnumDiscriminants, EnumTryAs, IntoDiscriminant as _}; + +use crate::{ + metadata::collaborators::Collaborators, ContentEncoding, ContentType, DocType, DocumentRefs, + Section, +}; + +/// COSE label. May be either a signed integer or a string. +#[derive(Copy, Clone, Eq, PartialEq)] +enum Label<'a> { + /// Integer label. + /// + /// Note that COSE isn't strictly limited to 8 bits for a label, but in practice it + /// fits. + /// + /// If for any reason wider bounds would be necessary, + /// then additional variants could be added to the [`Label`]. + U8(u8), + /// Text label. + Str(&'a str), +} + +impl minicbor::Encode<()> for Label<'_> { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + match self { + &Label::U8(u) => e.u8(u), + Label::Str(s) => e.str(s), + }? + .ok() + } +} + +impl<'a, C> minicbor::Decode<'a, C> for Label<'a> { + fn decode(d: &mut minicbor::Decoder<'a>, _: &mut C) -> Result { + match d.datatype()? { + minicbor::data::Type::U8 => d.u8().map(Self::U8), + minicbor::data::Type::String => d.str().map(Self::Str), + _ => { + Err(minicbor::decode::Error::message( + "Datatype is neither 8bit unsigned integer nor text", + ) + .at(d.position())) + }, + } + } +} + +impl fmt::Display for Label<'_> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Label::U8(u) => write!(f, "{u}"), + Label::Str(s) => f.write_str(s), + } + } +} + +/// Catalyst Signed Document metadata field. +/// Fields are assigned discriminants based on deterministic ordering (see [RFC 8949 +/// section 4.2.1]). +/// +/// Note that [`PartialEq`] implementation compares both keys and values. +/// +/// [RFC 8949 section 4.2.1]: https://www.rfc-editor.org/rfc/rfc8949.html#section-4.2.1 +#[derive(Clone, Debug, PartialEq, EnumDiscriminants, EnumTryAs)] +#[strum_discriminants( + name(SupportedLabel), + derive(serde::Deserialize, Hash), + serde(rename_all = "kebab-case"), + cfg_attr(test, derive(strum::VariantArray)) +)] +#[non_exhaustive] +#[repr(usize)] +pub(crate) enum SupportedField { + /// `content-type` field. In COSE it's represented as the signed integer `3` (see [RFC + /// 8949 section 3.1]). + /// + /// [RFC 8949 section 3.1]: https://datatracker.ietf.org/doc/html/rfc8152#section-3.1 + ContentType(ContentType) = 0, + /// `id` field. + Id(UuidV7) = 1, + /// `ref` field. + Ref(DocumentRefs) = 2, + /// `ver` field. + Ver(UuidV7) = 3, + /// `type` field. 
+ Type(DocType) = 4, + /// `reply` field. + Reply(DocumentRefs) = 5, + /// `section` field. + Section(Section) = 6, + /// `template` field. + Template(DocumentRefs) = 7, + /// `parameters` field. + Parameters(DocumentRefs) = 8, + /// `collaborators` field. + Collaborators(Collaborators) = 9, + /// `Content-Encoding` field. + ContentEncoding(ContentEncoding) = 10, +} + +impl SupportedLabel { + /// Try to convert from an arbitrary COSE [`Label`]. + /// This doesn't allow any aliases. + fn from_cose(label: Label<'_>) -> Option { + match label { + Label::U8(3) => Some(Self::ContentType), + Label::Str("id") => Some(Self::Id), + Label::Str("ref") => Some(Self::Ref), + Label::Str("ver") => Some(Self::Ver), + Label::Str("type") => Some(Self::Type), + Label::Str("reply") => Some(Self::Reply), + Label::Str("collaborators") => Some(Self::Collaborators), + Label::Str("section") => Some(Self::Section), + Label::Str("template") => Some(Self::Template), + Label::Str("parameters" | "brand_id" | "campaign_id" | "category_id") => { + Some(Self::Parameters) + }, + Label::Str(s) if s.eq_ignore_ascii_case("content-encoding") => { + Some(Self::ContentEncoding) + }, + _ => None, + } + } + + /// Convert to the corresponding COSE [`Label`]. + fn to_cose(self) -> Label<'static> { + match self { + Self::ContentType => Label::U8(3), + Self::Id => Label::Str("id"), + Self::Ref => Label::Str("ref"), + Self::Ver => Label::Str("ver"), + Self::Type => Label::Str("type"), + Self::Reply => Label::Str("reply"), + Self::Collaborators => Label::Str("collaborators"), + Self::Section => Label::Str("section"), + Self::Template => Label::Str("template"), + Self::Parameters => Label::Str("parameters"), + Self::ContentEncoding => Label::Str("content-encoding"), + } + } +} + +impl fmt::Display for SupportedLabel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::ContentType => write!(f, "content-type"), + v => v.to_cose().fmt(f), + } + } +} + +impl serde::ser::Serialize for SupportedField { + fn serialize(&self, serializer: S) -> Result + where S: serde::Serializer { + match self { + Self::Id(v) | Self::Ver(v) => v.serialize(serializer), + Self::Type(v) => v.serialize(serializer), + Self::ContentType(v) => v.serialize(serializer), + Self::ContentEncoding(v) => v.serialize(serializer), + Self::Ref(v) | Self::Reply(v) | Self::Template(v) | Self::Parameters(v) => { + v.serialize(serializer) + }, + Self::Collaborators(v) => v.serialize(serializer), + Self::Section(v) => v.serialize(serializer), + } + } +} + +impl<'de> serde::de::DeserializeSeed<'de> for SupportedLabel { + type Value = SupportedField; + + fn deserialize>(self, d: D) -> Result { + match self { + SupportedLabel::ContentType => { + Deserialize::deserialize(d).map(SupportedField::ContentType) + }, + SupportedLabel::Id => Deserialize::deserialize(d).map(SupportedField::Id), + SupportedLabel::Ref => Deserialize::deserialize(d).map(SupportedField::Ref), + SupportedLabel::Ver => Deserialize::deserialize(d).map(SupportedField::Ver), + SupportedLabel::Type => Deserialize::deserialize(d).map(SupportedField::Type), + SupportedLabel::Reply => Deserialize::deserialize(d).map(SupportedField::Reply), + SupportedLabel::Collaborators => { + Deserialize::deserialize(d).map(SupportedField::Collaborators) + }, + SupportedLabel::Section => Deserialize::deserialize(d).map(SupportedField::Section), + SupportedLabel::Template => Deserialize::deserialize(d).map(SupportedField::Template), + SupportedLabel::Parameters => { + 
Deserialize::deserialize(d).map(SupportedField::Parameters) + }, + SupportedLabel::ContentEncoding => { + Deserialize::deserialize(d).map(SupportedField::ContentEncoding) + }, + } + } +} + +impl minicbor::Decode<'_, crate::decode_context::DecodeContext> for Option { + fn decode( + d: &mut minicbor::Decoder<'_>, ctx: &mut crate::decode_context::DecodeContext, + ) -> Result { + const REPORT_CONTEXT: &str = "Metadata field decoding"; + + let Ok(key) = d + .decode::() + .inspect_err(|e| ctx.report().other(e.to_string().as_str(), REPORT_CONTEXT)) + else { + return Ok(None); + }; + + let cbor_bytes = cbork_utils::decode_helper::decode_any(d, REPORT_CONTEXT)?; + let mut d = minicbor::Decoder::new(cbor_bytes); + + let field = match key { + SupportedLabel::ContentType => d.decode().map(SupportedField::ContentType), + SupportedLabel::Id => { + d.decode_with(&mut catalyst_types::uuid::CborContext::Tagged) + .map(SupportedField::Id) + }, + SupportedLabel::Ref => { + d.decode_with(&mut ctx.policy().clone()) + .map(SupportedField::Ref) + }, + SupportedLabel::Ver => { + d.decode_with(&mut catalyst_types::uuid::CborContext::Tagged) + .map(SupportedField::Ver) + }, + SupportedLabel::Type => d.decode().map(SupportedField::Type), + SupportedLabel::Reply => { + d.decode_with(&mut ctx.policy().clone()) + .map(SupportedField::Reply) + }, + SupportedLabel::Collaborators => d.decode().map(SupportedField::Collaborators), + SupportedLabel::Section => d.decode().map(SupportedField::Section), + SupportedLabel::Template => { + d.decode_with(&mut ctx.policy().clone()) + .map(SupportedField::Template) + }, + SupportedLabel::Parameters => { + d.decode_with(&mut ctx.policy().clone()) + .map(SupportedField::Parameters) + }, + SupportedLabel::ContentEncoding => d.decode().map(SupportedField::ContentEncoding), + } + .inspect_err(|e| { + ctx.report().invalid_value( + &format!("CBOR COSE protected header {key}"), + &hex::encode(cbor_bytes), + &format!("{e}"), + REPORT_CONTEXT, + ); + }) + .ok(); + + Ok(field) + } +} + +impl minicbor::Decode<'_, C> for SupportedLabel { + fn decode( + d: &mut minicbor::Decoder<'_>, _ctx: &mut C, + ) -> Result { + let label = d.decode()?; + Self::from_cose(label).ok_or(minicbor::decode::Error::message(format!( + "Unsupported key {label}" + ))) + } +} + +impl minicbor::Encode<()> for SupportedField { + fn encode( + &self, e: &mut minicbor::Encoder, ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + let key = self.discriminant().to_cose(); + e.encode(key)?; + + match self { + SupportedField::ContentType(content_type) => content_type.encode(e, ctx), + SupportedField::Id(uuid_v7) | SupportedField::Ver(uuid_v7) => { + uuid_v7.encode(e, &mut catalyst_types::uuid::CborContext::Tagged) + }, + SupportedField::Ref(document_ref) + | SupportedField::Reply(document_ref) + | SupportedField::Template(document_ref) + | SupportedField::Parameters(document_ref) => document_ref.encode(e, ctx), + SupportedField::Type(doc_type) => doc_type.encode(e, ctx), + SupportedField::Collaborators(collaborators) => collaborators.encode(e, ctx), + SupportedField::Section(section) => section.encode(e, ctx), + SupportedField::ContentEncoding(content_encoding) => content_encoding.encode(e, ctx), + } + } +} diff --git a/rust/signed_doc/src/metadata/utils.rs b/rust/signed_doc/src/metadata/utils.rs deleted file mode 100644 index 0e54f10c439..00000000000 --- a/rust/signed_doc/src/metadata/utils.rs +++ /dev/null @@ -1,102 +0,0 @@ -//! 
Utility functions for metadata decoding fields - -use catalyst_types::{ - problem_report::ProblemReport, - uuid::{CborContext, UuidV4, UuidV7}, -}; -use coset::{CborSerializable, Label, ProtectedHeader}; - -/// Find a value for a predicate in the protected header. -pub(crate) fn cose_protected_header_find( - protected: &coset::ProtectedHeader, mut predicate: impl FnMut(&coset::Label) -> bool, -) -> Option<&coset::cbor::Value> { - protected - .header - .rest - .iter() - .find(|(key, _)| predicate(key)) - .map(|(_, value)| value) -} - -/// Tries to decode field by the `field_name` from the COSE protected header -pub(crate) fn decode_document_field_from_protected_header( - protected: &ProtectedHeader, field_name: &str, report_content: &str, report: &ProblemReport, -) -> Option -where T: for<'a> TryFrom<&'a coset::cbor::Value> { - if let Some(cbor_doc_field) = - cose_protected_header_find(protected, |key| key == &Label::Text(field_name.to_string())) - { - if let Ok(field) = T::try_from(cbor_doc_field) { - return Some(field); - } - report.conversion_error( - &format!("CBOR COSE protected header {field_name}"), - &format!("{cbor_doc_field:?}"), - "Expected a CBOR UUID", - &format!("{report_content}, decoding CBOR UUID for {field_name}",), - ); - } - None -} - -/// A convenient wrapper over the `UuidV4` type, to implement -/// `TryFrom` and `TryFrom for coset::cbor::Value` traits. -pub(crate) struct CborUuidV4(pub(crate) UuidV4); -impl TryFrom<&coset::cbor::Value> for CborUuidV4 { - type Error = anyhow::Error; - - fn try_from(value: &coset::cbor::Value) -> Result { - Ok(Self(decode_cbor_uuid(value)?)) - } -} -impl TryFrom for coset::cbor::Value { - type Error = anyhow::Error; - - fn try_from(value: CborUuidV4) -> Result { - encode_cbor_uuid(value.0) - } -} - -/// A convenient wrapper over the `UuidV7` type, to implement -/// `TryFrom` and `TryFrom for coset::cbor::Value` traits. -pub(crate) struct CborUuidV7(pub(crate) UuidV7); -impl TryFrom<&coset::cbor::Value> for CborUuidV7 { - type Error = anyhow::Error; - - fn try_from(value: &coset::cbor::Value) -> Result { - Ok(Self(decode_cbor_uuid(value)?)) - } -} -impl TryFrom for coset::cbor::Value { - type Error = anyhow::Error; - - fn try_from(value: CborUuidV7) -> Result { - encode_cbor_uuid(value.0) - } -} - -/// Encode `uuid::Uuid` type into `coset::cbor::Value`. -/// -/// This is used to encode `UuidV4` and `UuidV7` types. -fn encode_cbor_uuid>( - value: T, -) -> anyhow::Result { - let mut cbor_bytes = Vec::new(); - minicbor::encode_with(value, &mut cbor_bytes, &mut CborContext::Tagged) - .map_err(|e| anyhow::anyhow!("Unable to encode CBOR value, err: {e}"))?; - coset::cbor::Value::from_slice(&cbor_bytes) - .map_err(|e| anyhow::anyhow!("Invalid CBOR value, err: {e}")) -} - -/// Decode `From` type from `coset::cbor::Value`. -/// -/// This is used to decode `UuidV4` and `UuidV7` types. 
-fn decode_cbor_uuid minicbor::decode::Decode<'a, CborContext>>( - value: &coset::cbor::Value, -) -> anyhow::Result { - let mut cbor_bytes = Vec::new(); - coset::cbor::ser::into_writer(value, &mut cbor_bytes) - .map_err(|e| anyhow::anyhow!("Invalid CBOR value, err: {e}"))?; - minicbor::decode_with(&cbor_bytes, &mut CborContext::Tagged) - .map_err(|e| anyhow::anyhow!("Invalid UUID, err: {e}")) -} diff --git a/rust/signed_doc/src/providers.rs b/rust/signed_doc/src/providers.rs index 9fd41d1c63c..b839c8166e7 100644 --- a/rust/signed_doc/src/providers.rs +++ b/rust/signed_doc/src/providers.rs @@ -17,7 +17,7 @@ pub trait VerifyingKeyProvider { /// `CatalystSignedDocument` Provider trait pub trait CatalystSignedDocumentProvider: Send + Sync { - /// Try to get `CatalystSignedDocument` + /// Try to get `CatalystSignedDocument`from document reference fn try_get_doc( &self, doc_ref: &DocumentRef, ) -> impl Future>> + Send; @@ -38,24 +38,34 @@ pub mod tests { use std::{collections::HashMap, time::Duration}; - use catalyst_types::uuid::Uuid; - use super::{ - CatalystId, CatalystSignedDocument, CatalystSignedDocumentProvider, DocumentRef, - VerifyingKey, VerifyingKeyProvider, + CatalystId, CatalystSignedDocument, CatalystSignedDocumentProvider, VerifyingKey, + VerifyingKeyProvider, }; + use crate::{DocLocator, DocumentRef}; /// Simple testing implementation of `CatalystSignedDocumentProvider` - #[derive(Default)] - pub struct TestCatalystSignedDocumentProvider(HashMap); + #[derive(Default, Debug)] + + pub struct TestCatalystSignedDocumentProvider(HashMap); impl TestCatalystSignedDocumentProvider { - /// Inserts document into the `TestCatalystSignedDocumentProvider` + /// Inserts document into the `TestCatalystSignedDocumentProvider` where + /// if document reference is provided use that value. + /// if not use the id and version of the provided doc. /// /// # Errors - /// - Missing document id - pub fn add_document(&mut self, doc: CatalystSignedDocument) -> anyhow::Result<()> { - self.0.insert(doc.doc_id()?.uuid(), doc); + /// Returns error if document reference is not provided and its fail to create one + /// from the given doc. + pub fn add_document( + &mut self, doc_ref: Option, doc: &CatalystSignedDocument, + ) -> anyhow::Result<()> { + if let Some(dr) = doc_ref { + self.0.insert(dr, doc.clone()); + } else { + let dr = DocumentRef::new(doc.doc_id()?, doc.doc_ver()?, DocLocator::default()); + self.0.insert(dr, doc.clone()); + } Ok(()) } } @@ -64,7 +74,7 @@ pub mod tests { async fn try_get_doc( &self, doc_ref: &DocumentRef, ) -> anyhow::Result> { - Ok(self.0.get(&doc_ref.id.uuid()).cloned()) + Ok(self.0.get(doc_ref).cloned()) } fn future_threshold(&self) -> Option { diff --git a/rust/signed_doc/src/signature/mod.rs b/rust/signed_doc/src/signature/mod.rs index 31dec15b51a..4bb1988d74a 100644 --- a/rust/signed_doc/src/signature/mod.rs +++ b/rust/signed_doc/src/signature/mod.rs @@ -1,44 +1,34 @@ //! Catalyst Signed Document COSE Signature information. pub use catalyst_types::catalyst_id::CatalystId; -use catalyst_types::problem_report::ProblemReport; -use coset::CoseSignature; +use cbork_utils::{array::Array, decode_context::DecodeCtx, with_cbor_bytes::WithCborBytes}; +use minicbor::Decode; + +use crate::{decode_context::DecodeContext, Content, Metadata}; /// Catalyst Signed Document COSE Signature. 
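
A short sketch of the reworked `TestCatalystSignedDocumentProvider` API from the providers hunk above; it is illustrative only, assuming an async test body returning `anyhow::Result<()>` and a `CatalystSignedDocument` value `doc` constructed elsewhere.

```rust
let mut provider = TestCatalystSignedDocumentProvider::default();

// Without an explicit reference the provider indexes the document under its own
// (id, ver) pair and an empty locator...
provider.add_document(None, &doc)?;

// ...which is exactly the reference a caller would use to look it up again.
let doc_ref = DocumentRef::new(doc.doc_id()?, doc.doc_ver()?, DocLocator::default());
assert!(provider.try_get_doc(&doc_ref).await?.is_some());
```
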
#[derive(Debug, Clone)] pub struct Signature { /// Key ID kid: CatalystId, - /// COSE Signature - signature: CoseSignature, + /// Raw signature data + signature: Vec, } impl Signature { - /// Convert COSE Signature to `Signature`. - pub(crate) fn from_cose_sig(signature: CoseSignature, report: &ProblemReport) -> Option { - match CatalystId::try_from(signature.protected.header.key_id.as_ref()) { - Ok(kid) if kid.is_uri() => Some(Self { kid, signature }), - Ok(kid) => { - report.invalid_value( - "COSE signature protected header key ID", - &kid.to_string(), - &format!( - "COSE signature protected header key ID must be a Catalyst ID, missing URI schema {}", CatalystId::SCHEME - ), - "Converting COSE signature header key ID to CatalystId", - ); - None - }, - Err(e) => { - report.conversion_error( - "COSE signature protected header key ID", - &format!("{:?}", &signature.protected.header.key_id), - &format!("{e:?}"), - "Converting COSE signature header key ID to CatalystId", - ); - None - }, - } + /// Creates a `Signature` object from `kid` and raw `signature` bytes + pub(crate) fn new(kid: CatalystId, signature: Vec) -> Self { + Self { kid, signature } + } + + /// Return `kid` field (`CatalystId`), identifier who made a signature + pub fn kid(&self) -> &CatalystId { + &self.kid + } + + /// Return raw signature bytes itself + pub fn signature(&self) -> &[u8] { + &self.signature } } @@ -47,28 +37,9 @@ impl Signature { pub struct Signatures(Vec); impl Signatures { - /// Return a list of author IDs (short form of Catalyst IDs). - #[must_use] - pub(crate) fn authors(&self) -> Vec { - self.kids().into_iter().map(|k| k.as_short_id()).collect() - } - - /// Return a list of Document's Catalyst IDs. - #[must_use] - pub(crate) fn kids(&self) -> Vec { - self.0.iter().map(|sig| sig.kid.clone()).collect() - } - - /// Iterator of COSE signatures object with kids. - pub(crate) fn cose_signatures_with_kids( - &self, - ) -> impl Iterator + use<'_> { - self.0.iter().map(|sig| (&sig.signature, &sig.kid)) - } - - /// List of COSE signatures object. - pub(crate) fn cose_signatures(&self) -> impl Iterator + use<'_> { - self.0.iter().map(|sig| sig.signature.clone()) + /// Return an iterator over the signatures + pub fn iter(&self) -> impl Iterator + use<'_> { + self.0.iter() } /// Add a `Signature` object into the list @@ -87,21 +58,198 @@ impl Signatures { pub fn is_empty(&self) -> bool { self.0.is_empty() } +} + +/// Create a binary blob that will be signed. No support for unprotected headers. +/// +/// Described in [section 4.4 of RFC 8152](https://datatracker.ietf.org/doc/html/rfc8152#section-4.4). +pub(crate) fn tbs_data( + kid: &CatalystId, metadata: &WithCborBytes, content: &WithCborBytes, +) -> anyhow::Result> { + let mut e = minicbor::Encoder::new(Vec::new()); + + e.array(5)?; + e.str("Signature")?; + e.bytes(minicbor::to_vec(metadata)?.as_slice())?; // `body_protected` + e.bytes(protected_header_encode(kid)?.as_slice())?; // `sign_protected` + e.bytes(&[])?; // empty `external_aad` + e.encode(content)?; // `payload` + + Ok(e.into_writer()) +} + +impl minicbor::Encode<()> for Signature { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.array(3)?; + e.bytes( + protected_header_encode(&self.kid) + .map_err(minicbor::encode::Error::message)? 
+ .as_slice(), + )?; + // empty unprotected headers + e.map(0)?; + e.bytes(&self.signature)?; + Ok(()) + } +} + +impl minicbor::Decode<'_, DecodeContext> for Option { + fn decode( + d: &mut minicbor::Decoder<'_>, ctx: &mut DecodeContext, + ) -> Result { + let arr = Array::decode(d, &mut DecodeCtx::Deterministic)?; - /// Convert list of COSE Signature to `Signatures`. - pub(crate) fn from_cose_sig_list(cose_sigs: &[CoseSignature], report: &ProblemReport) -> Self { - let res = cose_sigs - .iter() - .cloned() - .enumerate() - .filter_map(|(idx, signature)| { - let sign = Signature::from_cose_sig(signature, report); - if sign.is_none() { - report.other(&format!("COSE signature protected header key ID at id {idx}"), "Converting COSE signatures list to Catalyst Signed Documents signatures list",); + match arr.as_slice() { + [kid_bytes, headers_bytes, signature_bytes] => { + let kid_bytes = minicbor::Decoder::new(kid_bytes).bytes()?; + let kid = protected_header_decode(kid_bytes, ctx) + .map_err(minicbor::decode::Error::message)?; + + // empty unprotected headers + let mut map = cbork_utils::map::Map::decode( + &mut minicbor::Decoder::new(headers_bytes.as_slice()), + &mut cbork_utils::decode_context::DecodeCtx::Deterministic, + )? + .into_iter(); + if map.next().is_some() { + ctx.report().unknown_field( + "unprotected headers", + "non empty unprotected headers", + "COSE signature unprotected headers must be empty", + ); } - sign - }).collect(); - Self(res) + let signature_bytes = minicbor::Decoder::new(signature_bytes).bytes()?; + let signature = signature_bytes.to_vec(); + + Ok(kid.map(|kid| Signature { kid, signature })) + }, + _ => { + Err(minicbor::decode::Error::message( + "COSE signature object must be a definite size array with 3 elements", + )) + }, + } + } +} + +impl minicbor::Encode<()> for Signatures { + fn encode( + &self, e: &mut minicbor::Encoder, _ctx: &mut (), + ) -> Result<(), minicbor::encode::Error> { + e.array( + self.0 + .len() + .try_into() + .map_err(minicbor::encode::Error::message)?, + )?; + for sign in self.iter() { + e.encode(sign)?; + } + Ok(()) + } +} + +impl minicbor::Decode<'_, DecodeContext> for Signatures { + fn decode( + d: &mut minicbor::Decoder<'_>, ctx: &mut DecodeContext, + ) -> Result { + let arr = Array::decode(d, &mut DecodeCtx::Deterministic).map_err(|e| { + minicbor::decode::Error::message(format!( + "COSE signatures array must be a definite size array: {e}" + )) + })?; + + let mut signatures = Vec::with_capacity(arr.len()); + for (idx, bytes) in arr.iter().enumerate() { + match minicbor::Decoder::new(bytes).decode_with(ctx)? { + Some(signature) => signatures.push(signature), + None => { + ctx.report().other( + &format!("COSE signature at id {idx}"), + "Cannot decode a single COSE signature from the array of signatures", + ); + }, + } + } + + Ok(Signatures(signatures)) + } +} + +/// Signatures protected header bytes +/// +/// Described in [section 3.1 of RFC 8152](https://datatracker.ietf.org/doc/html/rfc8152#section-3.1). +fn protected_header_encode(kid: &CatalystId) -> anyhow::Result> { + let mut p_header = minicbor::Encoder::new(Vec::new()); + // protected headers (kid field) + p_header + .map(1)? + .u8(4)? + .bytes(Vec::::from(kid).as_slice())?; + Ok(p_header.into_writer()) +} + +/// Signatures protected header decode from bytes. +/// Return error if its an invalid CBOR sequence. +/// Return None if cannot decode `CatalystId` bytes. +/// +/// Described in [section 3.1 of RFC 8152](https://datatracker.ietf.org/doc/html/rfc8152#section-3.1). 
+fn protected_header_decode( + bytes: &[u8], ctx: &mut DecodeContext, +) -> anyhow::Result> { + let mut map = cbork_utils::map::Map::decode( + &mut minicbor::Decoder::new(bytes), + &mut cbork_utils::decode_context::DecodeCtx::Deterministic, + )? + .into_iter(); + + if map.len() > 1 { + ctx.report().functional_validation( + "COSE signature protected header must only include the `kid` field", + "COSE signature protected header decoding", + ); } + + let Some(entry) = map.next() else { + anyhow::bail!("COSE signature protected header must include at least one entry"); + }; + + // protected headers (kid field) + anyhow::ensure!( + matches!( + minicbor::Decoder::new(entry.key_bytes.as_slice()).u8(), + Ok(4) + ), + "Missing COSE signature protected header `kid` field" + ); + + let kid = minicbor::Decoder::new(entry.value.as_slice()) + .bytes()? + .try_into() + .inspect_err(|e| { + ctx.report().conversion_error( + "COSE signature protected header `kid`", + &hex::encode(entry.value.as_slice()), + &format!("{e:?}"), + "Converting COSE signature header `kid` to CatalystId", + ); + }) + .ok() + .inspect(|kid: &CatalystId| { + if kid.is_id() { + ctx.report().invalid_value( + "COSE signature protected header key ID", + &kid.to_string(), + &format!( + "COSE signature protected header key ID must be a Catalyst ID, missing URI schema {}", + CatalystId::SCHEME + ), + "Converting COSE signature header key ID to CatalystId", + ); + } + }); + Ok(kid) } diff --git a/rust/signed_doc/src/validator/mod.rs b/rust/signed_doc/src/validator/mod.rs index 4bed4304a89..80d4d5870eb 100644 --- a/rust/signed_doc/src/validator/mod.rs +++ b/rust/signed_doc/src/validator/mod.rs @@ -5,50 +5,42 @@ pub(crate) mod utils; use std::{ collections::HashMap, - fmt, - sync::LazyLock, + sync::{Arc, LazyLock}, time::{Duration, SystemTime}, }; use anyhow::Context; -use catalyst_types::{ - catalyst_id::{role_index::RoleId, CatalystId}, - problem_report::ProblemReport, - uuid::{Uuid, UuidV4}, -}; -use coset::{CoseSign, CoseSignature}; +use catalyst_types::{catalyst_id::role_index::RoleId, problem_report::ProblemReport}; use rules::{ - ContentEncodingRule, ContentRule, ContentSchema, ContentTypeRule, ParametersRule, RefRule, - ReplyRule, Rules, SectionRule, SignatureKidRule, + ContentEncodingRule, ContentRule, ContentSchema, ContentTypeRule, LinkField, + ParameterLinkRefRule, ParametersRule, RefRule, ReplyRule, Rules, SectionRule, SignatureKidRule, }; use crate::{ doc_types::{ - CATEGORY_DOCUMENT_UUID_TYPE, COMMENT_DOCUMENT_UUID_TYPE, COMMENT_TEMPLATE_UUID_TYPE, - PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, PROPOSAL_DOCUMENT_UUID_TYPE, - PROPOSAL_TEMPLATE_UUID_TYPE, + BRAND_PARAMETERS, CAMPAIGN_PARAMETERS, CATEGORY_PARAMETERS, PROPOSAL, PROPOSAL_COMMENT, + PROPOSAL_COMMENT_FORM_TEMPLATE, PROPOSAL_FORM_TEMPLATE, PROPOSAL_SUBMISSION_ACTION, }, + metadata::DocType, providers::{CatalystSignedDocumentProvider, VerifyingKeyProvider}, + signature::{tbs_data, Signature}, CatalystSignedDocument, ContentEncoding, ContentType, }; /// A table representing a full set or validation rules per document id. -static DOCUMENT_RULES: LazyLock> = LazyLock::new(document_rules_init); - -/// Returns an [`UuidV4`] from the provided argument, panicking if the argument is -/// invalid. 
-#[allow(clippy::expect_used)] -fn expect_uuidv4(t: T) -> UuidV4 -where T: TryInto { - t.try_into().expect("Must be a valid UUID V4") -} - -/// `DOCUMENT_RULES` initialization function -#[allow(clippy::expect_used)] -fn document_rules_init() -> HashMap { - let mut document_rules_map = HashMap::new(); - - let proposal_document_rules = Rules { +static DOCUMENT_RULES: LazyLock>> = LazyLock::new(document_rules_init); + +/// Proposal +/// Require field: type, id, ver, template, parameters +/// +fn proposal_rule() -> Rules { + // Parameter can be either brand, campaign or category + let parameters = vec![ + BRAND_PARAMETERS.clone(), + CAMPAIGN_PARAMETERS.clone(), + CATEGORY_PARAMETERS.clone(), + ]; + Rules { content_type: ContentTypeRule { exp: ContentType::Json, }, @@ -57,11 +49,11 @@ fn document_rules_init() -> HashMap { optional: false, }, content: ContentRule::Templated { - exp_template_type: expect_uuidv4(PROPOSAL_TEMPLATE_UUID_TYPE), + exp_template_type: PROPOSAL_FORM_TEMPLATE.clone(), }, parameters: ParametersRule::Specified { - exp_parameters_type: expect_uuidv4(CATEGORY_DOCUMENT_UUID_TYPE), - optional: true, + exp_parameters_type: parameters.clone(), + optional: false, }, doc_ref: RefRule::NotSpecified, reply: ReplyRule::NotSpecified, @@ -69,11 +61,23 @@ fn document_rules_init() -> HashMap { kid: SignatureKidRule { exp: &[RoleId::Proposer], }, - }; - - document_rules_map.insert(PROPOSAL_DOCUMENT_UUID_TYPE, proposal_document_rules); + param_link_ref: ParameterLinkRefRule::Specified { + field: LinkField::Template, + }, + } +} - let comment_document_rules = Rules { +/// Proposal Comment +/// Require field: type, id, ver, ref, template, parameters +/// +fn proposal_comment_rule() -> Rules { + // Parameter can be either brand, campaign or category + let parameters = vec![ + BRAND_PARAMETERS.clone(), + CAMPAIGN_PARAMETERS.clone(), + CATEGORY_PARAMETERS.clone(), + ]; + Rules { content_type: ContentTypeRule { exp: ContentType::Json, }, @@ -82,23 +86,42 @@ fn document_rules_init() -> HashMap { optional: false, }, content: ContentRule::Templated { - exp_template_type: expect_uuidv4(COMMENT_TEMPLATE_UUID_TYPE), + exp_template_type: PROPOSAL_COMMENT_FORM_TEMPLATE.clone(), }, doc_ref: RefRule::Specified { - exp_ref_type: expect_uuidv4(PROPOSAL_DOCUMENT_UUID_TYPE), + exp_ref_type: PROPOSAL.clone(), optional: false, }, reply: ReplyRule::Specified { - exp_reply_type: expect_uuidv4(COMMENT_DOCUMENT_UUID_TYPE), + exp_reply_type: PROPOSAL_COMMENT.clone(), optional: true, }, - section: SectionRule::Specified { optional: true }, - parameters: ParametersRule::NotSpecified, + section: SectionRule::NotSpecified, + parameters: ParametersRule::Specified { + exp_parameters_type: parameters.clone(), + optional: false, + }, kid: SignatureKidRule { exp: &[RoleId::Role0], }, - }; - document_rules_map.insert(COMMENT_DOCUMENT_UUID_TYPE, comment_document_rules); + // Link field can be either template or ref + param_link_ref: ParameterLinkRefRule::Specified { + field: LinkField::Template, + }, + } +} + +/// Proposal Submission Action +/// Require fields: type, id, ver, ref, parameters +/// +#[allow(clippy::expect_used)] +fn proposal_submission_action_rule() -> Rules { + // Parameter can be either brand, campaign or category + let parameters = vec![ + BRAND_PARAMETERS.clone(), + CAMPAIGN_PARAMETERS.clone(), + CATEGORY_PARAMETERS.clone(), + ]; let proposal_action_json_schema = jsonschema::options() .with_draft(jsonschema::Draft::Draft7) @@ -109,7 +132,7 @@ fn document_rules_init() -> HashMap { .expect("Must be a valid json 
file"), ) .expect("Must be a valid json scheme file"); - let proposal_submission_action_rules = Rules { + Rules { content_type: ContentTypeRule { exp: ContentType::Json, }, @@ -119,11 +142,11 @@ fn document_rules_init() -> HashMap { }, content: ContentRule::Static(ContentSchema::Json(proposal_action_json_schema)), parameters: ParametersRule::Specified { - exp_parameters_type: expect_uuidv4(CATEGORY_DOCUMENT_UUID_TYPE), - optional: true, + exp_parameters_type: parameters, + optional: false, }, doc_ref: RefRule::Specified { - exp_ref_type: expect_uuidv4(PROPOSAL_DOCUMENT_UUID_TYPE), + exp_ref_type: PROPOSAL.clone(), optional: false, }, reply: ReplyRule::NotSpecified, @@ -131,11 +154,25 @@ fn document_rules_init() -> HashMap { kid: SignatureKidRule { exp: &[RoleId::Proposer], }, - }; + param_link_ref: ParameterLinkRefRule::Specified { + field: LinkField::Ref, + }, + } +} + +/// `DOCUMENT_RULES` initialization function +fn document_rules_init() -> HashMap> { + let mut document_rules_map = HashMap::new(); + + let proposal_rules = Arc::new(proposal_rule()); + let comment_rules = Arc::new(proposal_comment_rule()); + let action_rules = Arc::new(proposal_submission_action_rule()); + document_rules_map.insert(PROPOSAL.clone(), Arc::clone(&proposal_rules)); + document_rules_map.insert(PROPOSAL_COMMENT.clone(), Arc::clone(&comment_rules)); document_rules_map.insert( - PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - proposal_submission_action_rules, + PROPOSAL_SUBMISSION_ACTION.clone(), + Arc::clone(&action_rules), ); document_rules_map @@ -164,7 +201,7 @@ where Provider: CatalystSignedDocumentProvider { return Ok(false); } - let Some(rules) = DOCUMENT_RULES.get(&doc_type.uuid()) else { + let Some(rules) = DOCUMENT_RULES.get(doc_type) else { doc.report().invalid_value( "`type`", &doc.doc_type()?.to_string(), @@ -281,14 +318,6 @@ where Provider: CatalystSignedDocumentProvider { pub async fn validate_signatures( doc: &CatalystSignedDocument, provider: &impl VerifyingKeyProvider, ) -> anyhow::Result { - let Ok(cose_sign) = doc.as_cose_sign() else { - doc.report().other( - "Cannot build a COSE sign object", - "During encoding signed document as COSE SIGN", - ); - return Ok(false); - }; - if doc.signatures().is_empty() { doc.report().other( "Catalyst Signed Document is unsigned", @@ -299,10 +328,8 @@ pub async fn validate_signatures( let sign_rules = doc .signatures() - .cose_signatures_with_kids() - .map(|(signature, kid)| { - validate_signature(&cose_sign, signature, kid, provider, doc.report()) - }); + .iter() + .map(|sign| validate_signature(doc, sign, provider, doc.report())); let res = futures::future::join_all(sign_rules) .await @@ -316,12 +343,11 @@ pub async fn validate_signatures( /// A single signature validation function async fn validate_signature( - cose_sign: &CoseSign, signature: &CoseSignature, kid: &CatalystId, provider: &Provider, - report: &ProblemReport, + doc: &CatalystSignedDocument, sign: &Signature, provider: &Provider, report: &ProblemReport, ) -> anyhow::Result -where - Provider: VerifyingKeyProvider, -{ +where Provider: VerifyingKeyProvider { + let kid = sign.kid(); + let Some(pk) = provider.try_get_key(kid).await? 
else { report.other( &format!("Missing public key for {kid}."), @@ -330,11 +356,12 @@ where return Ok(false); }; - let tbs_data = cose_sign.tbs_data(&[], signature); - let Ok(signature_bytes) = signature.signature.as_slice().try_into() else { + let tbs_data = tbs_data(kid, doc.doc_meta(), doc.content()).context("Probably a bug, cannot build CBOR COSE bytes for signature verification from the structurally valid COSE object.")?; + + let Ok(signature_bytes) = sign.signature().try_into() else { report.invalid_value( "cose signature", - &format!("{}", signature.signature.len()), + &format!("{}", sign.signature().len()), &format!("must be {}", ed25519_dalek::Signature::BYTE_SIZE), "During encoding cose signature to bytes", ); @@ -360,9 +387,11 @@ mod tests { use uuid::{Timestamp, Uuid}; use crate::{ + builder::tests::Builder, + metadata::SupportedField, providers::{tests::TestCatalystSignedDocumentProvider, CatalystSignedDocumentProvider}, validator::{document_rules_init, validate_id_and_ver}, - Builder, UuidV7, + UuidV7, }; #[test] @@ -375,25 +404,23 @@ mod tests { let uuid_v7 = UuidV7::new(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Id(uuid_v7)) + .with_metadata_field(SupportedField::Ver(uuid_v7)) .build(); let is_valid = validate_id_and_ver(&doc, &provider).unwrap(); assert!(is_valid); - let ver = Uuid::new_v7(Timestamp::from_unix_time(now - 1, 0, 0, 0)); - let id = Uuid::new_v7(Timestamp::from_unix_time(now + 1, 0, 0, 0)); + let ver = Uuid::new_v7(Timestamp::from_unix_time(now - 1, 0, 0, 0)) + .try_into() + .unwrap(); + let id = Uuid::new_v7(Timestamp::from_unix_time(now + 1, 0, 0, 0)) + .try_into() + .unwrap(); assert!(ver < id); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": id.to_string(), - "ver": ver.to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Id(id)) + .with_metadata_field(SupportedField::Ver(ver)) .build(); let is_valid = validate_id_and_ver(&doc, &provider).unwrap(); @@ -404,13 +431,12 @@ mod tests { 0, 0, 0, - )); + )) + .try_into() + .unwrap(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": to_far_in_past.to_string(), - "ver": to_far_in_past.to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Id(to_far_in_past)) + .with_metadata_field(SupportedField::Ver(to_far_in_past)) .build(); let is_valid = validate_id_and_ver(&doc, &provider).unwrap(); @@ -421,13 +447,12 @@ mod tests { 0, 0, 0, - )); + )) + .try_into() + .unwrap(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": to_far_in_future.to_string(), - "ver": to_far_in_future.to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Id(to_far_in_future)) + .with_metadata_field(SupportedField::Ver(to_far_in_future)) .build(); let is_valid = validate_id_and_ver(&doc, &provider).unwrap(); diff --git a/rust/signed_doc/src/validator/rules/content_encoding.rs b/rust/signed_doc/src/validator/rules/content_encoding.rs index f75bcf81e3e..4860ae71061 100644 --- a/rust/signed_doc/src/validator/rules/content_encoding.rs +++ b/rust/signed_doc/src/validator/rules/content_encoding.rs @@ -24,6 +24,17 @@ impl ContentEncodingRule { ); return Ok(false); } + if content_encoding.decode(doc.encoded_content()).is_err() { + doc.report().invalid_value( + "payload", + &hex::encode(doc.encoded_content()), + &format!( + "Document content (payload) must decodable by the set content encoding 
type: {content_encoding}" + ), + "Invalid Document content value", + ); + return Ok(false); + } } else if !self.optional { doc.report().missing_field( "content-encoding", @@ -38,7 +49,7 @@ impl ContentEncodingRule { #[cfg(test)] mod tests { use super::*; - use crate::Builder; + use crate::{builder::tests::Builder, metadata::SupportedField}; #[tokio::test] async fn content_encoding_rule_test() { @@ -50,17 +61,18 @@ mod tests { }; let doc = Builder::new() - .with_json_metadata( - serde_json::json!({"content-encoding": content_encoding.to_string() }), - ) - .unwrap() + .with_metadata_field(SupportedField::ContentEncoding(content_encoding)) + .with_content(content_encoding.encode(&[1, 2, 3]).unwrap()) .build(); assert!(rule.check(&doc).await.unwrap()); + // empty content (empty bytes) could not be brotli decoded let doc = Builder::new() - .with_json_metadata(serde_json::json!({})) - .unwrap() + .with_metadata_field(SupportedField::ContentEncoding(content_encoding)) .build(); + assert!(!rule.check(&doc).await.unwrap()); + + let doc = Builder::new().build(); assert!(rule.check(&doc).await.unwrap()); rule.optional = false; diff --git a/rust/signed_doc/src/validator/rules/content_type.rs b/rust/signed_doc/src/validator/rules/content_type.rs index 26aa702fa7c..3386f8b9344 100644 --- a/rust/signed_doc/src/validator/rules/content_type.rs +++ b/rust/signed_doc/src/validator/rules/content_type.rs @@ -29,14 +29,14 @@ impl ContentTypeRule { ); return Ok(false); } - let Ok(content) = doc.doc_content().decoded_bytes() else { - doc.report().missing_field( - "payload", + let Ok(content) = doc.decoded_content() else { + doc.report().functional_validation( + "Invalid Document content, cannot get decoded bytes", "Cannot get a document content during the content type field validation", ); return Ok(false); }; - if content_type.validate(content).is_err() { + if self.validate(&content).is_err() { doc.report().invalid_value( "payload", &hex::encode(content), @@ -48,47 +48,145 @@ impl ContentTypeRule { Ok(true) } + + /// Validates the provided `content` bytes to be a defined `ContentType`. 
+ fn validate(&self, content: &[u8]) -> anyhow::Result<()> { + match self.exp { + ContentType::Json => { + if let Err(e) = serde_json::from_slice::<&serde_json::value::RawValue>(content) { + anyhow::bail!("Invalid {} content: {e}", self.exp) + } + }, + ContentType::Cddl => { + // TODO: not implemented yet + anyhow::bail!("`application/cddl` is valid but unavailable yet") + }, + ContentType::Cbor => { + let mut decoder = minicbor::Decoder::new(content); + + decoder.skip()?; + + if decoder.position() != content.len() { + anyhow::bail!("Unused bytes remain in the input after decoding") + } + }, + ContentType::JsonSchema => { + // TODO: not implemented yet + anyhow::bail!("`application/json+schema` is valid but unavailable yet") + }, + } + Ok(()) + } } #[cfg(test)] mod tests { use super::*; - use crate::Builder; + use crate::{builder::tests::Builder, metadata::SupportedField}; #[tokio::test] - async fn content_type_rule_test() { - let content_type = ContentType::Json; + async fn cbor_with_trailing_bytes_test() { + // valid cbor: {1: 2} but with trailing 0xff + let mut buf = Vec::new(); + let mut enc = minicbor::Encoder::new(&mut buf); + enc.map(1).unwrap().u8(1).unwrap().u8(2).unwrap(); + buf.push(0xFF); // extra byte + + let cbor_rule = ContentTypeRule { + exp: ContentType::Cbor, + }; + + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .with_content(buf) + .build(); + + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + } + + #[tokio::test] + async fn malformed_cbor_bytes_test() { + // 0xa2 means a map with 2 key-value pairs, but we only give 1 key + let invalid_bytes = &[0xA2, 0x01]; + + let cbor_rule = ContentTypeRule { + exp: ContentType::Cbor, + }; + + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .with_content(invalid_bytes.into()) + .build(); - let rule = ContentTypeRule { exp: content_type }; + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + } + + #[tokio::test] + async fn content_type_cbor_rule_test() { + let cbor_rule = ContentTypeRule { + exp: ContentType::Cbor, + }; + + // with json bytes + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .with_content(serde_json::to_vec(&serde_json::json!({})).unwrap()) + .build(); + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + + // with cbor bytes + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .with_content(minicbor::to_vec(minicbor::data::Token::Null).unwrap()) + .build(); + assert!(matches!(cbor_rule.check(&doc).await, Ok(true))); + + // without content + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .build(); + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + + // with empty content + let doc = Builder::new() + .with_metadata_field(SupportedField::ContentType(cbor_rule.exp)) + .build(); + assert!(matches!(cbor_rule.check(&doc).await, Ok(false))); + } + + #[tokio::test] + async fn content_type_json_rule_test() { + let json_rule = ContentTypeRule { + exp: ContentType::Json, + }; + // with json bytes let doc = Builder::new() - .with_json_metadata(serde_json::json!({"content-type": content_type.to_string() })) - .unwrap() - .with_decoded_content(serde_json::to_vec(&serde_json::json!({})).unwrap()) + .with_metadata_field(SupportedField::ContentType(json_rule.exp)) + .with_content(serde_json::to_vec(&serde_json::json!({})).unwrap()) .build(); - 
assert!(rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(true))); + // with cbor bytes let doc = Builder::new() - .with_json_metadata(serde_json::json!({"content-type": ContentType::Cbor.to_string() })) - .unwrap() - .with_decoded_content(serde_json::to_vec(&serde_json::json!({})).unwrap()) + .with_metadata_field(SupportedField::ContentType(json_rule.exp)) + .with_content(minicbor::to_vec(minicbor::data::Token::Null).unwrap()) .build(); - assert!(!rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(false))); + // without content let doc = Builder::new() - .with_json_metadata(serde_json::json!({"content-type": content_type.to_string() })) - .unwrap() + .with_metadata_field(SupportedField::ContentType(json_rule.exp)) .build(); - assert!(!rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(false))); + // with empty content let doc = Builder::new() - .with_json_metadata(serde_json::json!({"content-type": content_type.to_string() })) - .unwrap() - .with_decoded_content(vec![]) + .with_metadata_field(SupportedField::ContentType(json_rule.exp)) .build(); - assert!(!rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(false))); let doc = Builder::new().build(); - assert!(!rule.check(&doc).await.unwrap()); + assert!(matches!(json_rule.check(&doc).await, Ok(false))); } } diff --git a/rust/signed_doc/src/validator/rules/doc_ref.rs b/rust/signed_doc/src/validator/rules/doc_ref.rs index 53fec6825fe..a3ad0f59327 100644 --- a/rust/signed_doc/src/validator/rules/doc_ref.rs +++ b/rust/signed_doc/src/validator/rules/doc_ref.rs @@ -1,13 +1,10 @@ //! `ref` rule type impl. -use catalyst_types::{ - problem_report::ProblemReport, - uuid::{Uuid, UuidV4}, -}; +use catalyst_types::problem_report::ProblemReport; use crate::{ - providers::CatalystSignedDocumentProvider, validator::utils::validate_provided_doc, - CatalystSignedDocument, + providers::CatalystSignedDocumentProvider, validator::utils::validate_doc_refs, + CatalystSignedDocument, DocType, }; /// `ref` field validation rule @@ -16,7 +13,7 @@ pub(crate) enum RefRule { /// Is 'ref' specified Specified { /// expected `type` field of the referenced doc - exp_ref_type: UuidV4, + exp_ref_type: DocType, /// optional flag for the `ref` field optional: bool, }, @@ -29,6 +26,7 @@ impl RefRule { &self, doc: &CatalystSignedDocument, provider: &Provider, ) -> anyhow::Result where Provider: CatalystSignedDocumentProvider { + let context: &str = "Ref rule check"; if let Self::Specified { exp_ref_type, optional, @@ -36,13 +34,12 @@ impl RefRule { { if let Some(doc_ref) = doc.doc_meta().doc_ref() { let ref_validator = |ref_doc: CatalystSignedDocument| { - referenced_doc_check(&ref_doc, exp_ref_type.uuid(), "ref", doc.report()) + referenced_doc_check(&ref_doc, exp_ref_type, "ref", doc.report()) }; - return validate_provided_doc(&doc_ref, provider, doc.report(), ref_validator) - .await; + return validate_doc_refs(doc_ref, provider, doc.report(), ref_validator).await; } else if !optional { doc.report() - .missing_field("ref", "Document must have a ref field"); + .missing_field("ref", &format!("{context}, document must have ref field")); return Ok(false); } } @@ -51,7 +48,7 @@ impl RefRule { doc.report().unknown_field( "ref", &doc_ref.to_string(), - "Document does not expect to have a ref field", + &format!("{context}, document does not expect to have a ref field"), ); return Ok(false); } @@ -63,17 +60,19 @@ impl RefRule { /// A generic 
implementation of the referenced document validation. pub(crate) fn referenced_doc_check( - ref_doc: &CatalystSignedDocument, exp_ref_type: Uuid, field_name: &str, report: &ProblemReport, + ref_doc: &CatalystSignedDocument, exp_ref_type: &DocType, field_name: &str, + report: &ProblemReport, ) -> bool { let Ok(ref_doc_type) = ref_doc.doc_type() else { report.missing_field("type", "Referenced document must have type field"); return false; }; - if ref_doc_type.uuid() != exp_ref_type { + + if ref_doc_type != exp_ref_type { report.invalid_value( field_name, - ref_doc_type.to_string().as_str(), - exp_ref_type.to_string().as_str(), + &ref_doc_type.to_string(), + &exp_ref_type.to_string(), "Invalid referenced document type", ); return false; @@ -82,11 +81,15 @@ pub(crate) fn referenced_doc_check( } #[cfg(test)] +#[allow(clippy::similar_names, clippy::too_many_lines)] mod tests { - use catalyst_types::uuid::UuidV7; + use catalyst_types::uuid::{UuidV4, UuidV7}; use super::*; - use crate::{providers::tests::TestCatalystSignedDocumentProvider, Builder}; + use crate::{ + builder::tests::Builder, metadata::SupportedField, + providers::tests::TestCatalystSignedDocumentProvider, DocLocator, DocumentRef, + }; #[tokio::test] async fn ref_rule_specified_test() { @@ -96,98 +99,152 @@ mod tests { let valid_referenced_doc_id = UuidV7::new(); let valid_referenced_doc_ver = UuidV7::new(); + let different_id_and_ver_referenced_doc_id = UuidV7::new(); + let different_id_and_ver_referenced_doc_ver = UuidV7::new(); let another_type_referenced_doc_id = UuidV7::new(); let another_type_referenced_doc_ver = UuidV7::new(); let missing_type_referenced_doc_id = UuidV7::new(); let missing_type_referenced_doc_ver = UuidV7::new(); - // prepare replied documents + // Prepare provider documents { - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": valid_referenced_doc_id.to_string(), - "ver": valid_referenced_doc_ver.to_string(), - "type": exp_ref_type.to_string() - })) - .unwrap() + // Valid one + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_referenced_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_referenced_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_ref_type.into())) .build(); - provider.add_document(ref_doc).unwrap(); - - // reply doc with other `type` field - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": another_type_referenced_doc_id.to_string(), - "ver": another_type_referenced_doc_ver.to_string(), - "type": UuidV4::new().to_string() - })) - .unwrap() + provider.add_document(None, &doc).unwrap(); + + // Having different id and ver in registered reference + let doc_ref = DocumentRef::new(UuidV7::new(), UuidV7::new(), DocLocator::default()); + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(different_id_and_ver_referenced_doc_id)) + .with_metadata_field(SupportedField::Ver(different_id_and_ver_referenced_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_ref_type.into())) .build(); - provider.add_document(ref_doc).unwrap(); - - // missing `type` field in the referenced document - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_type_referenced_doc_id.to_string(), - "ver": missing_type_referenced_doc_ver.to_string(), - })) - .unwrap() + provider.add_document(Some(doc_ref), &doc).unwrap(); + + // Having another `type` field + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(another_type_referenced_doc_id)) + 
.with_metadata_field(SupportedField::Ver(another_type_referenced_doc_id)) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); + + // Missing `type` field in the referenced document + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(missing_type_referenced_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_type_referenced_doc_ver)) + .build(); + provider.add_document(None, &doc).unwrap(); } - // all correct + // Create a document where `ref` field is required and referencing a valid document in + // provider. Using doc ref of new implementation. let rule = RefRule::Specified { - exp_ref_type, + exp_ref_type: exp_ref_type.into(), optional: false, }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": {"id": valid_referenced_doc_id.to_string(), "ver": valid_referenced_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + valid_referenced_doc_id, + valid_referenced_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(rule.check(&doc, &provider).await.unwrap()); - // all correct, `ref` field is missing, but its optional + // Having multiple refs, where one ref doc is not found. + // Checking match all of + let doc = Builder::new() + .with_metadata_field(SupportedField::Ref( + vec![ + DocumentRef::new( + valid_referenced_doc_id, + valid_referenced_doc_ver, + DocLocator::default(), + ), + DocumentRef::new( + different_id_and_ver_referenced_doc_id, + different_id_and_ver_referenced_doc_ver, + DocLocator::default(), + ), + ] + .into(), + )) + .build(); + assert!(!rule.check(&doc, &provider).await.unwrap()); + + // Invalid the ref doc id and ver doesn't match the id and ver in ref doc ref + let doc = Builder::new() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + different_id_and_ver_referenced_doc_id, + different_id_and_ver_referenced_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .build(); + assert!(!rule.check(&doc, &provider).await.unwrap()); + + // All correct, `ref` field is missing, but its optional let rule = RefRule::Specified { - exp_ref_type, + exp_ref_type: exp_ref_type.into(), optional: true, }; let doc = Builder::new().build(); assert!(rule.check(&doc, &provider).await.unwrap()); - // missing `ref` field, but its required + // Missing `ref` field, but its required let rule = RefRule::Specified { - exp_ref_type, + exp_ref_type: exp_ref_type.into(), optional: false, }; let doc = Builder::new().build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // reference to the document with another `type` field + // Reference to the document with another `type` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": {"id": another_type_referenced_doc_id.to_string(), "ver": another_type_referenced_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + another_type_referenced_doc_id, + another_type_referenced_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // missing `type` field in the referenced document + // Missing `type` field in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": {"id": missing_type_referenced_doc_id.to_string(), "ver": missing_type_referenced_doc_ver.to_string() } - })) - .unwrap() + 
.with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + missing_type_referenced_doc_id, + missing_type_referenced_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // cannot find a referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": {"id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } @@ -203,8 +260,9 @@ mod tests { let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({"ref": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } diff --git a/rust/signed_doc/src/validator/rules/mod.rs b/rust/signed_doc/src/validator/rules/mod.rs index 165dcb043ac..47fafa5cb79 100644 --- a/rust/signed_doc/src/validator/rules/mod.rs +++ b/rust/signed_doc/src/validator/rules/mod.rs @@ -8,6 +8,7 @@ use crate::{providers::CatalystSignedDocumentProvider, CatalystSignedDocument}; mod content_encoding; mod content_type; mod doc_ref; +mod param_link_ref; mod parameters; mod reply; mod section; @@ -17,6 +18,7 @@ mod template; pub(crate) use content_encoding::ContentEncodingRule; pub(crate) use content_type::ContentTypeRule; pub(crate) use doc_ref::RefRule; +pub(crate) use param_link_ref::{LinkField, ParameterLinkRefRule}; pub(crate) use parameters::ParametersRule; pub(crate) use reply::ReplyRule; pub(crate) use section::SectionRule; @@ -41,6 +43,8 @@ pub(crate) struct Rules { pub(crate) parameters: ParametersRule, /// `kid` field validation rule pub(crate) kid: SignatureKidRule, + /// Link reference rule + pub(crate) param_link_ref: ParameterLinkRefRule, } impl Rules { @@ -52,12 +56,13 @@ impl Rules { let rules = [ self.content_type.check(doc).boxed(), self.content_encoding.check(doc).boxed(), - self.doc_ref.check(doc, provider).boxed(), self.content.check(doc, provider).boxed(), + self.doc_ref.check(doc, provider).boxed(), self.reply.check(doc, provider).boxed(), self.section.check(doc).boxed(), self.parameters.check(doc, provider).boxed(), self.kid.check(doc).boxed(), + self.param_link_ref.check(doc, provider).boxed(), ]; let res = futures::future::join_all(rules) diff --git a/rust/signed_doc/src/validator/rules/param_link_ref.rs b/rust/signed_doc/src/validator/rules/param_link_ref.rs new file mode 100644 index 00000000000..8c9206d6173 --- /dev/null +++ b/rust/signed_doc/src/validator/rules/param_link_ref.rs @@ -0,0 +1,187 @@ +//! Parameter linked reference rule impl. 
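+//! +//! In short: the rule follows the document's `ref` or `template` reference and requires the referenced document to declare exactly the same `parameters` value; for example, a Proposal and the Proposal Form Template it uses must point at the same brand, campaign or category parameters document (an illustrative summary of the checks below).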
+ +use crate::{ + providers::CatalystSignedDocumentProvider, validator::utils::validate_doc_refs, + CatalystSignedDocument, +}; + +/// Field that is being used for the linked ref +pub(crate) enum LinkField { + /// Ref field + Ref, + /// Template field + Template, +} + +/// Parameter Link reference validation rule +pub(crate) enum ParameterLinkRefRule { + /// Link ref specified + Specified { + /// Field that is being used for the linked ref + field: LinkField, + }, + /// Link ref is not specified + #[allow(dead_code)] + NotSpecified, +} + +impl ParameterLinkRefRule { + /// Validation rule + pub(crate) async fn check<Provider>( + &self, doc: &CatalystSignedDocument, provider: &Provider, + ) -> anyhow::Result<bool> + where Provider: CatalystSignedDocumentProvider { + let context: &str = "Parameter link ref rule check"; + if let Self::Specified { field } = self { + let param_link_ref_validator = |ref_doc: CatalystSignedDocument| { + // The parameters MUST be the same; if not, record the error + if doc.doc_meta().parameters() != ref_doc.doc_meta().parameters() { + doc.report().invalid_value( + "parameters", + &format!("Reference doc param: {:?}", ref_doc.doc_meta().parameters()), + &format!("Doc param: {:?}", doc.doc_meta().parameters()), + &format!("{context}, parameters must be the same"), + ); + return false; + } + true + }; + // Which field is used for the linked reference + let param_link_ref = match field { + LinkField::Ref => doc.doc_meta().doc_ref(), + LinkField::Template => doc.doc_meta().template(), + }; + + let Some(param_link_ref) = param_link_ref else { + doc.report() + .missing_field("Link ref", &format!("{context}: Invalid link reference")); + return Ok(false); + }; + + return validate_doc_refs( + param_link_ref, + provider, + doc.report(), + param_link_ref_validator, + ) + .await; + } + Ok(true) + } +} + +#[cfg(test)] +mod tests { + use catalyst_types::uuid::{UuidV4, UuidV7}; + + use crate::{ + builder::tests::Builder, + metadata::SupportedField, + providers::tests::TestCatalystSignedDocumentProvider, + validator::rules::param_link_ref::{LinkField, ParameterLinkRefRule}, + DocLocator, DocumentRef, + }; + #[tokio::test] + async fn param_link_ref_specified_test() { + let mut provider = TestCatalystSignedDocumentProvider::default(); + + let doc1_id = UuidV7::new(); + let doc1_ver = UuidV7::new(); + let doc2_id = UuidV7::new(); + let doc2_ver = UuidV7::new(); + + let doc_type = UuidV4::new(); + + let category_id = UuidV7::new(); + let category_ver = UuidV7::new(); + let category_type = UuidV4::new(); + + let campaign_id = UuidV7::new(); + let campaign_ver = UuidV7::new(); + let campaign_type = UuidV4::new(); + + // Prepare provider documents + { + // Doc being referenced - parameter MUST match + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(doc1_id)) + .with_metadata_field(SupportedField::Ver(doc1_ver)) + .with_metadata_field(SupportedField::Type(doc_type.into())) + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new(category_id, category_ver, DocLocator::default()), + DocumentRef::new(campaign_id, campaign_ver, DocLocator::default()), + ] + .into(), + )) + .build(); + provider.add_document(None, &doc).unwrap(); + + // Doc being referenced - parameter does not match + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(doc2_id)) + .with_metadata_field(SupportedField::Ver(doc2_ver)) + .with_metadata_field(SupportedField::Type(doc_type.into())) + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + campaign_id, + campaign_ver, +
DocLocator::default(), + )] + .into(), + )) + .build(); + provider.add_document(None, &doc).unwrap(); + + // Category doc + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(category_id)) + .with_metadata_field(SupportedField::Ver(category_ver)) + .with_metadata_field(SupportedField::Type(category_type.into())) + .build(); + provider.add_document(None, &doc).unwrap(); + + // Campaign doc + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(campaign_id)) + .with_metadata_field(SupportedField::Ver(campaign_ver)) + .with_metadata_field(SupportedField::Type(campaign_type.into())) + .build(); + provider.add_document(None, &doc).unwrap(); + } + + // Use Ref as a linked reference + let rule = ParameterLinkRefRule::Specified { + field: LinkField::Ref, + }; + // Parameter must match + let doc = Builder::new() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new(doc1_id, doc1_ver, DocLocator::default())].into(), + )) + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new(category_id, category_ver, DocLocator::default()), + DocumentRef::new(campaign_id, campaign_ver, DocLocator::default()), + ] + .into(), + )) + .build(); + assert!(rule.check(&doc, &provider).await.unwrap()); + + // Parameter does not match + let doc = Builder::new() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new(doc2_id, doc2_ver, DocLocator::default())].into(), + )) + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new(category_id, category_ver, DocLocator::default()), + DocumentRef::new(campaign_id, campaign_ver, DocLocator::default()), + ] + .into(), + )) + .build(); + assert!(!rule.check(&doc, &provider).await.unwrap()); + } +} diff --git a/rust/signed_doc/src/validator/rules/parameters.rs b/rust/signed_doc/src/validator/rules/parameters.rs index 290d158439d..01aadfdbb5b 100644 --- a/rust/signed_doc/src/validator/rules/parameters.rs +++ b/rust/signed_doc/src/validator/rules/parameters.rs @@ -1,11 +1,8 @@ //! `parameters` rule type impl. 
-use catalyst_types::uuid::UuidV4; - -use super::doc_ref::referenced_doc_check; use crate::{ - providers::CatalystSignedDocumentProvider, validator::utils::validate_provided_doc, - CatalystSignedDocument, + providers::CatalystSignedDocumentProvider, validator::utils::validate_doc_refs, + CatalystSignedDocument, DocType, }; /// `parameters` field validation rule @@ -14,11 +11,12 @@ pub(crate) enum ParametersRule { /// Is `parameters` specified Specified { /// expected `type` field of the parameter doc - exp_parameters_type: UuidV4, + exp_parameters_type: Vec, /// optional flag for the `parameters` field optional: bool, }, /// `parameters` is not specified + #[allow(unused)] NotSpecified, } @@ -28,30 +26,51 @@ impl ParametersRule { &self, doc: &CatalystSignedDocument, provider: &Provider, ) -> anyhow::Result where Provider: CatalystSignedDocumentProvider { + let context: &str = "Parameter rule check"; if let Self::Specified { exp_parameters_type, optional, } = self { - if let Some(parameters) = doc.doc_meta().parameters() { - let parameters_validator = |replied_doc: CatalystSignedDocument| { - referenced_doc_check( - &replied_doc, - exp_parameters_type.uuid(), - "parameters", - doc.report(), - ) + if let Some(parameters_ref) = doc.doc_meta().parameters() { + let parameters_validator = |ref_doc: CatalystSignedDocument| { + let Ok(ref_doc_type) = ref_doc.doc_type() else { + doc.report().missing_field( + "type", + &format!("{context}, Referenced document must have type field"), + ); + return false; + }; + + // Check that the type matches one of the expected ones + if exp_parameters_type + .iter() + .all(|exp_type| ref_doc_type != exp_type) + { + doc.report().invalid_value( + "parameters", + &ref_doc_type.to_string(), + &exp_parameters_type + .iter() + .fold(String::new(), |s, v| format!("{s}, {v}")), + &format!("{context}, Invalid referenced document type"), + ); + return false; + } + true }; - return validate_provided_doc( - ¶meters, + return validate_doc_refs( + parameters_ref, provider, doc.report(), parameters_validator, ) .await; } else if !optional { - doc.report() - .missing_field("parameters", "Document must have a parameters field"); + doc.report().missing_field( + "parameters", + &format!("{context}, document must have parameters field"), + ); return Ok(false); } } @@ -60,7 +79,7 @@ impl ParametersRule { doc.report().unknown_field( "parameters", ¶meters.to_string(), - "Document does not expect to have a parameters field", + &format!("{context}, document does not expect to have a parameters field"), ); return Ok(false); } @@ -71,112 +90,179 @@ impl ParametersRule { } #[cfg(test)] +#[allow(clippy::similar_names, clippy::too_many_lines)] mod tests { use catalyst_types::uuid::{UuidV4, UuidV7}; use super::*; - use crate::{providers::tests::TestCatalystSignedDocumentProvider, Builder}; + use crate::{ + builder::tests::Builder, metadata::SupportedField, + providers::tests::TestCatalystSignedDocumentProvider, DocLocator, DocumentRef, + }; #[tokio::test] async fn ref_rule_specified_test() { let mut provider = TestCatalystSignedDocumentProvider::default(); - let exp_parameters_type = UuidV4::new(); + let exp_parameters_cat_type = UuidV4::new(); + let exp_parameters_cam_type = UuidV4::new(); + let exp_parameters_brand_type = UuidV4::new(); + + let exp_param_type: Vec = vec![ + exp_parameters_cat_type.into(), + exp_parameters_cam_type.into(), + exp_parameters_brand_type.into(), + ]; let valid_category_doc_id = UuidV7::new(); let valid_category_doc_ver = UuidV7::new(); + let valid_brand_doc_id = 
UuidV7::new(); + let valid_brand_doc_ver = UuidV7::new(); let another_type_category_doc_id = UuidV7::new(); let another_type_category_doc_ver = UuidV7::new(); let missing_type_category_doc_id = UuidV7::new(); let missing_type_category_doc_ver = UuidV7::new(); - // prepare replied documents + // Prepare provider documents { - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": valid_category_doc_id.to_string(), - "ver": valid_category_doc_ver.to_string(), - "type": exp_parameters_type.to_string() - })) - .unwrap() + // Category doc + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_category_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_category_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_parameters_cat_type.into())) .build(); - provider.add_document(ref_doc).unwrap(); - - // reply doc with other `type` field - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": another_type_category_doc_id.to_string(), - "ver": another_type_category_doc_ver.to_string(), - "type": UuidV4::new().to_string() - })) - .unwrap() + provider.add_document(None, &doc).unwrap(); + + // Brand doc + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_brand_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_brand_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_parameters_cat_type.into())) .build(); - provider.add_document(ref_doc).unwrap(); - - // missing `type` field in the referenced document - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_type_category_doc_id.to_string(), - "ver": missing_type_category_doc_ver.to_string(), - })) - .unwrap() + provider.add_document(None, &doc).unwrap(); + + // Other type + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(another_type_category_doc_id)) + .with_metadata_field(SupportedField::Ver(another_type_category_doc_ver)) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); + + // Missing `type` field in the referenced document + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(missing_type_category_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_type_category_doc_ver)) + .build(); + provider.add_document(None, &doc).unwrap(); } - // all correct + // Create a document where `parameters` field is required and referencing a valid document + // in provider. Using doc ref of new implementation. 
let rule = ParametersRule::Specified { - exp_parameters_type, + exp_parameters_type: exp_param_type.clone(), optional: false, }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "parameters": {"id": valid_category_doc_id.to_string(), "ver": valid_category_doc_ver } - })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + valid_category_doc_id, + valid_category_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(rule.check(&doc, &provider).await.unwrap()); - // all correct, `parameters` field is missing, but its optional + // Parameters contain multiple ref + let doc = Builder::new() + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new( + valid_category_doc_id, + valid_category_doc_ver, + DocLocator::default(), + ), + DocumentRef::new( + valid_brand_doc_id, + valid_brand_doc_ver, + DocLocator::default(), + ), + ] + .into(), + )) + .build(); + assert!(rule.check(&doc, &provider).await.unwrap()); + + // Parameters contain multiple ref, but one of them is invalid (not registered). + let doc = Builder::new() + .with_metadata_field(SupportedField::Parameters( + vec![ + DocumentRef::new( + valid_category_doc_id, + valid_category_doc_ver, + DocLocator::default(), + ), + DocumentRef::new(UuidV7::new(), UuidV7::new(), DocLocator::default()), + ] + .into(), + )) + .build(); + assert!(!rule.check(&doc, &provider).await.unwrap()); + + // All correct, `parameters` field is missing, but its optional let rule = ParametersRule::Specified { - exp_parameters_type, + exp_parameters_type: exp_param_type.clone(), optional: true, }; let doc = Builder::new().build(); assert!(rule.check(&doc, &provider).await.unwrap()); - // missing `parameters` field, but its required + // Missing `parameters` field, but its required let rule = ParametersRule::Specified { - exp_parameters_type, + exp_parameters_type: exp_param_type, optional: false, }; let doc = Builder::new().build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // reference to the document with another `type` field + // Reference to the document with another `type` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "parameters": {"id": another_type_category_doc_id.to_string(), "ver": another_type_category_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + another_type_category_doc_id, + another_type_category_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // missing `type` field in the referenced document + // Missing `type` field in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "parameters": {"id": missing_type_category_doc_id.to_string(), "ver": missing_type_category_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + missing_type_category_doc_id, + missing_type_category_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); - // cannot find a referenced document + // Cannot find a referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "parameters": {"id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + 
.into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } @@ -192,8 +278,9 @@ mod tests { let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({"parameters": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + .with_metadata_field(SupportedField::Parameters( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } diff --git a/rust/signed_doc/src/validator/rules/reply.rs b/rust/signed_doc/src/validator/rules/reply.rs index 5ac256667db..43696f2adcd 100644 --- a/rust/signed_doc/src/validator/rules/reply.rs +++ b/rust/signed_doc/src/validator/rules/reply.rs @@ -1,11 +1,9 @@ //! `reply` rule type impl. -use catalyst_types::uuid::UuidV4; - use super::doc_ref::referenced_doc_check; use crate::{ - providers::CatalystSignedDocumentProvider, validator::utils::validate_provided_doc, - CatalystSignedDocument, + providers::CatalystSignedDocumentProvider, validator::utils::validate_doc_refs, + CatalystSignedDocument, DocType, }; /// `reply` field validation rule @@ -14,7 +12,7 @@ pub(crate) enum ReplyRule { /// Is 'reply' specified Specified { /// expected `type` field of the replied doc - exp_reply_type: UuidV4, + exp_reply_type: DocType, /// optional flag for the `ref` field optional: bool, }, @@ -28,50 +26,50 @@ impl ReplyRule { &self, doc: &CatalystSignedDocument, provider: &Provider, ) -> anyhow::Result where Provider: CatalystSignedDocumentProvider { + let context: &str = "Reply rule check"; if let Self::Specified { exp_reply_type, optional, } = self { - if let Some(reply) = doc.doc_meta().reply() { - let reply_validator = |replied_doc: CatalystSignedDocument| { - if !referenced_doc_check( - &replied_doc, - exp_reply_type.uuid(), - "reply", - doc.report(), - ) { + if let Some(reply_ref) = doc.doc_meta().reply() { + let reply_validator = |ref_doc: CatalystSignedDocument| { + // Validate type + if !referenced_doc_check(&ref_doc, exp_reply_type, "reply", doc.report()) { return false; } - let Some(doc_ref) = doc.doc_meta().doc_ref() else { + + // Get `ref` from both the doc and the ref doc + let Some(ref_doc_dr) = ref_doc.doc_meta().doc_ref() else { doc.report() - .missing_field("ref", "Document must have a ref field"); + .missing_field("Referenced doc `ref` field", context); return false; }; - let Some(replied_doc_ref) = replied_doc.doc_meta().doc_ref() else { - doc.report() - .missing_field("ref", "Referenced document must have ref field"); + let Some(doc_dr) = doc.doc_meta().doc_ref() else { + doc.report().missing_field("Document `ref` field", context); return false; }; - if replied_doc_ref.id != doc_ref.id { + // Checking the ref field of ref doc, it should match the ref field of the doc + // If not record the error + if ref_doc_dr != doc_dr { doc.report().invalid_value( - "reply", - doc_ref.id .to_string().as_str(), - replied_doc_ref.id.to_string().as_str(), - "Invalid referenced document. 
Document ID should aligned with the replied document.", - ); + "ref", + &format!("Reference doc ref: {ref_doc_dr}"), + &format!("Doc ref: {doc_dr}"), + &format!("{context}, ref must be the same"), + ); return false; } - true }; - return validate_provided_doc(&reply, provider, doc.report(), reply_validator) - .await; + return validate_doc_refs(reply_ref, provider, doc.report(), reply_validator).await; } else if !optional { - doc.report() - .missing_field("reply", "Document must have a reply field"); + doc.report().missing_field( + "reply", + &format!("{context}, document must have reply field"), + ); return Ok(false); } } @@ -80,7 +78,7 @@ impl ReplyRule { doc.report().unknown_field( "reply", &reply.to_string(), - "Document does not expect to have a reply field", + &format!("{context}, document does not expect to have a reply field"), ); return Ok(false); } @@ -95,7 +93,10 @@ mod tests { use catalyst_types::uuid::{UuidV4, UuidV7}; use super::*; - use crate::{providers::tests::TestCatalystSignedDocumentProvider, Builder}; + use crate::{ + builder::tests::Builder, metadata::SupportedField, + providers::tests::TestCatalystSignedDocumentProvider, DocLocator, DocumentRef, + }; #[allow(clippy::too_many_lines)] #[tokio::test] @@ -110,76 +111,104 @@ mod tests { let valid_replied_doc_ver = UuidV7::new(); let another_type_replied_doc_ver = UuidV7::new(); let another_type_replied_doc_id = UuidV7::new(); - let missing_ref_replied_doc_ver = UuidV7::new(); let missing_ref_replied_doc_id = UuidV7::new(); + let missing_ref_replied_doc_ver = UuidV7::new(); let missing_type_replied_doc_ver = UuidV7::new(); let missing_type_replied_doc_id = UuidV7::new(); - // prepare replied documents + // Prepare provider documents { - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "id": valid_replied_doc_id.to_string(), - "ver": valid_replied_doc_ver.to_string(), - "type": exp_reply_type.to_string() - })) - .unwrap() + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_replied_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_replied_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_reply_type.into())) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); - // reply doc with other `type` field - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "id": another_type_replied_doc_id.to_string(), - "ver": another_type_replied_doc_ver.to_string(), - "type": UuidV4::new().to_string() - })) - .unwrap() + // Reply doc with other `type` field + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(another_type_replied_doc_id)) + .with_metadata_field(SupportedField::Ver(another_type_replied_doc_ver)) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); - // missing `ref` field in the referenced document - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_ref_replied_doc_id.to_string(), - 
"ver": missing_ref_replied_doc_ver.to_string(), - "type": exp_reply_type.to_string() - })) - .unwrap() + // Missing `type` field in the referenced document + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(missing_type_replied_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_type_replied_doc_ver)) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); - // missing `type` field in the referenced document - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "id": missing_type_replied_doc_id.to_string(), - "ver": missing_type_replied_doc_ver.to_string(), - })) - .unwrap() + // Missing `ref` field in the referenced document + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(missing_ref_replied_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_ref_replied_doc_ver)) + .with_metadata_field(SupportedField::Type(exp_reply_type.into())) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); } - // all correct + // Create a document where `reply` field is required and referencing a valid document in + // provider. let rule = ReplyRule::Specified { - exp_reply_type, + exp_reply_type: exp_reply_type.into(), optional: false, }; + + // common_ref_id ref reply to valid_replied_doc_id. common_ref_id ref filed should match + // valid_replied_doc_id ref field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": { "id": valid_replied_doc_id.to_string(), "ver": valid_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + valid_replied_doc_id, + valid_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); - assert!(rule.check(&doc, &provider).await.unwrap()); + assert!( + rule.check(&doc, &provider).await.unwrap(), + "{:?}", + doc.problem_report() + ); // all correct, `reply` field is missing, but its optional let rule = ReplyRule::Specified { - exp_reply_type, + exp_reply_type: exp_reply_type.into(), optional: true, }; let doc = Builder::new().build(); @@ -187,73 +216,136 @@ mod tests { // missing `reply` field, but its required let rule = ReplyRule::Specified { - exp_reply_type, + exp_reply_type: exp_reply_type.into(), optional: false, }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - })) - .unwrap() + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `ref` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "reply": { "id": valid_replied_doc_id.to_string(), "ver": valid_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + valid_replied_doc_id, + valid_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) .build(); 
assert!(!rule.check(&doc, &provider).await.unwrap()); // reference to the document with another `type` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": { "id": another_type_replied_doc_id.to_string(), "ver": another_type_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + another_type_replied_doc_id, + another_type_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `ref` field in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": { "id": missing_ref_replied_doc_id.to_string(), "ver": missing_type_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + missing_ref_replied_doc_id, + missing_ref_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `type` field in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": { "id": missing_type_replied_doc_id.to_string(), "ver": missing_type_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + missing_type_replied_doc_id, + missing_type_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // `ref` field does not align with the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() }, - "reply": { "id": valid_replied_doc_id.to_string(), "ver": valid_replied_doc_ver.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + valid_replied_doc_id, + valid_replied_doc_ver, + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // cannot find a referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "ref": { "id": common_ref_id.to_string(), "ver": common_ref_ver.to_string() }, - "reply": {"id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + .into(), + )) + .with_metadata_field(SupportedField::Ref( + vec![DocumentRef::new( + common_ref_id, + common_ref_ver, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, 
&provider).await.unwrap()); } @@ -269,8 +361,9 @@ mod tests { let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); let doc = Builder::new() - .with_json_metadata(serde_json::json!({"reply": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + .with_metadata_field(SupportedField::Reply( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } diff --git a/rust/signed_doc/src/validator/rules/section.rs b/rust/signed_doc/src/validator/rules/section.rs index 87203534254..46a51cc9c76 100644 --- a/rust/signed_doc/src/validator/rules/section.rs +++ b/rust/signed_doc/src/validator/rules/section.rs @@ -5,6 +5,7 @@ use crate::CatalystSignedDocument; /// `section` field validation rule pub(crate) enum SectionRule { /// Is 'section' specified + #[allow(dead_code)] Specified { /// optional flag for the `section` field optional: bool, @@ -42,15 +43,12 @@ impl SectionRule { #[cfg(test)] mod tests { use super::*; - use crate::Builder; + use crate::{builder::tests::Builder, metadata::SupportedField}; #[tokio::test] async fn section_rule_specified_test() { let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "section": "$".to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Section("$".parse().unwrap())) .build(); let rule = SectionRule::Specified { optional: false }; assert!(rule.check(&doc).await.unwrap()); @@ -72,10 +70,7 @@ mod tests { assert!(rule.check(&doc).await.unwrap()); let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "section": "$".to_string() - })) - .unwrap() + .with_metadata_field(SupportedField::Section("$".parse().unwrap())) .build(); assert!(!rule.check(&doc).await.unwrap()); } diff --git a/rust/signed_doc/src/validator/rules/signature_kid.rs b/rust/signed_doc/src/validator/rules/signature_kid.rs index 2e45517b8e3..60d8f2e1018 100644 --- a/rust/signed_doc/src/validator/rules/signature_kid.rs +++ b/rust/signed_doc/src/validator/rules/signature_kid.rs @@ -47,7 +47,7 @@ mod tests { use ed25519_dalek::ed25519::signature::Signer; use super::*; - use crate::{Builder, ContentType}; + use crate::{builder::tests::Builder, metadata::SupportedField, ContentType}; #[tokio::test] async fn signature_kid_rule_test() { @@ -60,15 +60,12 @@ mod tests { let kid = CatalystId::new("cardano", None, pk).with_role(RoleId::Role0); let doc = Builder::new() - .with_decoded_content(serde_json::to_vec(&serde_json::Value::Null).unwrap()) - .with_json_metadata(serde_json::json!({ - "type": UuidV4::new().to_string(), - "id": UuidV7::new().to_string(), - "ver": UuidV7::new().to_string(), - "content-type": ContentType::Json.to_string(), - })) - .unwrap() - .add_signature(|m| sk.sign(&m).to_vec(), &kid) + .with_metadata_field(SupportedField::Id(UuidV7::new())) + .with_metadata_field(SupportedField::Ver(UuidV7::new())) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) + .with_metadata_field(SupportedField::ContentType(ContentType::Json)) + .with_content(vec![1, 2, 3]) + .add_signature(|m| sk.sign(&m).to_vec(), kid) .unwrap() .build(); diff --git a/rust/signed_doc/src/validator/rules/template.rs b/rust/signed_doc/src/validator/rules/template.rs index 0d5b0c9aaab..4253a16b99b 100644 --- a/rust/signed_doc/src/validator/rules/template.rs +++ b/rust/signed_doc/src/validator/rules/template.rs @@ -2,12 +2,10 @@ use std::fmt::Write; -use catalyst_types::uuid::UuidV4; - use super::doc_ref::referenced_doc_check; use crate::{ metadata::ContentType, 
providers::CatalystSignedDocumentProvider, - validator::utils::validate_provided_doc, CatalystSignedDocument, + validator::utils::validate_doc_refs, CatalystSignedDocument, DocType, }; /// Enum represents different content schemas, against which documents content would be @@ -21,9 +19,10 @@ pub(crate) enum ContentSchema { /// Document's content validation rule pub(crate) enum ContentRule { /// Based on the 'template' field and loaded corresponding template document + #[allow(dead_code)] Templated { /// expected `type` field of the template - exp_template_type: UuidV4, + exp_template_type: DocType, }, /// Statically defined document's content schema. /// `template` field should not been specified @@ -36,56 +35,47 @@ pub(crate) enum ContentRule { impl ContentRule { /// Field validation rule + #[allow(dead_code)] pub(crate) async fn check( &self, doc: &CatalystSignedDocument, provider: &Provider, ) -> anyhow::Result where Provider: CatalystSignedDocumentProvider { + let context = "Content/Template rule check"; if let Self::Templated { exp_template_type } = self { let Some(template_ref) = doc.doc_meta().template() else { doc.report() - .missing_field("template", "Document must have a template field"); + .missing_field("template", &format!("{context}, doc")); return Ok(false); }; - let template_validator = |template_doc: CatalystSignedDocument| { - if !referenced_doc_check( - &template_doc, - exp_template_type.uuid(), - "template", - doc.report(), - ) { + if !referenced_doc_check(&template_doc, exp_template_type, "template", doc.report()) + { return false; } - let Ok(template_content_type) = template_doc.doc_content_type() else { doc.report().missing_field( "content-type", - "Referenced template document must have a content-type field", + &format!("{context}, referenced document must have a content-type field"), ); return false; }; match template_content_type { ContentType::Json => templated_json_schema_check(doc, &template_doc), - ContentType::Cbor => { + ContentType::Cddl | ContentType::Cbor | ContentType::JsonSchema => { // TODO: not implemented yet true }, } }; - return validate_provided_doc( - &template_ref, - provider, - doc.report(), - template_validator, - ) - .await; + return validate_doc_refs(template_ref, provider, doc.report(), template_validator) + .await; } if let Self::Static(content_schema) = self { if let Some(template) = doc.doc_meta().template() { doc.report().unknown_field( "template", &template.to_string(), - "Document does not expect to have a template field", + &format!("{context} Static, Document does not expect to have a template field",) ); return Ok(false); } @@ -97,7 +87,7 @@ impl ContentRule { doc.report().unknown_field( "template", &template.to_string(), - "Document does not expect to have a template field", + &format!("{context} Not Specified, Document does not expect to have a template field",) ); return Ok(false); } @@ -112,14 +102,14 @@ impl ContentRule { fn templated_json_schema_check( doc: &CatalystSignedDocument, template_doc: &CatalystSignedDocument, ) -> bool { - let Ok(template_content) = template_doc.doc_content().decoded_bytes() else { - doc.report().missing_field( - "payload", - "Referenced template document must have a content", + let Ok(template_content) = template_doc.decoded_content() else { + doc.report().functional_validation( + "Invalid document content, cannot get decoded bytes", + "Cannot get a referenced template document content during the templated validation", ); return false; }; - let Ok(template_json_schema) = 
serde_json::from_slice(template_content) else { + let Ok(template_json_schema) = serde_json::from_slice(&template_content) else { doc.report().functional_validation( "Template document content must be json encoded", "Invalid referenced template document content", @@ -139,15 +129,22 @@ fn templated_json_schema_check( content_schema_check(doc, &ContentSchema::Json(schema_validator)) } - +#[allow(dead_code)] /// Validating the document's content against the provided schema fn content_schema_check(doc: &CatalystSignedDocument, schema: &ContentSchema) -> bool { - let Ok(doc_content) = doc.doc_content().decoded_bytes() else { + let Ok(doc_content) = doc.decoded_content() else { + doc.report().functional_validation( + "Invalid Document content, cannot get decoded bytes", + "Cannot get a document content during the templated validation", + ); + return false; + }; + if doc_content.is_empty() { doc.report() .missing_field("payload", "Document must have a content"); return false; - }; - let Ok(doc_json) = serde_json::from_slice(doc_content) else { + } + let Ok(doc_json) = serde_json::from_slice(&doc_content) else { doc.report().functional_validation( "Document content must be json encoded", "Invalid referenced template document content", @@ -181,10 +178,13 @@ fn content_schema_check(doc: &CatalystSignedDocument, schema: &ContentSchema) -> #[cfg(test)] mod tests { - use catalyst_types::uuid::UuidV7; + use catalyst_types::uuid::{UuidV4, UuidV7}; use super::*; - use crate::{providers::tests::TestCatalystSignedDocumentProvider, Builder}; + use crate::{ + builder::tests::Builder, metadata::SupportedField, + providers::tests::TestCatalystSignedDocumentProvider, DocLocator, DocumentRef, + }; #[allow(clippy::too_many_lines)] #[tokio::test] @@ -203,181 +203,204 @@ mod tests { let missing_content_template_doc_id = UuidV7::new(); let invalid_content_template_doc_id = UuidV7::new(); - // prepare replied documents + // Prepare provider documents { - let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": valid_template_doc_id.to_string(), - "ver": valid_template_doc_id.to_string(), - "type": exp_template_type.to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() - .with_decoded_content(json_schema.clone()) + let doc = Builder::new() + .with_metadata_field(SupportedField::Id(valid_template_doc_id)) + .with_metadata_field(SupportedField::Ver(valid_template_doc_id)) + .with_metadata_field(SupportedField::Type(exp_template_type.into())) + .with_metadata_field(SupportedField::ContentType(content_type)) + .with_content(json_schema.clone()) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &doc).unwrap(); // reply doc with other `type` field let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": another_type_template_doc_id.to_string(), - "ver": another_type_template_doc_id.to_string(), - "type": UuidV4::new().to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() - .with_decoded_content(json_schema.clone()) + .with_metadata_field(SupportedField::Id(another_type_template_doc_id)) + .with_metadata_field(SupportedField::Ver(another_type_template_doc_id)) + .with_metadata_field(SupportedField::Type(UuidV4::new().into())) + .with_metadata_field(SupportedField::ContentType(content_type)) + .with_content(json_schema.clone()) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); // missing `type` field in the referenced document let ref_doc = Builder::new() - 
.with_json_metadata(serde_json::json!({ - "id": missing_type_template_doc_id.to_string(), - "ver": missing_type_template_doc_id.to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() - .with_decoded_content(json_schema.clone()) + .with_metadata_field(SupportedField::Id(missing_type_template_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_type_template_doc_id)) + .with_metadata_field(SupportedField::ContentType(content_type)) + .with_content(json_schema.clone()) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); // missing `content-type` field in the referenced document let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_content_type_template_doc_id.to_string(), - "ver": missing_content_type_template_doc_id.to_string(), - "type": exp_template_type.to_string(), - })) - .unwrap() - .with_decoded_content(json_schema.clone()) + .with_metadata_field(SupportedField::Id(missing_content_type_template_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_content_type_template_doc_id)) + .with_metadata_field(SupportedField::Type(exp_template_type.into())) + .with_content(json_schema.clone()) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); // missing content let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": missing_content_template_doc_id.to_string(), - "ver": missing_content_template_doc_id.to_string(), - "type": exp_template_type.to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() + .with_metadata_field(SupportedField::Id(missing_content_template_doc_id)) + .with_metadata_field(SupportedField::Ver(missing_content_template_doc_id)) + .with_metadata_field(SupportedField::Type(exp_template_type.into())) + .with_metadata_field(SupportedField::ContentType(content_type)) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); // invalid content, must be json encoded let ref_doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "id": invalid_content_template_doc_id.to_string(), - "ver": invalid_content_template_doc_id.to_string(), - "type": exp_template_type.to_string(), - "content-type": content_type.to_string(), - })) - .unwrap() - .with_decoded_content(vec![]) + .with_metadata_field(SupportedField::Id(invalid_content_template_doc_id)) + .with_metadata_field(SupportedField::Ver(invalid_content_template_doc_id)) + .with_metadata_field(SupportedField::Type(exp_template_type.into())) + .with_metadata_field(SupportedField::ContentType(content_type)) + .with_content(vec![]) .build(); - provider.add_document(ref_doc).unwrap(); + provider.add_document(None, &ref_doc).unwrap(); } - // all correct - let rule = ContentRule::Templated { exp_template_type }; + // Create a document where `templates` field is required and referencing a valid document + // in provider. Using doc ref of new implementation. 
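+        // In the new form a document reference is a `DocumentRefs` collection: each
+        // entry is a `DocumentRef::new(id, ver, locator)`, and a `Vec<DocumentRef>`
+        // converts into `DocumentRefs` via `.into()`. A minimal sketch (with `id`/`ver`
+        // standing in for the template document's UUIDv7 values):
+        //
+        //     let template_ref: DocumentRefs =
+        //         vec![DocumentRef::new(id, ver, DocLocator::default())].into();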
+ let rule = ContentRule::Templated { + exp_template_type: exp_template_type.into(), + }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": valid_template_doc_id.to_string(), "ver": valid_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + valid_template_doc_id, + valid_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(rule.check(&doc, &provider).await.unwrap()); // missing `template` field, but its required - let doc = Builder::new() - .with_json_metadata(serde_json::json!({})) - .unwrap() - .with_decoded_content(json_content.clone()) - .build(); + let doc = Builder::new().with_content(json_content.clone()).build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing content - let rule = ContentRule::Templated { exp_template_type }; + let rule = ContentRule::Templated { + exp_template_type: exp_template_type.into(), + }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": valid_template_doc_id.to_string(), "ver": valid_template_doc_id.to_string() } - })) - .unwrap() + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + valid_template_doc_id, + valid_template_doc_id, + DocLocator::default(), + )] + .into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // content not a json encoded - let rule = ContentRule::Templated { exp_template_type }; + let rule = ContentRule::Templated { + exp_template_type: exp_template_type.into(), + }; let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": valid_template_doc_id.to_string(), "ver": valid_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(vec![]) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + valid_template_doc_id, + valid_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(vec![1, 2, 3]) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // reference to the document with another `type` field let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": another_type_template_doc_id.to_string(), "ver": another_type_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + another_type_template_doc_id, + another_type_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `type` field in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": missing_type_template_doc_id.to_string(), "ver": missing_type_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + missing_type_template_doc_id, + missing_type_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing `content-type` field in the referenced doc - let rule = ContentRule::Templated { exp_template_type }; + let rule = ContentRule::Templated { + exp_template_type: exp_template_type.into(), + }; let doc = Builder::new() - 
.with_json_metadata(serde_json::json!({ - "template": {"id": missing_content_type_template_doc_id.to_string(), "ver": missing_content_type_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + missing_content_type_template_doc_id, + missing_content_type_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // missing content in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": missing_content_template_doc_id.to_string(), "ver": missing_content_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + missing_content_template_doc_id, + missing_content_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // content not a json encoded in the referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": invalid_content_template_doc_id.to_string(), "ver": invalid_content_template_doc_id.to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + invalid_content_template_doc_id, + invalid_content_template_doc_id, + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // cannot find a referenced document let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "template": {"id": UuidV7::new().to_string(), "ver": UuidV7::new().to_string() } - })) - .unwrap() - .with_decoded_content(json_content.clone()) + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )] + .into(), + )) + .with_content(json_content.clone()) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } @@ -397,9 +420,7 @@ mod tests { // all correct let rule = ContentRule::Static(json_schema); - let doc = Builder::new() - .with_decoded_content(json_content.clone()) - .build(); + let doc = Builder::new().with_content(json_content.clone()).build(); assert!(rule.check(&doc, &provider).await.unwrap()); // missing content @@ -407,15 +428,17 @@ mod tests { assert!(!rule.check(&doc, &provider).await.unwrap()); // content not a json encoded - let doc = Builder::new().with_decoded_content(vec![]).build(); + let doc = Builder::new().with_content(vec![1, 2, 3]).build(); assert!(!rule.check(&doc, &provider).await.unwrap()); // defined `template` field which should be absent let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); - let doc = Builder::new().with_decoded_content(json_content) - .with_json_metadata(serde_json::json!({"template": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + let doc = Builder::new() + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) + .with_content(json_content) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } @@ -432,8 +455,9 @@ mod tests { let ref_id = UuidV7::new(); let ref_ver = UuidV7::new(); let doc = Builder::new() - 
.with_json_metadata(serde_json::json!({"template": {"id": ref_id.to_string(), "ver": ref_ver.to_string() } })) - .unwrap() + .with_metadata_field(SupportedField::Template( + vec![DocumentRef::new(ref_id, ref_ver, DocLocator::default())].into(), + )) .build(); assert!(!rule.check(&doc, &provider).await.unwrap()); } diff --git a/rust/signed_doc/src/validator/utils.rs b/rust/signed_doc/src/validator/utils.rs index 4b25a9bbfaf..1dc2a06cb34 100644 --- a/rust/signed_doc/src/validator/utils.rs +++ b/rust/signed_doc/src/validator/utils.rs @@ -2,7 +2,9 @@ use catalyst_types::problem_report::ProblemReport; -use crate::{providers::CatalystSignedDocumentProvider, CatalystSignedDocument, DocumentRef}; +use crate::{ + providers::CatalystSignedDocumentProvider, CatalystSignedDocument, DocumentRef, DocumentRefs, +}; /// A helper validation document function, which validates a document from the /// `ValidationDataProvider`. @@ -13,13 +15,56 @@ where Provider: CatalystSignedDocumentProvider, Validator: Fn(CatalystSignedDocument) -> bool, { + const CONTEXT: &str = "Validation data provider"; + + // General check for document ref + + // Getting the Signed Document instance from a doc ref. + // The reference document must exist if let Some(doc) = provider.try_get_doc(doc_ref).await? { + let id = doc + .doc_id() + .inspect_err(|_| report.missing_field("id", CONTEXT))?; + + let ver = doc + .doc_ver() + .inspect_err(|_| report.missing_field("ver", CONTEXT))?; + // id and version must match the values in ref doc + if &id != doc_ref.id() && &ver != doc_ref.ver() { + report.invalid_value( + "id and version", + &format!("id: {id}, ver: {ver}"), + &format!("id: {}, ver: {}", doc_ref.id(), doc_ref.ver()), + CONTEXT, + ); + return Ok(false); + } Ok(validator(doc)) } else { report.functional_validation( format!("Cannot retrieve a document {doc_ref}").as_str(), - "Validation data provider could not return a corresponding document.", + CONTEXT, ); Ok(false) } } + +/// Validate the document references +/// Document all possible error in doc report (no fail fast) +pub(crate) async fn validate_doc_refs( + doc_refs: &DocumentRefs, provider: &Provider, report: &ProblemReport, validator: Validator, +) -> anyhow::Result +where + Provider: CatalystSignedDocumentProvider, + Validator: Fn(CatalystSignedDocument) -> bool, +{ + let mut all_valid = true; + + for dr in doc_refs.doc_refs() { + let is_valid = validate_provided_doc(dr, provider, report, &validator).await?; + if !is_valid { + all_valid = false; + } + } + Ok(all_valid) +} diff --git a/rust/signed_doc/tests/comment.rs b/rust/signed_doc/tests/comment.rs index 1c746e589c7..f96dab12c54 100644 --- a/rust/signed_doc/tests/comment.rs +++ b/rust/signed_doc/tests/comment.rs @@ -1,143 +1,345 @@ -//! Integration test for comment document validation part. +//! Test for Proposal Comment document. +//! Require fields: type, id, ver, ref, template, parameters +//! 
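+//! The tests below build shared fixture documents (brand parameters, proposal,
+//! comment template and a referenced comment document) as `LazyLock` statics,
+//! register them in a `TestCatalystSignedDocumentProvider`, and then validate the
+//! comment document under test against that provider.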
-use catalyst_signed_doc::{providers::tests::TestCatalystSignedDocumentProvider, *}; +use std::sync::LazyLock; + +use catalyst_signed_doc::{ + providers::tests::{TestCatalystSignedDocumentProvider, TestVerifyingKeyProvider}, + *, +}; use catalyst_types::catalyst_id::role_index::RoleId; +use ed25519_dalek::ed25519::signature::Signer; + +use crate::common::create_dummy_key_pair; mod common; +#[allow(clippy::unwrap_used)] +static DUMMY_PROPOSAL_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": doc_types::PROPOSAL.clone(), + })) + .unwrap() + .empty_content() + .unwrap() + .build() + .unwrap() +}); + +#[allow(clippy::unwrap_used)] +static DUMMY_BRAND_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": doc_types::BRAND_PARAMETERS.clone(), + })) + .unwrap() + .empty_content() + .unwrap() + .build() + .unwrap() +}); + +#[allow(clippy::unwrap_used)] +static COMMENT_TEMPLATE_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": doc_types::PROPOSAL_COMMENT_FORM_TEMPLATE.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({ + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": {}, + "required": [], + "additionalProperties": false + })) + .unwrap() + .build() + .unwrap() +}); + +#[allow(clippy::unwrap_used)] +static COMMENT_REF_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": { + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, + "template": { + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap() +}); + +// Given a proposal comment document `doc`: +// +// - Parameters: +// The `parameters` field in `doc` points to a brand document. +// The parameter rule defines the link reference as `template`, This mean the document +// that `ref` field in `doc` points to (in this case = template_doc), must have the same +// `parameters` value as `doc`. +// +// - Reply: +// The `reply` field in `doc` points to another comment (`ref_doc`). +// The rule requires that the `ref` field in `ref_doc` must match the `ref` field in `doc` #[tokio::test] async fn test_valid_comment_doc() { - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let (template_doc, template_doc_id, template_doc_ver) = - common::create_dummy_doc(doc_types::COMMENT_TEMPLATE_UUID_TYPE).unwrap(); - - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ + let (sk, pk, kid) = create_dummy_key_pair(RoleId::Role0).unwrap(); + let mut key_provider = TestVerifyingKeyProvider::default(); + key_provider.add_pk(kid.clone(), pk); + + // Create a main comment doc, contain all fields mention in the document (except + // revocations and section) + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::COMMENT_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": { + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), }, - "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "reply": { + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), } - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Role0, - ) - .unwrap(); + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .add_signature(|m| sk.sign(&m).to_vec(), kid) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(template_doc).unwrap(); - provider.add_document(proposal_doc).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); + assert!(is_valid, "{:?}", doc.problem_report()); + let is_valid = validator::validate_signatures(&doc, &key_provider) + .await + .unwrap(); assert!(is_valid); + assert!(!doc.problem_report().is_problematic()); } #[tokio::test] -async fn test_valid_comment_doc_with_reply() { - let empty_json = serde_json::to_vec(&serde_json::json!({})).unwrap(); +async fn test_invalid_comment_doc_wrong_role() { + let (sk, _pk, kid) = create_dummy_key_pair(RoleId::Proposer).unwrap(); - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let (template_doc, template_doc_id, template_doc_ver) = - common::create_dummy_doc(doc_types::COMMENT_TEMPLATE_UUID_TYPE).unwrap(); - - let comment_doc_id = UuidV7::new(); - let comment_doc_ver = UuidV7::new(); - let comment_doc = Builder::new() + // Create a main comment doc, contain all fields mention in the document (except + // revocations and section) + let doc = Builder::new() .with_json_metadata(serde_json::json!({ - "id": comment_doc_id, - "ver": comment_doc_ver, - "type": doc_types::COMMENT_DOCUMENT_UUID_TYPE, "content-type": ContentType::Json.to_string(), - "template": { "id": template_doc_id.to_string(), "ver": template_doc_ver.to_string() }, + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": 
DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, + "template": { + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), }, + "reply": { + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } })) .unwrap() - .with_decoded_content(empty_json.clone()) - .build(); + .with_json_content(&serde_json::json!({})) + .unwrap() + .add_signature(|m| sk.sign(&m).to_vec(), kid) + .unwrap() + .build() + .unwrap(); + + let mut provider = TestCatalystSignedDocumentProvider::default(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); - let uuid_v7 = UuidV7::new(); - let (doc, ..) = common::create_dummy_signed_doc( - serde_json::json!({ + let is_valid = validator::validate(&doc, &provider).await.unwrap(); + assert!(!is_valid, "{:?}", doc.problem_report()); +} + +#[tokio::test] +async fn test_invalid_comment_doc_missing_parameters() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::COMMENT_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": { + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), + }, + "reply": { + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() }, + // "parameters": { + // "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + // "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + // } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); + + let mut provider = TestCatalystSignedDocumentProvider::default(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); + + let is_valid = validator::validate(&doc, &provider).await.unwrap(); + assert!(!is_valid); +} + +#[tokio::test] +async fn test_invalid_comment_doc_missing_template() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), }, + // "template": { + // "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + // "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), + // }, "reply": { - "id": comment_doc_id, - "ver": comment_doc_ver + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), } - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - 
RoleId::Role0, - ) - .unwrap(); + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(template_doc).unwrap(); - provider.add_document(proposal_doc).unwrap(); - provider.add_document(comment_doc).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); - - assert!(is_valid); + assert!(!is_valid); } #[tokio::test] -async fn test_invalid_comment_doc() { - let (proposal_doc, ..) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let (template_doc, template_doc_id, template_doc_ver) = - common::create_dummy_doc(doc_types::COMMENT_TEMPLATE_UUID_TYPE).unwrap(); - - let uuid_v7 = UuidV7::new(); - let (doc, ..) = common::create_dummy_signed_doc( - serde_json::json!({ +async fn test_invalid_comment_doc_missing_ref() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::COMMENT_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_COMMENT.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + // "ref": { + // "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + // "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + // }, "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": COMMENT_TEMPLATE_DOC.doc_id().unwrap(), + "ver": COMMENT_TEMPLATE_DOC.doc_ver().unwrap(), + }, + "reply": { + "id": COMMENT_REF_DOC.doc_id().unwrap(), + "ver": COMMENT_REF_DOC.doc_ver().unwrap() }, - // without ref - "ref": serde_json::Value::Null - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Role0, - ) - .unwrap(); + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(template_doc).unwrap(); - provider.add_document(proposal_doc).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &COMMENT_REF_DOC).unwrap(); + provider.add_document(None, &COMMENT_TEMPLATE_DOC).unwrap(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); - assert!(!is_valid); } diff --git a/rust/signed_doc/tests/common/mod.rs b/rust/signed_doc/tests/common/mod.rs index d7ea84150b0..e7a52b25f16 100644 --- a/rust/signed_doc/tests/common/mod.rs +++ b/rust/signed_doc/tests/common/mod.rs @@ -4,28 +4,6 @@ use std::str::FromStr; use catalyst_signed_doc::*; use catalyst_types::catalyst_id::role_index::RoleId; -use ed25519_dalek::ed25519::signature::Signer; - -pub fn test_metadata() -> (UuidV7, UuidV4, serde_json::Value) { - let uuid_v7 = UuidV7::new(); - let uuid_v4 = UuidV4::new(); - - let metadata_fields = serde_json::json!({ - "content-type": ContentType::Json.to_string(), - "content-encoding": ContentEncoding::Brotli.to_string(), - "type": uuid_v4.to_string(), - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), - "ref": {"id": uuid_v7.to_string(), 
"ver": uuid_v7.to_string()}, - "reply": {"id": uuid_v7.to_string(), "ver": uuid_v7.to_string()}, - "template": {"id": uuid_v7.to_string(), "ver": uuid_v7.to_string()}, - "section": "$".to_string(), - "collabs": vec!["Alex1".to_string(), "Alex2".to_string()], - "parameters": {"id": uuid_v7.to_string(), "ver": uuid_v7.to_string()}, - }); - - (uuid_v7, uuid_v4, metadata_fields) -} pub fn create_dummy_key_pair( role_index: RoleId, @@ -44,47 +22,7 @@ pub fn create_dummy_key_pair( Ok((sk, pk, kid)) } -pub fn create_dummy_doc( - doc_type_id: Uuid, -) -> anyhow::Result<(CatalystSignedDocument, UuidV7, UuidV7)> { - let empty_json = serde_json::to_vec(&serde_json::json!({}))?; - - let doc_id = UuidV7::new(); - let doc_ver = UuidV7::new(); - - let doc = Builder::new() - .with_json_metadata(serde_json::json!({ - "content-type": ContentType::Json.to_string(), - "type": doc_type_id, - "id": doc_id, - "ver": doc_ver, - "template": { "id": doc_id.to_string(), "ver": doc_ver.to_string() } - }))? - .with_decoded_content(empty_json.clone()) - .build(); - - Ok((doc, doc_id, doc_ver)) -} - pub fn create_signing_key() -> ed25519_dalek::SigningKey { let mut csprng = rand::rngs::OsRng; ed25519_dalek::SigningKey::generate(&mut csprng) } - -pub fn create_dummy_signed_doc( - metadata: serde_json::Value, content: Vec, with_role_index: RoleId, -) -> anyhow::Result<( - CatalystSignedDocument, - ed25519_dalek::VerifyingKey, - CatalystId, -)> { - let (sk, pk, kid) = create_dummy_key_pair(with_role_index)?; - - let signed_doc = Builder::new() - .with_decoded_content(content) - .with_json_metadata(metadata)? - .add_signature(|m| sk.sign(&m).to_vec(), &kid)? - .build(); - - Ok((signed_doc, pk, kid)) -} diff --git a/rust/signed_doc/tests/decoding.rs b/rust/signed_doc/tests/decoding.rs index c1f632f84aa..49f80bd32ee 100644 --- a/rust/signed_doc/tests/decoding.rs +++ b/rust/signed_doc/tests/decoding.rs @@ -1,183 +1,1403 @@ //! Integration test for COSE decoding part. -use catalyst_signed_doc::{providers::tests::TestVerifyingKeyProvider, *}; +use catalyst_signed_doc::{decode_context::CompatibilityPolicy, *}; use catalyst_types::catalyst_id::role_index::RoleId; use common::create_dummy_key_pair; -use coset::TaggedCborSerializable; -use ed25519_dalek::ed25519::signature::Signer; +use minicbor::{data::Tag, Decode, Encoder}; +use rand::Rng; mod common; -#[test] -fn catalyst_signed_doc_cbor_roundtrip_test() { - let (uuid_v7, uuid_v4, metadata_fields) = common::test_metadata(); - let (sk, _, kid) = create_dummy_key_pair(RoleId::Role0).unwrap(); +type PostCheck = dyn Fn(&CatalystSignedDocument) -> anyhow::Result<()>; + +struct TestCase { + name: String, + bytes_gen: Box anyhow::Result>>>, + policy: CompatibilityPolicy, + // If the provided bytes can be even decoded without error (valid COSE or not). + // If set to `false` all further checks will not even happen. + can_decode: bool, + // If the decoded doc is a valid `CatalystSignedDocument`, underlying problem report is empty. + valid_doc: bool, + post_checks: Option>, +} + +fn signed_doc_deprecated_doc_ref_case(field_name: &'static str) -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + let doc_ref = DocumentRef::new(UuidV7::new(), UuidV7::new(), DocLocator::default()); + TestCase { + name: format!( + "Catalyst Signed Doc with deprecated {field_name} version before v0.04 validating." 
+ ), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + p_headers.map(5)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers.str(field_name)?; + p_headers.array(2)?; + p_headers.encode_with( + doc_ref.id(), + &mut catalyst_types::uuid::CborContext::Tagged, + )?; + p_headers.encode_with( + doc_ref.ver(), + &mut catalyst_types::uuid::CborContext::Tagged, + )?; + + p_headers.into_writer().as_slice() + })?; + + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // zero signatures + e.array(0)?; + + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: true, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!(doc.is_deprecated()?); + Ok(()) + } + })), + } +} + +fn signed_doc_with_valid_alias_case(alias: &'static str) -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + let doc_ref = DocumentRefs::from(vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )]); + let doc_ref_cloned = doc_ref.clone(); + TestCase { + name: format!("Provided '{alias}' field should be processed as parameters."), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + p_headers.map(5)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str(alias)? + .encode_with(doc_ref.clone(), &mut ())?; + + p_headers.into_writer().as_slice() + })?; + + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // zero signatures + e.array(0)?; + + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: true, + post_checks: Some(Box::new({ + move |doc| { + let cmp = doc_ref_cloned.clone(); + anyhow::ensure!(doc.doc_meta().parameters() == Some(&cmp)); + Ok(()) + } + })), + } +} + +fn signed_doc_with_missing_header_field_case(field: &'static str) -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + let doc_ref = DocumentRef::new(UuidV7::new(), UuidV7::new(), DocLocator::default()); + TestCase { + name: format!("Catalyst Signed Doc with missing '{field}' header."), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + p_headers.map(4)?; + if field != "content-type" { + p_headers.u8(3)?.encode(ContentType::Json)?; + } + if field != "type" { + p_headers.str("type")?.encode(&doc_type)?; + } + if field != "id" { + p_headers + .str("id")? 
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + } + if field != "ver" { + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + } + + p_headers + .str("parameters")? + .encode_with(doc_ref.clone(), &mut ())?; + + p_headers.into_writer().as_slice() + })?; + + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // zero signatures + e.array(0)?; + + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: false, + post_checks: Some(Box::new({ + move |doc| { + if field == "content-type" { + anyhow::ensure!(doc.doc_meta().content_type().is_err()); + } + if field == "type" { + anyhow::ensure!(doc.doc_meta().doc_type().is_err()); + } + if field == "id" { + anyhow::ensure!(doc.doc_meta().doc_id().is_err()); + } + if field == "ver" { + anyhow::ensure!(doc.doc_meta().doc_ver().is_err()); + } + + Ok(()) + } + })), + } +} + +fn signed_doc_with_random_header_field_case(field: &'static str) -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: format!("Catalyst Signed Doc with random bytes in '{field}' header field."), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + + // protected headers (metadata fields) + e.bytes({ + let mut rng = rand::thread_rng(); + let mut rand_buf = [0u8; 128]; + rng.try_fill(&mut rand_buf)?; + + let is_required_header = ["content-type", "type", "id", "ver"] + .iter() + .any(|v| v == &field); + + let mut p_headers = Encoder::new(Vec::new()); + p_headers.map(if is_required_header { 4 } else { 5 })?; + if field == "content-type" { + p_headers.u8(3)?.encode_with(rand_buf, &mut ())?; + } else { + p_headers.u8(3)?.encode(ContentType::Json)?; + } + if field == "type" { + p_headers.str("type")?.encode_with(rand_buf, &mut ())?; + } else { + p_headers.str("type")?.encode(&doc_type)?; + } + if field == "id" { + p_headers.str("id")?.encode_with(rand_buf, &mut ())?; + } else { + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + } + if field == "ver" { + p_headers.str("ver")?.encode_with(rand_buf, &mut ())?; + } else { + p_headers + .str("ver")? 
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + } - let content = serde_json::to_vec(&serde_json::Value::Null).unwrap(); + if !is_required_header { + p_headers.str(field)?.encode_with(rand_buf, &mut ())?; + } - let doc = Builder::new() - .with_json_metadata(metadata_fields.clone()) - .unwrap() - .with_decoded_content(content.clone()) - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); + p_headers.into_writer().as_slice() + })?; - assert!(!doc.problem_report().is_problematic()); + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // zero signatures + e.array(0)?; - let bytes: Vec = doc.try_into().unwrap(); - let decoded: CatalystSignedDocument = bytes.as_slice().try_into().unwrap(); - let extra_fields: ExtraFields = serde_json::from_value(metadata_fields).unwrap(); + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: false, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!(doc.doc_meta().content_encoding().is_none()); + anyhow::ensure!(doc.doc_meta().doc_ref().is_none()); + anyhow::ensure!(doc.doc_meta().template().is_none()); + anyhow::ensure!(doc.doc_meta().reply().is_none()); + anyhow::ensure!(doc.doc_meta().section().is_none()); + anyhow::ensure!(doc.doc_meta().parameters().is_none()); + anyhow::ensure!(doc.doc_meta().collaborators().is_empty()); - assert_eq!(decoded.doc_type().unwrap(), uuid_v4); - assert_eq!(decoded.doc_id().unwrap(), uuid_v7); - assert_eq!(decoded.doc_ver().unwrap(), uuid_v7); - assert_eq!(decoded.doc_content().decoded_bytes().unwrap(), &content); - assert_eq!(decoded.doc_meta(), &extra_fields); + if field == "content-type" { + anyhow::ensure!(doc.doc_meta().content_type().is_err()); + } + if field == "type" { + anyhow::ensure!(doc.doc_meta().doc_type().is_err()); + } + if field == "id" { + anyhow::ensure!(doc.doc_meta().doc_id().is_err()); + } + if field == "ver" { + anyhow::ensure!(doc.doc_meta().doc_ver().is_err()); + } + + Ok(()) + } + })), + } +} + +// `parameters` value along with its aliases are not allowed to be presented +fn signed_doc_with_parameters_and_aliases_case(aliases: &'static [&'static str]) -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + let doc_ref = DocumentRef::new(UuidV7::new(), UuidV7::new(), DocLocator::default()); + TestCase { + name: format!("Multiple definitions of '{}' at once.", aliases.join(", ")), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + p_headers.map(4u64.overflowing_add(u64::try_from(aliases.len())?).0)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + + for alias in aliases { + p_headers + .str(alias)? 
+ .encode_with(doc_ref.clone(), &mut ())?; + } + + p_headers.into_writer().as_slice() + })?; + + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // zero signatures + e.array(0)?; + + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: false, + post_checks: None, + } +} + +fn signed_doc_with_content_encoding_case(upper: bool) -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + let name = if upper { + "Content-Encoding" + } else { + "content-encoding" + }; + TestCase { + name: format!("content_encoding field, allow upper and lower case key value: '{name}'"), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + p_headers.map(5)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.str(name)?.encode(ContentEncoding::Brotli)?; + + p_headers.into_writer().as_slice() + })?; + + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // zero signatures + e.array(0)?; + + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: true, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!(matches!( + doc.doc_meta().content_encoding(), + Some(ContentEncoding::Brotli) + )); + Ok(()) + } + })), + } +} + +fn signed_doc_with_random_kid_case() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Invalid signature kid field format (random bytes)".to_string(), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + p_headers.map(5)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? 
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + + p_headers.into_writer().as_slice() + })?; + + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // one signature + e.array(1)?; + e.array(3)?; + // protected headers (kid field) + e.bytes({ + let mut rng = rand::thread_rng(); + let mut rand_buf = [0u8; 128]; + rng.try_fill(&mut rand_buf)?; + + let mut p_headers = minicbor::Encoder::new(Vec::new()); + p_headers.map(1)?.u8(4)?.bytes(&rand_buf)?; + + p_headers.into_writer().as_slice() + })?; + e.map(0)?; + e.bytes(&[1, 2, 3])?; + + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: false, + valid_doc: false, + post_checks: None, + } +} + +fn signed_doc_with_wrong_cose_tag_case() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with wrong COSE sign tag value (not `98`)".to_string(), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(u64::MAX))?; + e.array(4)?; + + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + p_headers.map(5)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + + p_headers.into_writer().as_slice() + })?; + + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // no signature + e.array(0)?; + + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: false, + valid_doc: false, + post_checks: None, + } +} + +fn decoding_empty_bytes_case() -> TestCase { + TestCase { + name: "Decoding empty bytes".to_string(), + bytes_gen: Box::new(|| Ok(Encoder::new(Vec::new()))), + policy: CompatibilityPolicy::Accept, + can_decode: false, + valid_doc: false, + post_checks: None, + } +} + +fn signed_doc_with_minimal_metadata_fields_case() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with minimally defined metadata fields, signed (one signature), CBOR tagged.".to_string(), + bytes_gen: Box::new({ + let doc_type = doc_type.clone(); + move || { + let (_, _, kid) = create_dummy_key_pair(RoleId::Role0)?; + + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers + .str("type")? + .encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? 
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.into_writer().as_slice() + })?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // one signature + e.array(1)?; + e.array(3)?; + // protected headers (kid field) + let mut p_headers = minicbor::Encoder::new(Vec::new()); + p_headers.map(1)?.u8(4)?.bytes(Vec::::from(&kid).as_slice())?; + e.bytes(p_headers.into_writer().as_slice())?; + e.map(0)?; + e.bytes(&[1, 2, 3])?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: true, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!(doc.doc_type()? == &doc_type); + anyhow::ensure!(doc.doc_id()? == uuid_v7); + anyhow::ensure!(doc.doc_ver()? == uuid_v7); + anyhow::ensure!(doc.doc_content_type()? == ContentType::Json); + anyhow::ensure!( + doc.encoded_content() == serde_json::to_vec(&serde_json::Value::Null)? + ); + anyhow::ensure!(doc.kids().len() == 1); + anyhow::ensure!(!doc.is_deprecated()?); + Ok(()) + } + })), + } +} + +fn signed_doc_with_complete_metadata_fields_case() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + let doc_ref = DocumentRefs::from(vec![DocumentRef::new( + UuidV7::new(), + UuidV7::new(), + DocLocator::default(), + )]); + let doc_ref_cloned = doc_ref.clone(); + TestCase { + name: "Catalyst Signed Doc with all metadata fields defined, signed (one signature), CBOR tagged.".to_string(), + bytes_gen: Box::new({ + let doc_type = doc_type.clone(); + move || { + let (_, _, kid) = create_dummy_key_pair(RoleId::Role0)?; + + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(9)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers.str("id")?.encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.str("ver")?.encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ref")? + .encode_with(doc_ref.clone(), &mut ())?; + p_headers + .str("template")? + .encode_with(doc_ref.clone(), &mut ())?; + p_headers + .str("reply")? + .encode_with(doc_ref.clone(), &mut ())?; + p_headers.str("section")?.encode("$")?; + + /* cspell:disable */ + p_headers.str("collaborators")?; + p_headers.array(2)?; + p_headers.bytes(b"cardano/FftxFnOrj2qmTuB2oZG2v0YEWJfKvQ9Gg8AgNAhDsKE")?; + p_headers.bytes(b"id.catalyst://preprod.cardano/FftxFnOrj2qmTuB2oZG2v0YEWJfKvQ9Gg8AgNAhDsKE/7/3")?; + /* cspell:enable */ + p_headers.str("parameters")?.encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + + e.bytes(p_headers.into_writer().as_slice())?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // one signature + e.array(1)?; + e.array(3)?; + // protected headers (kid field) + let mut p_headers = minicbor::Encoder::new(Vec::new()); + p_headers.map(1)?.u8(4)?.bytes(Vec::::from(&kid).as_slice())?; + e.bytes(p_headers.into_writer().as_slice())?; + e.map(0)?; + e.bytes(&[1,2,3])?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: true, + post_checks: Some(Box::new({ + move |doc| { + let refs = doc_ref_cloned.clone(); + anyhow::ensure!(doc.doc_type()? == &doc_type); + anyhow::ensure!(doc.doc_id()? 
== uuid_v7);
+ anyhow::ensure!(doc.doc_ver()? == uuid_v7);
+ anyhow::ensure!(doc.doc_meta().doc_ref() == Some(&refs));
+ anyhow::ensure!(doc.doc_meta().template() == Some(&refs));
+ anyhow::ensure!(doc.doc_meta().reply() == Some(&refs));
+ anyhow::ensure!(doc.doc_content_type()? == ContentType::Json);
+ anyhow::ensure!(doc.encoded_content() == serde_json::to_vec(&serde_json::Value::Null)?);
+ anyhow::ensure!(doc.kids().len() == 1);
+ anyhow::ensure!(!doc.is_deprecated()?);
+ Ok(())
+ }
+ })),
+ }
+}
+
+fn minimally_valid_tagged_signed_doc() -> TestCase {
+ let uuid_v7 = UuidV7::new();
+ let doc_type = DocType::from(UuidV4::new());
+ TestCase {
+ name: "Catalyst Signed Doc with minimally defined metadata fields, unsigned, CBOR tagged."
+ .to_string(),
+ bytes_gen: Box::new({
+ let doc_type = doc_type.clone();
+ move || {
+ let mut e = Encoder::new(Vec::new());
+ e.tag(Tag::new(98))?;
+ e.array(4)?;
+ // protected headers (metadata fields)
+ let mut p_headers = Encoder::new(Vec::new());
+
+ p_headers.map(4)?;
+ p_headers.u8(3)?.encode(ContentType::Json)?;
+ p_headers.str("type")?.encode(&doc_type)?;
+ p_headers
+ .str("id")?
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?;
+ p_headers
+ .str("ver")?
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?;
+ e.bytes(p_headers.into_writer().as_slice())?;
+ // empty unprotected headers
+ e.map(0)?;
+ // content
+ e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?;
+ // signatures
+ // no signature
+ e.array(0)?;
+ Ok(e)
+ }
+ }),
+ policy: CompatibilityPolicy::Accept,
+ can_decode: true,
+ valid_doc: true,
+ post_checks: Some(Box::new({
+ move |doc| {
+ anyhow::ensure!(doc.doc_type()? == &doc_type);
+ anyhow::ensure!(doc.doc_id()? == uuid_v7);
+ anyhow::ensure!(doc.doc_ver()? == uuid_v7);
+ anyhow::ensure!(doc.doc_content_type()? == ContentType::Json);
+ anyhow::ensure!(doc.doc_meta().doc_ref().is_none());
+ anyhow::ensure!(doc.doc_meta().template().is_none());
+ anyhow::ensure!(doc.doc_meta().reply().is_none());
+ anyhow::ensure!(doc.doc_meta().parameters().is_none());
+ anyhow::ensure!(
+ doc.encoded_content() == serde_json::to_vec(&serde_json::Value::Null)?
+ );
+ Ok(())
+ }
+ })),
+ }
+}
+
+fn minimally_valid_untagged_signed_doc() -> TestCase {
+ let uuid_v7 = UuidV7::new();
+ let doc_type = DocType::from(UuidV4::new());
+ TestCase {
+ name: "Catalyst Signed Doc with minimally defined metadata fields, unsigned, CBOR untagged."
+ .to_string(),
+ bytes_gen: Box::new({
+ let doc_type = doc_type.clone();
+ move || {
+ let mut e = Encoder::new(Vec::new());
+ e.array(4)?;
+ // protected headers (metadata fields)
+ let mut p_headers = Encoder::new(Vec::new());
+
+ p_headers.map(4)?;
+ p_headers.u8(3)?.encode(ContentType::Json)?;
+ p_headers.str("type")?.encode(&doc_type)?;
+ p_headers
+ .str("id")?
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?;
+ p_headers
+ .str("ver")?
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?;
+ e.bytes(p_headers.into_writer().as_slice())?;
+ // empty unprotected headers
+ e.map(0)?;
+ // content
+ e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?;
+ // signatures
+ // no signature
+ e.array(0)?;
+ Ok(e)
+ }
+ }),
+ policy: CompatibilityPolicy::Accept,
+ can_decode: true,
+ valid_doc: true,
+ post_checks: Some(Box::new({
+ move |doc| {
+ anyhow::ensure!(doc.doc_type()? == &doc_type);
+ anyhow::ensure!(doc.doc_id()? == uuid_v7);
+ anyhow::ensure!(doc.doc_ver()?
== uuid_v7); + anyhow::ensure!(doc.doc_content_type()? == ContentType::Json); + anyhow::ensure!(doc.doc_meta().doc_ref().is_none()); + anyhow::ensure!(doc.doc_meta().template().is_none()); + anyhow::ensure!(doc.doc_meta().reply().is_none()); + anyhow::ensure!(doc.doc_meta().parameters().is_none()); + anyhow::ensure!( + doc.encoded_content() == serde_json::to_vec(&serde_json::Value::Null)? + ); + Ok(()) + } + })), + } +} + +fn signed_doc_valid_null_as_no_content() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with 'content' defined as Null.".to_string(), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + e.bytes(p_headers.into_writer().as_slice())?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // no signature + e.array(0)?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: true, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!( + doc.encoded_content() == serde_json::to_vec(&serde_json::Value::Null)? + ); + Ok(()) + } + })), + } +} + +fn signed_doc_valid_empty_bstr_as_no_content() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with 'content' defined as empty bstr.".to_string(), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + e.bytes(p_headers.into_writer().as_slice())?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes("".as_bytes())?; + // signatures + // no signature + e.array(0)?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: true, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!(doc.encoded_content() == Vec::::new()); + Ok(()) + } + })), + } +} + +fn signed_doc_with_non_empty_unprotected_headers() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with non empty unprotected headers".to_string(), + bytes_gen: Box::new({ + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? 
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + e.bytes(p_headers.into_writer().as_slice())?; + // non empty unprotected headers + e.map(1)?; + e.str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // no signature + e.array(0)?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: false, + post_checks: None, + } +} + +fn signed_doc_with_signatures_non_empty_unprotected_headers() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with signatures non empty unprotected headers".to_string(), + bytes_gen: Box::new({ + move || { + let (_, _, kid) = create_dummy_key_pair(RoleId::Role0)?; + + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + e.bytes(p_headers.into_writer().as_slice())?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // one signature + e.array(1)?; + e.array(3)?; + // protected headers (kid field) + let mut p_headers = minicbor::Encoder::new(Vec::new()); + p_headers + .map(1)? + .u8(4)? + .bytes(Vec::::from(&kid).as_slice())?; + e.bytes(p_headers.into_writer().as_slice())?; + // non empty unprotected headers + e.map(1)?.u8(4)?.bytes(Vec::::from(&kid).as_slice())?; + e.bytes(&[1, 2, 3])?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: false, + post_checks: None, + } +} + +fn signed_doc_with_strict_deterministic_decoding_wrong_order() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with minimally defined metadata fields, with enabled strictly decoded rules, metadata field in the wrong order".to_string(), + bytes_gen: Box::new({ + move || { + let (_, _, kid) = create_dummy_key_pair(RoleId::Role0)?; + + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers + .str("type")? + .encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? 
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.into_writer().as_slice() + })?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // one signature + e.array(1)?; + e.array(3)?; + // protected headers (kid field) + let mut p_headers = minicbor::Encoder::new(Vec::new()); + p_headers.map(1)?.u8(4)?.bytes(Vec::::from(&kid).as_slice())?; + e.bytes(p_headers.into_writer().as_slice())?; + e.map(0)?; + e.bytes(&[1, 2, 3])?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Fail, + can_decode: false, + valid_doc: false, + post_checks: None, + } +} + +fn signed_doc_with_non_strict_deterministic_decoding_wrong_order() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with minimally defined metadata fields, with enabled non strictly (warn) decoded rules, metadata field in the wrong order".to_string(), + bytes_gen: Box::new({ + let doc_type = doc_type.clone(); + move || { + let (_, _, kid) = create_dummy_key_pair(RoleId::Role0)?; + + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers + .str("type")? + .encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.into_writer().as_slice() + })?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // one signature + e.array(1)?; + e.array(3)?; + // protected headers (kid field) + let mut p_headers = minicbor::Encoder::new(Vec::new()); + p_headers.map(1)?.u8(4)?.bytes(Vec::::from(&kid).as_slice())?; + e.bytes(p_headers.into_writer().as_slice())?; + e.map(0)?; + e.bytes(&[1, 2, 3])?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Warn, + can_decode: true, + valid_doc: true, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!(doc.doc_type()? == &doc_type); + anyhow::ensure!(doc.doc_id()? == uuid_v7); + anyhow::ensure!(doc.doc_ver()? == uuid_v7); + anyhow::ensure!(doc.doc_content_type()? == ContentType::Json); + anyhow::ensure!( + doc.encoded_content() == serde_json::to_vec(&serde_json::Value::Null)? + ); + anyhow::ensure!(doc.kids().len() == 1); + Ok(()) + } + })), + } +} + +fn signed_doc_with_non_supported_metadata_invalid() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with non-supported defined metadata fields is invalid." + .to_string(), + bytes_gen: Box::new({ + let doc_type = doc_type.clone(); + move || { + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(5)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("unsupported")? 
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.into_writer().as_slice() + })?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // no signature + e.array(0)?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: false, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!(doc.doc_type()? == &doc_type); + anyhow::ensure!(doc.doc_id()? == uuid_v7); + anyhow::ensure!(doc.doc_ver()? == uuid_v7); + anyhow::ensure!(doc.doc_content_type()? == ContentType::Json); + anyhow::ensure!( + doc.encoded_content() == serde_json::to_vec(&serde_json::Value::Null)? + ); + anyhow::ensure!(doc.kids().len() == 0); + Ok(()) + } + })), + } +} + +fn signed_doc_with_kid_in_id_form_invalid() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with Signature KID in Id form, instead of URI form is invalid." + .to_string(), + bytes_gen: Box::new({ + let doc_type = doc_type.clone(); + move || { + let (_, _, kid) = create_dummy_key_pair(RoleId::Role0)?; + + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.into_writer().as_slice() + })?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + // one signature + e.array(1)?; + e.array(3)?; + // protected headers (kid field) + let mut p_headers = minicbor::Encoder::new(Vec::new()); + p_headers + .map(1)? + .u8(4)? + .bytes(Vec::::from(&kid.as_id()).as_slice())?; + e.bytes(p_headers.into_writer().as_slice())?; + e.map(0)?; + e.bytes(&[1, 2, 3])?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: false, + post_checks: Some(Box::new({ + move |doc| { + anyhow::ensure!(doc.doc_type()? == &doc_type); + anyhow::ensure!(doc.doc_id()? == uuid_v7); + anyhow::ensure!(doc.doc_ver()? == uuid_v7); + anyhow::ensure!(doc.doc_content_type()? == ContentType::Json); + anyhow::ensure!( + doc.encoded_content() == serde_json::to_vec(&serde_json::Value::Null)? + ); + anyhow::ensure!(doc.kids().len() == 1); + Ok(()) + } + })), + } +} + +fn signed_doc_with_non_supported_protected_signature_header_invalid() -> TestCase { + let uuid_v7 = UuidV7::new(); + let doc_type = DocType::from(UuidV4::new()); + TestCase { + name: "Catalyst Signed Doc with unsupported protected Signature header is invalid." + .to_string(), + bytes_gen: Box::new({ + move || { + let (_, _, kid) = create_dummy_key_pair(RoleId::Role0)?; + + let mut e = Encoder::new(Vec::new()); + e.tag(Tag::new(98))?; + e.array(4)?; + // protected headers (metadata fields) + e.bytes({ + let mut p_headers = Encoder::new(Vec::new()); + + p_headers.map(4)?; + p_headers.u8(3)?.encode(ContentType::Json)?; + p_headers.str("type")?.encode(&doc_type)?; + p_headers + .str("id")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers + .str("ver")? 
+ .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + p_headers.into_writer().as_slice() + })?; + // empty unprotected headers + e.map(0)?; + // content + e.bytes(serde_json::to_vec(&serde_json::Value::Null)?.as_slice())?; + // signatures + e.array(1)?; + // signature + e.array(3)?; + // protected headers + e.bytes({ + let mut s_headers = minicbor::Encoder::new(Vec::new()); + s_headers.map(2)?; + // (kid field) + s_headers.u8(4)?.bytes(Vec::::from(&kid).as_slice())?; + // Unsupported label/value + s_headers + .str("unsupported")? + .encode_with(uuid_v7, &mut catalyst_types::uuid::CborContext::Tagged)?; + s_headers.into_writer().as_slice() + })?; + // unprotected headers + e.map(0)?; + // signature bytes + e.bytes(&[1, 2, 3])?; + Ok(e) + } + }), + policy: CompatibilityPolicy::Accept, + can_decode: true, + valid_doc: false, + post_checks: None, + } } #[test] -fn catalyst_signed_doc_cbor_roundtrip_kid_as_id_test() { - let (_, _, metadata_fields) = common::test_metadata(); - let (sk, _, kid) = create_dummy_key_pair(RoleId::Role0).unwrap(); - // transform Catalyst ID URI form to the ID form - let kid = kid.as_id(); - - let content = serde_json::to_vec(&serde_json::Value::Null).unwrap(); - - let doc = Builder::new() - .with_json_metadata(metadata_fields.clone()) - .unwrap() - .with_decoded_content(content.clone()) - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); - - assert!(doc.problem_report().is_problematic()); -} - -#[tokio::test] -#[allow(clippy::too_many_lines)] -async fn catalyst_signed_doc_parameters_aliases_test() { - let (_, _, metadata_fields) = common::test_metadata(); - let (sk, pk, kid) = common::create_dummy_key_pair(RoleId::Role0).unwrap(); - let mut provider = TestVerifyingKeyProvider::default(); - provider.add_pk(kid.clone(), pk); - - let content = serde_json::to_vec(&serde_json::Value::Null).unwrap(); - - let doc = Builder::new() - .with_json_metadata(metadata_fields.clone()) - .unwrap() - .with_decoded_content(content.clone()) - .build(); - assert!(!doc.problem_report().is_problematic()); - - let parameters_val = doc.doc_meta().parameters().unwrap(); - let parameters_val_cbor: coset::cbor::Value = parameters_val.try_into().unwrap(); - // replace parameters with the alias values `category_id`, `brand_id`, `campaign_id`. - let bytes: Vec = doc.try_into().unwrap(); - let mut cose = coset::CoseSign::from_tagged_slice(bytes.as_slice()).unwrap(); - cose.protected.original_data = None; - cose.protected - .header - .rest - .retain(|(l, _)| l != &coset::Label::Text("parameters".to_string())); - - let doc: CatalystSignedDocument = cose - .clone() - .to_tagged_vec() - .unwrap() - .as_slice() - .try_into() - .unwrap(); - assert!(!doc.problem_report().is_problematic()); - assert!(doc.doc_meta().parameters().is_none()); - - // case: `category_id`. - let mut cose_with_category_id = cose.clone(); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("category_id".to_string()), - parameters_val_cbor.clone(), - )); - - let cbor_bytes = cose_with_category_id.to_tagged_vec().unwrap(); - let doc: CatalystSignedDocument = cbor_bytes.as_slice().try_into().unwrap(); - let doc = doc - .into_builder() - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); - assert!(!doc.problem_report().is_problematic()); - assert!(doc.doc_meta().parameters().is_some()); - assert!(validator::validate_signatures(&doc, &provider) - .await - .unwrap()); - - // case: `brand_id`. 
- let mut cose_with_brand_id = cose.clone(); - cose_with_brand_id.protected.header.rest.push(( - coset::Label::Text("brand_id".to_string()), - parameters_val_cbor.clone(), - )); - - let cbor_bytes = cose_with_brand_id.to_tagged_vec().unwrap(); - let doc: CatalystSignedDocument = cbor_bytes.as_slice().try_into().unwrap(); - let doc = doc - .into_builder() - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); - assert!(!doc.problem_report().is_problematic()); - assert!(doc.doc_meta().parameters().is_some()); - assert!(validator::validate_signatures(&doc, &provider) - .await - .unwrap()); - - // case: `campaign_id`. - let mut cose_with_campaign_id = cose.clone(); - cose_with_campaign_id.protected.header.rest.push(( - coset::Label::Text("campaign_id".to_string()), - parameters_val_cbor.clone(), - )); - - let cbor_bytes = cose_with_campaign_id.to_tagged_vec().unwrap(); - let doc: CatalystSignedDocument = cbor_bytes.as_slice().try_into().unwrap(); - let doc = doc - .into_builder() - .add_signature(|m| sk.sign(&m).to_vec(), &kid) - .unwrap() - .build(); - assert!(!doc.problem_report().is_problematic()); - assert!(doc.doc_meta().parameters().is_some()); - assert!(validator::validate_signatures(&doc, &provider) - .await - .unwrap()); - - // `parameters` value along with its aliases are not allowed to be present at the - let mut cose_with_category_id = cose.clone(); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("parameters".to_string()), - parameters_val_cbor.clone(), - )); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("category_id".to_string()), - parameters_val_cbor.clone(), - )); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("brand_id".to_string()), - parameters_val_cbor.clone(), - )); - cose_with_category_id.protected.header.rest.push(( - coset::Label::Text("campaign_id".to_string()), - parameters_val_cbor.clone(), - )); - - let doc: CatalystSignedDocument = cose_with_category_id - .to_tagged_vec() - .unwrap() - .as_slice() - .try_into() - .unwrap(); - assert!(doc.problem_report().is_problematic()); +fn catalyst_signed_doc_decoding_test() { + let test_cases = [ + decoding_empty_bytes_case(), + signed_doc_deprecated_doc_ref_case("template"), + signed_doc_deprecated_doc_ref_case("ref"), + signed_doc_deprecated_doc_ref_case("reply"), + signed_doc_deprecated_doc_ref_case("parameters"), + signed_doc_deprecated_doc_ref_case("category_id"), + signed_doc_deprecated_doc_ref_case("brand_id"), + signed_doc_deprecated_doc_ref_case("campaign_id"), + signed_doc_with_minimal_metadata_fields_case(), + signed_doc_with_complete_metadata_fields_case(), + signed_doc_valid_null_as_no_content(), + signed_doc_valid_empty_bstr_as_no_content(), + signed_doc_with_random_kid_case(), + signed_doc_with_wrong_cose_tag_case(), + signed_doc_with_content_encoding_case(true), + signed_doc_with_content_encoding_case(false), + signed_doc_with_valid_alias_case("category_id"), + signed_doc_with_valid_alias_case("brand_id"), + signed_doc_with_valid_alias_case("campaign_id"), + signed_doc_with_valid_alias_case("parameters"), + signed_doc_with_missing_header_field_case("content-type"), + signed_doc_with_missing_header_field_case("type"), + signed_doc_with_missing_header_field_case("id"), + signed_doc_with_missing_header_field_case("ver"), + signed_doc_with_random_header_field_case("content-type"), + signed_doc_with_random_header_field_case("type"), + signed_doc_with_random_header_field_case("id"), + 
signed_doc_with_random_header_field_case("ver"), + signed_doc_with_random_header_field_case("ref"), + signed_doc_with_random_header_field_case("template"), + signed_doc_with_random_header_field_case("reply"), + signed_doc_with_random_header_field_case("section"), + signed_doc_with_random_header_field_case("collaborators"), + signed_doc_with_random_header_field_case("parameters"), + signed_doc_with_random_header_field_case("content-encoding"), + signed_doc_with_parameters_and_aliases_case(&["parameters", "category_id"]), + signed_doc_with_parameters_and_aliases_case(&["parameters", "brand_id"]), + signed_doc_with_parameters_and_aliases_case(&["parameters", "campaign_id"]), + signed_doc_with_parameters_and_aliases_case(&["category_id", "campaign_id"]), + signed_doc_with_parameters_and_aliases_case(&["category_id", "brand_id"]), + signed_doc_with_parameters_and_aliases_case(&["brand_id", "campaign_id"]), + signed_doc_with_parameters_and_aliases_case(&["category_id", "brand_id", "campaign_id"]), + signed_doc_with_parameters_and_aliases_case(&[ + "parameters", + "category_id", + "brand_id", + "campaign_id", + ]), + minimally_valid_tagged_signed_doc(), + minimally_valid_untagged_signed_doc(), + signed_doc_with_non_empty_unprotected_headers(), + signed_doc_with_signatures_non_empty_unprotected_headers(), + signed_doc_with_strict_deterministic_decoding_wrong_order(), + signed_doc_with_non_strict_deterministic_decoding_wrong_order(), + signed_doc_with_non_supported_metadata_invalid(), + signed_doc_with_kid_in_id_form_invalid(), + signed_doc_with_non_supported_protected_signature_header_invalid(), + ]; + + for mut case in test_cases { + let bytes_res = case.bytes_gen.as_ref()(); + assert!( + bytes_res.is_ok(), + "Case: [{}], error: {:?}", + case.name, + bytes_res.err() + ); + let bytes = bytes_res.unwrap().into_writer(); + let doc_res = + CatalystSignedDocument::decode(&mut minicbor::Decoder::new(&bytes), &mut case.policy); + assert_eq!( + doc_res.is_ok(), + case.can_decode, + "Case: [{}], error: {:?}", + case.name, + doc_res.err() + ); + if let Ok(doc) = doc_res { + assert_eq!( + !doc.problem_report().is_problematic(), + case.valid_doc, + "Case: [{}]. Problem report: {:?}", + case.name, + doc.problem_report() + ); + + if let Some(post_checks) = &case.post_checks { + let post_checks_res = post_checks(&doc); + assert!( + post_checks_res.is_ok(), + "Case: [{}]. Post checks fails: {:?}", + case.name, + post_checks_res.err() + ); + } + } + } } diff --git a/rust/signed_doc/tests/proposal.rs b/rust/signed_doc/tests/proposal.rs index 50ce1799e4a..03f4d608644 100644 --- a/rust/signed_doc/tests/proposal.rs +++ b/rust/signed_doc/tests/proposal.rs @@ -1,93 +1,218 @@ //! Integration test for proposal document validation part. +//! Require fields: type, id, ver, template, parameters +//! 
-use catalyst_signed_doc::{providers::tests::TestCatalystSignedDocumentProvider, *};
+use std::sync::LazyLock;
+
+use catalyst_signed_doc::{
+ providers::tests::{TestCatalystSignedDocumentProvider, TestVerifyingKeyProvider},
+ *,
+};
 use catalyst_types::catalyst_id::role_index::RoleId;
+use ed25519_dalek::ed25519::signature::Signer;
+
+use crate::common::create_dummy_key_pair;
 mod common;
+#[allow(clippy::unwrap_used)]
+static DUMMY_BRAND_DOC: LazyLock<CatalystSignedDocument> = LazyLock::new(|| {
+ Builder::new()
+ .with_json_metadata(serde_json::json!({
+ "content-type": ContentType::Json.to_string(),
+ "id": UuidV7::new(),
+ "ver": UuidV7::new(),
+ "type": doc_types::BRAND_PARAMETERS.clone(),
+ }))
+ .unwrap()
+ .empty_content()
+ .unwrap()
+ .build()
+ .unwrap()
+});
+
+#[allow(clippy::unwrap_used)]
+static PROPOSAL_TEMPLATE_DOC: LazyLock<CatalystSignedDocument> = LazyLock::new(|| {
+ Builder::new()
+ .with_json_metadata(serde_json::json!({
+ "content-type": ContentType::Json.to_string(),
+ "content-encoding": ContentEncoding::Brotli.to_string(),
+ "type": doc_types::PROPOSAL_FORM_TEMPLATE.clone(),
+ "id": UuidV7::new(),
+ "ver": UuidV7::new(),
+ "parameters": {
+ "id": DUMMY_BRAND_DOC.doc_id().unwrap(),
+ "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(),
+ },
+ }))
+ .unwrap()
+ .with_json_content(&serde_json::json!({
+ "$schema": "http://json-schema.org/draft-07/schema#",
+ "type": "object",
+ "properties": {},
+ "required": [],
+ "additionalProperties": false
+ }))
+ .unwrap()
+ .build()
+ .unwrap()
+});
+
+// Given a proposal document `doc`:
+//
+// - Parameters:
+// The `parameters` field in `doc` points to a brand document.
+// The parameters rule defines the link reference as `template`. This means that the document
+// the `template` field in `doc` points to (here `PROPOSAL_TEMPLATE_DOC`) must have the same
+// `parameters` value as `doc`.
 #[tokio::test]
 async fn test_valid_proposal_doc() {
- let (template_doc, template_doc_id, template_doc_ver) =
- common::create_dummy_doc(doc_types::PROPOSAL_TEMPLATE_UUID_TYPE).unwrap();
-
- let uuid_v7 = UuidV7::new();
- let (doc, ..)
= common::create_dummy_signed_doc( - serde_json::json!({ + let (sk, pk, kid) = create_dummy_key_pair(RoleId::Proposer).unwrap(); + let mut key_provider = TestVerifyingKeyProvider::default(); + key_provider.add_pk(kid.clone(), pk); + + // Create a main proposal doc, contain all fields mention in the document (except + // collaborations and revocations) + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": PROPOSAL_TEMPLATE_DOC.doc_id().unwrap(), + "ver": PROPOSAL_TEMPLATE_DOC.doc_ver().unwrap(), }, - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Proposer, - ) - .unwrap(); + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .add_signature(|m| sk.sign(&m).to_vec(), kid) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(template_doc).unwrap(); + + provider.add_document(None, &PROPOSAL_TEMPLATE_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); + assert!(is_valid); + let is_valid = validator::validate_signatures(&doc, &key_provider) + .await + .unwrap(); assert!(is_valid); + assert!(!doc.problem_report().is_problematic()); } #[tokio::test] -async fn test_valid_proposal_doc_with_empty_provider() { - // dummy template doc to dummy provider - let template_doc_id = UuidV7::new(); - let template_doc_ver = UuidV7::new(); - - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ +async fn test_invalid_proposal_doc_wrong_role() { + let (sk, _pk, kid) = create_dummy_key_pair(RoleId::Role0).unwrap(); + + // Create a main proposal doc, contain all fields mention in the document (except + // collaborations and revocations) + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "template": { - "id": template_doc_id, - "ver": template_doc_ver + "id": PROPOSAL_TEMPLATE_DOC.doc_id().unwrap(), + "ver": PROPOSAL_TEMPLATE_DOC.doc_ver().unwrap(), }, - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Proposer, - ) - .unwrap(); + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .add_signature(|m| sk.sign(&m).to_vec(), kid) + .unwrap() + .build() + .unwrap(); - let provider = TestCatalystSignedDocumentProvider::default(); + let mut provider = TestCatalystSignedDocumentProvider::default(); - let is_valid = validator::validate(&doc, &provider).await.unwrap(); + provider.add_document(None, &PROPOSAL_TEMPLATE_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); } #[tokio::test] -async fn test_invalid_proposal_doc() { - let uuid_v7 = UuidV7::new(); - let (doc, ..) = common::create_dummy_signed_doc( - serde_json::json!({ +async fn test_invalid_proposal_doc_missing_template() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), - // without specifying template id - "template": serde_json::Value::Null, - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Proposer, - ) - .unwrap(); - - let provider = TestCatalystSignedDocumentProvider::default(); + "type": doc_types::PROPOSAL.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + // "template": { + // "id": PROPOSAL_TEMPLATE_DOC.doc_id().unwrap(), + // "ver": PROPOSAL_TEMPLATE_DOC.doc_ver().unwrap(), + // }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); + + let mut provider = TestCatalystSignedDocumentProvider::default(); + + provider.add_document(None, &PROPOSAL_TEMPLATE_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); + assert!(!is_valid); +} +#[tokio::test] +async fn test_invalid_proposal_doc_missing_parameters() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": doc_types::PROPOSAL.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "template": { + "id": PROPOSAL_TEMPLATE_DOC.doc_id().unwrap(), + "ver": PROPOSAL_TEMPLATE_DOC.doc_ver().unwrap(), + }, + // "parameters": 
{ + // "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + // "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + // } + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); + + let mut provider = TestCatalystSignedDocumentProvider::default(); + + provider.add_document(None, &PROPOSAL_TEMPLATE_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); } diff --git a/rust/signed_doc/tests/signature.rs b/rust/signed_doc/tests/signature.rs index 5c93ec25bb5..e5e791bb918 100644 --- a/rust/signed_doc/tests/signature.rs +++ b/rust/signed_doc/tests/signature.rs @@ -1,29 +1,62 @@ //! Integration test for signature validation part. +use std::io::Write; + use catalyst_signed_doc::{providers::tests::TestVerifyingKeyProvider, *}; use catalyst_types::catalyst_id::role_index::RoleId; -use common::test_metadata; use ed25519_dalek::ed25519::signature::Signer; +use crate::common::create_dummy_key_pair; + mod common; +fn metadata() -> serde_json::Value { + serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "content-encoding": ContentEncoding::Brotli.to_string(), + "type": UuidV4::new(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": {"id": UuidV7::new(), "ver": UuidV7::new()}, + "reply": {"id": UuidV7::new(), "ver": UuidV7::new()}, + "template": {"id": UuidV7::new(), "ver": UuidV7::new()}, + "section": "$", + "collaborators": vec![ + /* cspell:disable */ + "cardano/FftxFnOrj2qmTuB2oZG2v0YEWJfKvQ9Gg8AgNAhDsKE", + "id.catalyst://preprod.cardano/FftxFnOrj2qmTuB2oZG2v0YEWJfKvQ9Gg8AgNAhDsKE/7/3" + /* cspell:enable */ + ], + "parameters": {"id": UuidV7::new(), "ver": UuidV7::new()}, + }) +} + #[tokio::test] async fn single_signature_validation_test() { - let (_, _, metadata) = test_metadata(); - let (signed_doc, pk, kid) = common::create_dummy_signed_doc( - metadata, - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Role0, - ) - .unwrap(); + let (sk, pk, kid) = create_dummy_key_pair(RoleId::Role0).unwrap(); + + let signed_doc = Builder::new() + .with_json_metadata(metadata()) + .unwrap() + .with_json_content(&serde_json::Value::Null) + .unwrap() + .add_signature(|m| sk.sign(&m).to_vec(), kid.clone()) + .unwrap() + .build() + .unwrap(); + assert!(!signed_doc.problem_report().is_problematic()); // case: has key let mut provider = TestVerifyingKeyProvider::default(); provider.add_pk(kid.clone(), pk); - assert!(validator::validate_signatures(&signed_doc, &provider) - .await - .unwrap()); + assert!( + validator::validate_signatures(&signed_doc, &provider) + .await + .unwrap(), + "{:?}", + signed_doc.problem_report() + ); // case: empty provider assert!( @@ -32,8 +65,32 @@ async fn single_signature_validation_test() { .unwrap() ); + // case: signed with different key + let (another_sk, ..) = create_dummy_key_pair(RoleId::Role0).unwrap(); + let invalid_doc = signed_doc + .into_builder() + .unwrap() + .add_signature(|m| another_sk.sign(&m).to_vec(), kid.clone()) + .unwrap() + .build() + .unwrap(); + assert!(!validator::validate_signatures(&invalid_doc, &provider) + .await + .unwrap()); + // case: missing signatures - let (unsigned_doc, ..) 
= common::create_dummy_doc(UuidV4::new().into()).unwrap(); + let unsigned_doc = Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": UuidV4::new(), + })) + .unwrap() + .with_json_content(&serde_json::json!({})) + .unwrap() + .build() + .unwrap(); assert!(!validator::validate_signatures(&unsigned_doc, &provider) .await .unwrap()); @@ -47,16 +104,18 @@ async fn multiple_signatures_validation_test() { let (_, pk_n, kid_n) = common::create_dummy_key_pair(RoleId::Role0).unwrap(); let signed_doc = Builder::new() - .with_decoded_content(serde_json::to_vec(&serde_json::Value::Null).unwrap()) - .with_json_metadata(common::test_metadata().2) + .with_json_metadata(metadata()) + .unwrap() + .with_json_content(&serde_json::Value::Null) .unwrap() - .add_signature(|m| sk1.sign(&m).to_vec(), &kid1) + .add_signature(|m| sk1.sign(&m).to_vec(), kid1.clone()) .unwrap() - .add_signature(|m| sk2.sign(&m).to_vec(), &kid2) + .add_signature(|m| sk2.sign(&m).to_vec(), kid2.clone()) .unwrap() - .add_signature(|m| sk3.sign(&m).to_vec(), &kid3) + .add_signature(|m| sk3.sign(&m).to_vec(), kid3.clone()) .unwrap() - .build(); + .build() + .unwrap(); assert!(!signed_doc.problem_report().is_problematic()); @@ -67,27 +126,177 @@ async fn multiple_signatures_validation_test() { provider.add_pk(kid3.clone(), pk3); assert!(validator::validate_signatures(&signed_doc, &provider) .await - .is_ok_and(|v| v)); + .unwrap()); // case: partially available signatures let mut provider = TestVerifyingKeyProvider::default(); provider.add_pk(kid1.clone(), pk1); provider.add_pk(kid2.clone(), pk2); - assert!(validator::validate_signatures(&signed_doc, &provider) + assert!(!validator::validate_signatures(&signed_doc, &provider) .await - .is_ok_and(|v| !v)); + .unwrap()); // case: with unrecognized provider let mut provider = TestVerifyingKeyProvider::default(); provider.add_pk(kid_n.clone(), pk_n); - assert!(validator::validate_signatures(&signed_doc, &provider) + assert!(!validator::validate_signatures(&signed_doc, &provider) .await - .is_ok_and(|v| !v)); + .unwrap()); // case: no valid signatures available assert!( - validator::validate_signatures(&signed_doc, &TestVerifyingKeyProvider::default()) + !validator::validate_signatures(&signed_doc, &TestVerifyingKeyProvider::default()) .await - .is_ok_and(|v| !v) + .unwrap() ); } + +fn content( + content_bytes: &[u8], sk: &ed25519_dalek::SigningKey, kid: &CatalystId, +) -> anyhow::Result>> { + let mut e = minicbor::Encoder::new(Vec::new()); + e.array(4)?; + // protected headers (empty metadata fields) + let mut m_p_headers = minicbor::Encoder::new(Vec::new()); + m_p_headers.map(0)?; + let m_p_headers = m_p_headers.into_writer(); + e.bytes(m_p_headers.as_slice())?; + // empty unprotected headers + e.map(0)?; + // content + let _ = e.writer_mut().write(content_bytes)?; + // signatures + // one signature + e.array(1)?; + e.array(3)?; + // protected headers (kid field) + let mut s_p_headers = minicbor::Encoder::new(Vec::new()); + s_p_headers + .map(1)? + .u8(4)? 
+ .bytes(Vec::<u8>::from(kid).as_slice())?;
+ let s_p_headers = s_p_headers.into_writer();
+
+ // [RFC 8152 section 4.4](https://datatracker.ietf.org/doc/html/rfc8152#section-4.4)
+ let mut tbs: minicbor::Encoder<Vec<u8>> = minicbor::Encoder::new(Vec::new());
+ tbs.array(5)?;
+ tbs.str("Signature")?;
+ tbs.bytes(&m_p_headers)?; // `body_protected`
+ tbs.bytes(&s_p_headers)?; // `sign_protected`
+ tbs.bytes(&[])?; // empty `external_aad`
+ tbs.writer_mut().write_all(content_bytes)?; // `payload`
+
+ e.bytes(s_p_headers.as_slice())?;
+ e.map(0)?;
+ e.bytes(&sk.sign(tbs.writer()).to_bytes())?;
+ Ok(e)
+}
+
+fn parameters_alias_field(
+ alias: &str, sk: &ed25519_dalek::SigningKey, kid: &CatalystId,
+) -> anyhow::Result<minicbor::Encoder<Vec<u8>>> {
+ let mut e = minicbor::Encoder::new(Vec::new());
+ e.array(4)?;
+ // protected headers (empty metadata fields)
+ let mut m_p_headers = minicbor::Encoder::new(Vec::new());
+ m_p_headers.map(0)?;
+ let m_p_headers = m_p_headers.into_writer();
+ e.bytes(m_p_headers.as_slice())?;
+ // unprotected headers carrying the `parameters` alias field
+ e.map(1)?;
+ e.str(alias)?.encode_with(
+ DocumentRef::new(UuidV7::new(), UuidV7::new(), DocLocator::default()),
+ &mut (),
+ )?;
+ // content (random bytes)
+ let content = [1, 2, 3];
+ e.bytes(&content)?;
+ // signatures
+ // one signature
+ e.array(1)?;
+ e.array(3)?;
+ // protected headers (kid field)
+ let mut s_p_headers = minicbor::Encoder::new(Vec::new());
+ s_p_headers
+ .map(1)?
+ .u8(4)?
+ .bytes(Vec::<u8>::from(kid).as_slice())?;
+ let s_p_headers = s_p_headers.into_writer();
+
+ // [RFC 8152 section 4.4](https://datatracker.ietf.org/doc/html/rfc8152#section-4.4)
+ let mut tbs: minicbor::Encoder<Vec<u8>> = minicbor::Encoder::new(Vec::new());
+ tbs.array(5)?;
+ tbs.str("Signature")?;
+ tbs.bytes(&m_p_headers)?; // `body_protected`
+ tbs.bytes(&s_p_headers)?; // `sign_protected`
+ tbs.bytes(&[])?; // empty `external_aad`
+ tbs.bytes(&content)?; // `payload`
+
+ e.bytes(s_p_headers.as_slice())?;
+ e.map(0)?;
+ e.bytes(&sk.sign(tbs.writer()).to_bytes())?;
+ Ok(e)
+}
+
+type DocBytesGenerator =
+ dyn Fn(&ed25519_dalek::SigningKey, &CatalystId) -> anyhow::Result<minicbor::Encoder<Vec<u8>>>;
+
+struct SpecialCborTestCase<'a> {
+ name: &'static str,
+ doc_bytes_fn: &'a DocBytesGenerator,
+}
+
+#[tokio::test]
+async fn special_cbor_cases() {
+ let (sk, pk, kid) = create_dummy_key_pair(RoleId::Role0).unwrap();
+ let mut provider = TestVerifyingKeyProvider::default();
+ provider.add_pk(kid.clone(), pk);
+
+ let test_cases: &[SpecialCborTestCase] = &[
+ SpecialCborTestCase {
+ name: "content encoded as cbor null",
+ doc_bytes_fn: &|sk, kid| {
+ let mut e = minicbor::Encoder::new(Vec::new());
+ content(e.null()?.writer().as_slice(), sk, kid)
+ },
+ },
+ SpecialCborTestCase {
+ name: "content encoded empty bstr e.g. &[]",
&[]", + doc_bytes_fn: &|sk, kid| { + let mut e = minicbor::Encoder::new(Vec::new()); + content(e.bytes(&[])?.writer().as_slice(), sk, kid) + }, + }, + SpecialCborTestCase { + name: "parameters alias `category_id` field", + doc_bytes_fn: &|sk, kid| parameters_alias_field("category_id", sk, kid), + }, + SpecialCborTestCase { + name: "parameters alias `brand_id` field", + doc_bytes_fn: &|sk, kid| parameters_alias_field("brand_id", sk, kid), + }, + SpecialCborTestCase { + name: "`parameters` alias `campaign_id` field", + doc_bytes_fn: &|sk, kid| parameters_alias_field("campaign_id", sk, kid), + }, + ]; + + for case in test_cases { + let doc = CatalystSignedDocument::try_from( + (case.doc_bytes_fn)(&sk, &kid) + .unwrap() + .into_writer() + .as_slice(), + ) + .unwrap(); + + assert!( + validator::validate_signatures(&doc, &provider) + .await + .unwrap(), + "[case: {}] {:?}", + case.name, + doc.problem_report() + ); + } +} diff --git a/rust/signed_doc/tests/submission.rs b/rust/signed_doc/tests/submission.rs index d10c6c3952f..6436181064c 100644 --- a/rust/signed_doc/tests/submission.rs +++ b/rust/signed_doc/tests/submission.rs @@ -1,150 +1,250 @@ -//! Test for proposal submission action. +//! Test for Proposal Submission Action. +//! Require fields: type, id, ver, ref, parameters +//! -use catalyst_signed_doc::{providers::tests::TestCatalystSignedDocumentProvider, *}; +use std::sync::LazyLock; + +use catalyst_signed_doc::{ + providers::tests::{TestCatalystSignedDocumentProvider, TestVerifyingKeyProvider}, + *, +}; use catalyst_types::catalyst_id::role_index::RoleId; +use ed25519_dalek::ed25519::signature::Signer; + +use crate::common::create_dummy_key_pair; mod common; +#[allow(clippy::unwrap_used)] +static DUMMY_PROPOSAL_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": doc_types::PROPOSAL.clone(), + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .empty_content() + .unwrap() + .build() + .unwrap() +}); + +#[allow(clippy::unwrap_used)] +static DUMMY_BRAND_DOC: LazyLock = LazyLock::new(|| { + Builder::new() + .with_json_metadata(serde_json::json!({ + "content-type": ContentType::Json.to_string(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "type": doc_types::BRAND_PARAMETERS.clone(), + })) + .unwrap() + .empty_content() + .unwrap() + .build() + .unwrap() +}); + +// Given a proposal comment document `doc`: +// +// - Parameters: +// The `parameters` field in `doc` points to a brand document. +// The parameter rule defines the link reference as `ref`, This mean the document that +// `ref` field in `doc` points to (in this case = `proposal_doc`), must have the same +// `parameters` value as `doc`. #[tokio::test] async fn test_valid_submission_action() { - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); + let (sk, pk, kid) = create_dummy_key_pair(RoleId::Proposer).unwrap(); + let mut key_provider = TestVerifyingKeyProvider::default(); + key_provider.add_pk(kid.clone(), pk); - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ + // Create a main proposal submission doc, contain all fields mention in the document + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), }, - }), - serde_json::to_vec(&serde_json::json!({ + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({ "action": "final" })) - .unwrap(), - RoleId::Proposer, - ) - .unwrap(); + .unwrap() + .add_signature(|m| sk.sign(&m).to_vec(), kid) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(proposal_doc).unwrap(); + + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(is_valid, "{:?}", doc.problem_report()); + + let is_valid = validator::validate_signatures(&doc, &key_provider) + .await + .unwrap(); + assert!(is_valid); + assert!(!doc.problem_report().is_problematic()); } #[tokio::test] -async fn test_valid_submission_action_with_empty_provider() { - let proposal_doc_id = UuidV7::new(); - let proposal_doc_ver = UuidV7::new(); +async fn test_invalid_submission_action_wrong_role() { + let (sk, _pk, kid) = create_dummy_key_pair(RoleId::Role0).unwrap(); - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ + // Create a main proposal submission doc, contain all fields mention in the document + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), }, - }), - serde_json::to_vec(&serde_json::json!({ + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({ "action": "final" })) - .unwrap(), - RoleId::Proposer, - ) - .unwrap(); + .unwrap() + .add_signature(|m| sk.sign(&m).to_vec(), kid) + .unwrap() + .build() + .unwrap(); - let provider = TestCatalystSignedDocumentProvider::default(); + let mut provider = TestCatalystSignedDocumentProvider::default(); - let is_valid = validator::validate(&doc, &provider).await.unwrap(); + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); } #[tokio::test] -async fn test_invalid_submission_action() { - let uuid_v7 = UuidV7::new(); - // missing `ref` field - let (doc, ..) = common::create_dummy_signed_doc( - serde_json::json!({ +async fn test_invalid_submission_action_corrupted_json() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), - // without specifying ref - "ref": serde_json::Value::Null, - }), - serde_json::to_vec(&serde_json::json!({ - "action": "final" + "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + "ref": { + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } })) - .unwrap(), - RoleId::Proposer, - ) - .unwrap(); + .unwrap() + .with_json_content(&serde_json::Value::Null) + .unwrap() + .build() + .unwrap(); + + let mut provider = TestCatalystSignedDocumentProvider::default(); + + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); - let provider = TestCatalystSignedDocumentProvider::default(); let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); +} - // corrupted JSON - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let uuid_v7 = UuidV7::new(); - let (doc, ..) 
= common::create_dummy_signed_doc( - serde_json::json!({ +#[tokio::test] +async fn test_invalid_submission_action_missing_ref() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), - "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver - }, - }), - serde_json::to_vec(&serde_json::Value::Null).unwrap(), - RoleId::Proposer, - ) - .unwrap(); + "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), + // "ref": { + // "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + // "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), + // }, + "parameters": { + "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + } + })) + .unwrap() + .with_json_content(&serde_json::json!({ + "action": "final" + })) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(proposal_doc).unwrap(); + + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); +} - // empty content - let (proposal_doc, proposal_doc_id, proposal_doc_ver) = - common::create_dummy_doc(doc_types::PROPOSAL_DOCUMENT_UUID_TYPE).unwrap(); - let uuid_v7 = UuidV7::new(); - let (doc, ..) = common::create_dummy_signed_doc( - serde_json::json!({ +#[tokio::test] +async fn test_invalid_submission_action_missing_parameters() { + let doc = Builder::new() + .with_json_metadata(serde_json::json!({ "content-type": ContentType::Json.to_string(), "content-encoding": ContentEncoding::Brotli.to_string(), - "type": doc_types::PROPOSAL_ACTION_DOCUMENT_UUID_TYPE, - "id": uuid_v7.to_string(), - "ver": uuid_v7.to_string(), + "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(), + "id": UuidV7::new(), + "ver": UuidV7::new(), "ref": { - "id": proposal_doc_id, - "ver": proposal_doc_ver + "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(), + "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(), }, - }), - vec![], - RoleId::Proposer, - ) - .unwrap(); + // "parameters": { + // "id": DUMMY_BRAND_DOC.doc_id().unwrap(), + // "ver": DUMMY_BRAND_DOC.doc_ver().unwrap(), + // } + })) + .unwrap() + .with_json_content(&serde_json::json!({ + "action": "final" + })) + .unwrap() + .build() + .unwrap(); let mut provider = TestCatalystSignedDocumentProvider::default(); - provider.add_document(proposal_doc).unwrap(); + + provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap(); + provider.add_document(None, &DUMMY_BRAND_DOC).unwrap(); + let is_valid = validator::validate(&doc, &provider).await.unwrap(); assert!(!is_valid); }
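To make the `parameters` linking rule above concrete, here is a hedged, illustrative sketch (assuming the same `Builder`, provider, and `validator` APIs used in these tests, and placed inside a `#[tokio::test]` like the ones above): a submission action whose `ref` points at `DUMMY_PROPOSAL_DOC` but whose `parameters` points at a different brand document, which the rule is expected to reject.

    // A second brand document, distinct from DUMMY_BRAND_DOC.
    let other_brand = Builder::new()
        .with_json_metadata(serde_json::json!({
            "content-type": ContentType::Json.to_string(),
            "id": UuidV7::new(),
            "ver": UuidV7::new(),
            "type": doc_types::BRAND_PARAMETERS.clone(),
        }))
        .unwrap()
        .empty_content()
        .unwrap()
        .build()
        .unwrap();

    // Submission action referencing DUMMY_PROPOSAL_DOC but parameterised by `other_brand`.
    let mismatched_action = Builder::new()
        .with_json_metadata(serde_json::json!({
            "content-type": ContentType::Json.to_string(),
            "type": doc_types::PROPOSAL_SUBMISSION_ACTION.clone(),
            "id": UuidV7::new(),
            "ver": UuidV7::new(),
            "ref": {
                "id": DUMMY_PROPOSAL_DOC.doc_id().unwrap(),
                "ver": DUMMY_PROPOSAL_DOC.doc_ver().unwrap(),
            },
            "parameters": {
                "id": other_brand.doc_id().unwrap(),
                "ver": other_brand.doc_ver().unwrap(),
            }
        }))
        .unwrap()
        .with_json_content(&serde_json::json!({ "action": "final" }))
        .unwrap()
        .build()
        .unwrap();

    let mut provider = TestCatalystSignedDocumentProvider::default();
    provider.add_document(None, &DUMMY_PROPOSAL_DOC).unwrap();
    provider.add_document(None, &DUMMY_BRAND_DOC).unwrap();
    provider.add_document(None, &other_brand).unwrap();

    // The `parameters` chains disagree, so validation is expected to reject the document.
    assert!(!validator::validate(&mismatched_action, &provider).await.unwrap());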