diff --git a/services/backup/src/Constants.h b/services/backup/src/Constants.h
--- a/services/backup/src/Constants.h
+++ b/services/backup/src/Constants.h
@@ -18,9 +18,9 @@
 // than the chunk limit, once we get the amount of data of size equal to the
 // limit, we wouldn't know if we should put this in the database right away or
 // wait for more data.
-// 400KB limit (KB, not KiB, that's why it's 1000 not 1024) -
+// 400KiB limit (in docs there is KB but they mean KiB) -
 // https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/ServiceQuotas.html
-const size_t LOG_DATA_SIZE_DATABASE_LIMIT = 400 * 1000;
+const size_t LOG_DATA_SIZE_DATABASE_LIMIT = 1024 * 400;
 
 } // namespace network
 } // namespace comm
diff --git a/services/commtest/tests/lib/tools.rs b/services/commtest/tests/lib/tools.rs
--- a/services/commtest/tests/lib/tools.rs
+++ b/services/commtest/tests/lib/tools.rs
@@ -1,7 +1,10 @@
 use bytesize::ByteSize;
 
 #[allow(dead_code)]
-pub fn generate_nbytes(number_of_bytes: usize, predefined_byte_value: Option<u8>) -> Vec<u8> {
+pub fn generate_nbytes(
+  number_of_bytes: usize,
+  predefined_byte_value: Option<u8>,
+) -> Vec<u8> {
   let byte_value = predefined_byte_value.unwrap_or(b'A');
   return vec![byte_value; number_of_bytes];
 }
@@ -18,6 +21,11 @@
   TonicStatus(tonic::Status),
 }
 
+#[allow(dead_code)]
+pub fn get_dynamo_db_item_size_limit() -> usize {
+  ByteSize::kib(400).as_u64() as usize
+}
+
 pub const GRPC_METADATA_SIZE_BYTES: usize = 5;
 
 #[allow(dead_code)]