1 change: 0 additions & 1 deletion crates/hotfix-message/Cargo.toml
@@ -24,7 +24,6 @@ workspace = true
hotfix-derive = { version = "0.1.2", path = "../hotfix-derive" }
hotfix-dictionary = { version = "0.1.5", path = "../hotfix-dictionary" }

anyhow.workspace = true
chrono.workspace = true
indexmap.workspace = true
thiserror.workspace = true
67 changes: 37 additions & 30 deletions crates/hotfix-message/src/builder.rs
@@ -6,7 +6,6 @@ use crate::message::{Config, Message};
use crate::parsed_message::{GarbledReason, InvalidReason, ParsedMessage};
use crate::parts::{Body, Header, RepeatingGroup, Trailer};
use crate::tags::{BEGIN_STRING, BODY_LENGTH, CHECK_SUM, MSG_TYPE};
use anyhow::anyhow;
use hotfix_dictionary::{Dictionary, LayoutItem, LayoutItemKind, TagU32};
use std::collections::{HashMap, HashSet};

@@ -32,7 +31,7 @@ pub struct MessageBuilder {
}

impl MessageBuilder {
pub fn new(dict: Dictionary, config: Config) -> anyhow::Result<Self> {
pub fn new(dict: Dictionary, config: Config) -> ParserResult<Self> {
let header_tags = Self::get_tags_for_component(&dict, "StandardHeader")?;
let trailer_tags = Self::get_tags_for_component(&dict, "StandardTrailer")?;
let message_definitions = build_message_specifications(&dict)?;
@@ -66,7 +65,8 @@ impl MessageBuilder {
}
};

let msg_type = header.get::<&str>(MSG_TYPE).unwrap(); // we know this is valid at this point as we have already verified the integrity of the header
#[allow(clippy::expect_used)]
let msg_type = header.get::<&str>(MSG_TYPE).expect("we know this is valid at this point as we have already verified the integrity of the header");
let (body, next) = match self.build_body(msg_type, &mut parser, next) {
Ok((body, field)) => (body, field),
Err(err) => {
@@ -174,9 +174,10 @@ impl MessageBuilder {
} else {
// check the message type once all other header fields have been parsed
// we delay it until after parsing so our rejection has access to fields like the sequence number
#[allow(clippy::expect_used)]
let msg_type = header
.get::<&str>(MSG_TYPE)
.expect("this should never fail as we've verified the integrity of the header");
.expect("this never fails as we've verified the integrity of the header");
if self.dict.message_by_msgtype(msg_type).is_none() {
return Err(ParserError::InvalidMsgType(msg_type.to_string()));
}
@@ -197,15 +198,17 @@ impl MessageBuilder {
let mut field = next_field;

while message_def.contains_tag(field.tag) {
let tag = field.tag.get();
let tag = field.tag;
body.store_field(field);

// check if it's the start of a group and parse the group as needed
let field_def = self.get_dict_field_by_tag(tag)?;
match message_def.get_group(TagU32::new(tag).unwrap()) {
let field_def = self.get_dict_field_by_tag(tag.get())?;
match message_def.get_group(tag) {
Some(group_def) => {
let (groups, next) = Self::parse_groups(parser, group_def, field_def.tag())?;
body.set_groups(groups);
#[allow(clippy::expect_used)]
body.set_groups(groups)
.expect("groups are guaranteed to be valid at this point");
field = next;
}
None => {
@@ -259,7 +262,10 @@ impl MessageBuilder {
{
let (groups, next) =
Self::parse_groups(parser, nested_group_def, current_tag)?;
group.set_groups(groups);
#[allow(clippy::expect_used)]
group
.set_groups(groups)
.expect("groups are guaranteed to be valid at this point");
next
} else {
parser
@@ -316,11 +322,11 @@ impl MessageBuilder {
fn get_tags_for_component(
dict: &Dictionary,
component_name: &str,
) -> anyhow::Result<HashSet<TagU32>> {
) -> ParserResult<HashSet<TagU32>> {
let mut tags = HashSet::new();
let component = dict
.component_by_name(component_name)
.ok_or(ParserError::InvalidComponent(component_name.to_string()))?;
.ok_or_else(|| ParserError::InvalidComponent(component_name.to_string()))?;
for item in component.items() {
if let LayoutItemKind::Field(field) = item.kind() {
tags.insert(field.tag());
@@ -425,6 +431,7 @@ impl GroupSpecification {
}

pub fn delimiter_tag(&self) -> TagU32 {
#[allow(clippy::expect_used)]
self.fields
.first()
.expect("groups always have at least one field")
@@ -457,7 +464,7 @@ impl MessageSpecification {

fn build_message_specifications(
dict: &Dictionary,
) -> anyhow::Result<HashMap<String, MessageSpecification>> {
) -> ParserResult<HashMap<String, MessageSpecification>> {
let mut definitions = HashMap::new();

for message in dict.messages() {
@@ -467,26 +474,25 @@ fn build_message_specifications(
.flatten()
.collect();

let message_def = MessageSpecification {
fields,
groups: message.layout().fold(HashMap::new(), |mut acc, item| {
acc.extend(extract_groups(dict, item).unwrap());
acc
}),
};
let mut groups = HashMap::new();
for item in message.layout() {
groups.extend(extract_groups(dict, item)?);
}

let message_def = MessageSpecification { fields, groups };
definitions.insert(message.msg_type().to_string(), message_def);
}

Ok(definitions)
}

fn extract_fields(dict: &Dictionary, item: LayoutItem) -> anyhow::Result<Vec<FieldSpecification>> {
fn extract_fields(dict: &Dictionary, item: LayoutItem) -> ParserResult<Vec<FieldSpecification>> {
let is_required = item.required();
let fields = match item.kind() {
LayoutItemKind::Component(c) => {
let component = dict
.component_by_name(c.name())
.ok_or_else(|| anyhow!("missing component"))?;
.ok_or_else(|| ParserError::InvalidComponent(c.name().to_string()))?;
component
.items()
.flat_map(|i| extract_fields(dict, i))
@@ -509,18 +515,22 @@ fn extract_fields(dict: &Dictionary, item: LayoutItem) -> anyhow::Result<Vec<Fie
fn extract_groups(
dict: &Dictionary,
item: LayoutItem,
) -> anyhow::Result<HashMap<TagU32, GroupSpecification>> {
) -> ParserResult<HashMap<TagU32, GroupSpecification>> {
let mut groups = HashMap::new();
match item.kind() {
LayoutItemKind::Component(c) => {
let component = dict
.component_by_name(c.name())
.ok_or_else(|| anyhow!("missing component"))?;
component.items().for_each(|i| {
groups.extend(extract_groups(dict, i).unwrap());
})
.ok_or_else(|| ParserError::InvalidComponent(c.name().to_string()))?;
for i in component.items() {
groups.extend(extract_groups(dict, i)?);
}
}
LayoutItemKind::Group(field, items) => {
let mut nested_groups = HashMap::new();
for i in items.iter() {
nested_groups.extend(extract_groups(dict, i.clone())?);
}
groups.insert(
field.tag(),
GroupSpecification {
Expand All @@ -530,10 +540,7 @@ fn extract_groups(
.flat_map(|i| extract_fields(dict, i.clone()))
.flatten()
.collect(),
nested_groups: items.iter().fold(HashMap::new(), |mut acc, i| {
acc.extend(extract_groups(dict, i.clone()).unwrap());
acc
}),
nested_groups,
},
);
}
4 changes: 2 additions & 2 deletions crates/hotfix-message/src/encoder.rs
@@ -138,14 +138,14 @@ mod tests {
));
subparty_2.store_field(Field::new(fix44::PARTY_SUB_ID_TYPE.tag(), b"2".to_vec()));

party_1.set_groups(vec![subparty_1, subparty_2]);
party_1.set_groups(vec![subparty_1, subparty_2])?;

let mut party_2 = RepeatingGroup::new(fix44::NO_PARTY_I_DS, fix44::PARTY_ID);
party_2.store_field(Field::new(fix44::PARTY_ID.tag(), b"PARTY_B".to_vec()));
party_2.store_field(Field::new(fix44::PARTY_ID_SOURCE.tag(), b"D".to_vec()));
party_2.store_field(Field::new(fix44::PARTY_ROLE.tag(), b"2".to_vec()));

msg.body.set_groups(vec![party_1, party_2]);
msg.body.set_groups(vec![party_1, party_2])?;
let config = Config { separator: b'|' };
let raw_message = msg.encode(&config)?;

1 change: 1 addition & 0 deletions crates/hotfix-message/src/encoding/definitions.rs
@@ -23,6 +23,7 @@ pub struct HardCodedFixFieldDefinition {
impl dict::IsFieldDefinition for HardCodedFixFieldDefinition {
#[inline]
fn tag(&self) -> TagU32 {
#[allow(clippy::expect_used)]
TagU32::new(self.tag).expect("Invalid tag number 0.")
}

1 change: 1 addition & 0 deletions crates/hotfix-message/src/encoding/field_access.rs
@@ -88,6 +88,7 @@ where
/// valid UTF-8. As such, you should only *ever* use this function for
/// [`FieldType`] implementors that are guaranteed to be representable with
/// valid UTF-8 (like numbers with ASCII digits).
#[allow(clippy::expect_used)]
fn to_string(&self) -> String {
String::from_utf8(self.to_bytes()).expect("Invalid UTF-8 representation of FIX field.")
}
8 changes: 6 additions & 2 deletions crates/hotfix-message/src/encoding/field_types.rs
@@ -115,8 +115,12 @@ where
{
let serialized = item.to_bytes();
let bytes = &serialized[..];
let deserialized = T::deserialize(bytes).ok().unwrap();
let deserialized_lossy = T::deserialize_lossy(bytes).ok().unwrap();
let Some(deserialized) = T::deserialize(bytes).ok() else {
return false;
};
let Some(deserialized_lossy) = T::deserialize_lossy(bytes).ok() else {
return false;
};
deserialized == item && deserialized_lossy == item
}

5 changes: 3 additions & 2 deletions crates/hotfix-message/src/encoding/field_types/tagu32.rs
@@ -14,7 +14,7 @@ impl<'a> FieldType<'a> for TagU32 {
B: Buffer,
{
let initial_len = buffer.len();
write!(BufferWriter(buffer), "{self}").unwrap();
let _ = write!(BufferWriter(buffer), "{self}");
buffer.len() - initial_len
}

@@ -29,6 +29,7 @@ impl<'a> FieldType<'a> for TagU32 {
#[inline]
fn deserialize_lossy(data: &'a [u8]) -> Result<Self, Self::Error> {
let n = u32::deserialize_lossy(data)?;
Ok(TagU32::new(n.max(1)).unwrap())
#[allow(clippy::expect_used)]
Ok(TagU32::new(n.max(1)).expect("guaranteed to be non-zero"))
}
}
8 changes: 5 additions & 3 deletions crates/hotfix-message/src/encoding/field_types/timestamp.rs
@@ -25,18 +25,20 @@ impl Timestamp {
}

/// Returns the current UTC system time with millisecond precision.
#[allow(clippy::expect_used)]
pub fn utc_now() -> Self {
use chrono::{Datelike, Timelike};
let utc: chrono::DateTime<chrono::Utc> = chrono::Utc::now();
let date = Date::new(utc.year() as u32, utc.month(), utc.day());
let date = Date::new(utc.year() as u32, utc.month(), utc.day())
.expect("chrono::Utc::now() always produces a valid date");
let time = Time::from_hmsm(
utc.hour(),
utc.minute(),
utc.second(),
utc.nanosecond() / 1_000_000,
)
.unwrap();
Self::new(date.unwrap(), time)
.expect("chrono::Utc::now() always produces a valid time");
Self::new(date, time)
}

/// Returns the date of `self`.
6 changes: 3 additions & 3 deletions crates/hotfix-message/src/encoding/field_types/tz.rs
@@ -58,9 +58,9 @@ impl Tz {
#[cfg(feature = "utils-chrono")]
#[cfg_attr(doc_cfg, doc(cfg(feature = "utils-chrono")))]
pub fn to_chrono_offset(&self) -> chrono::FixedOffset {
// unwrap(): we already verified that the offset is within bounds during
// deserialization
chrono::FixedOffset::east_opt(self.offset().1.as_secs() as i32).unwrap()
#[allow(clippy::expect_used)]
chrono::FixedOffset::east_opt(self.offset().1.as_secs() as i32)
.expect("we already verified that the offset is within bounds during deserialisation")
}

/// Creates a [`Tz`] from a [`chrono::FixedOffset`].
20 changes: 8 additions & 12 deletions crates/hotfix-message/src/encoding/field_types/utils_chrono.rs
@@ -18,7 +18,7 @@ impl Default for WithMilliseconds {
}

#[cfg_attr(doc_cfg, doc(cfg(feature = "utils-chrono")))]
impl<'a> FieldType<'a> for chrono::NaiveDateTime {
impl<'a> FieldType<'a> for NaiveDateTime {
type Error = &'static str;
type SerializeSettings = WithMilliseconds;

@@ -38,8 +38,7 @@ impl<'a> FieldType<'a> for chrono::NaiveDateTime {
}

let date = chrono::NaiveDate::deserialize(&data[..8])?;
let hyphen = <&[u8]>::deserialize(&data[8..9]).unwrap();
if hyphen != b"-" {
if data[8] != b'-' {
return Err("Hyphen in datetime not found.");
}
let time = chrono::NaiveTime::deserialize(&data[9..])?;
@@ -59,14 +58,13 @@ impl<'a> FieldType<'a> for chrono::NaiveDate {
B: Buffer,
{
use chrono::Datelike;
write!(
let _ = write!(
BufferWriter(buffer),
"{:04}{:02}{:02}",
self.year(),
self.month(),
self.day(),
)
.unwrap();
);
8
}

@@ -90,21 +88,19 @@ impl<'a> FieldType<'a> for chrono::NaiveTime {
where
B: Buffer,
{
write!(
let _ = write!(
BufferWriter(buffer),
"{:02}:{:02}:{:02}",
self.hour(),
self.minute(),
self.second()
)
.unwrap();
);
if with_millis {
write!(
let _ = write!(
BufferWriter(buffer),
".{:03}",
self.nanosecond() / 1_000_000
)
.unwrap();
);
12
} else {
8
10 changes: 10 additions & 0 deletions crates/hotfix-message/src/error.rs
@@ -1,3 +1,5 @@
use hotfix_dictionary::TagU32;
use std::collections::HashSet;
use std::io;
use thiserror::Error;

@@ -43,3 +45,11 @@ pub(crate) enum MessageIntegrityError {
#[error("Invalid CheckSum")]
InvalidCheckSum,
}

#[derive(Error, Debug)]
pub enum SetGroupsError {
#[error("Supplied empty vector of groups")]
EmptyGroups,
#[error("Supplied groups contain multiple start tags")]
MultipleStartTagsAndDelimiters(HashSet<(TagU32, TagU32)>),
}
4 changes: 4 additions & 0 deletions crates/hotfix-message/src/lib.rs
@@ -1,3 +1,7 @@
#![deny(clippy::expect_used)]
#![deny(clippy::panic)]
#![deny(clippy::unwrap_used)]

mod builder;
mod encoder;
mod encoding;