Skip to content

Commit

Permalink
review clippy lints
Browse files Browse the repository at this point in the history
  • Loading branch information
Ten0 committed Apr 1, 2024
1 parent 28f2d41 commit 970efb4
Show file tree
Hide file tree
Showing 17 changed files with 48 additions and 29 deletions.
7 changes: 7 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,10 @@

[workspace.lints.rust]
missing_docs = "warn"
[workspace.lints.clippy]
explicit_auto_deref = "allow"
missing_safety_doc = "deny"
single_match = "allow"
tabs_in_doc_comments = "allow"
undocumented_unsafe_blocks = "deny"
unwrap_or_default = "allow"
4 changes: 2 additions & 2 deletions serde_avro_derive_macros/src/build_schema.rs
Original file line number Diff line number Diff line change
Expand Up @@ -201,10 +201,10 @@ pub(crate) fn schema_impl(input: SchemaDeriveInput) -> Result<TokenStream, Error
);
};
if let Some(f) = &field.scale {
error(&f);
error(f);
}
if let Some(f) = &field.precision {
error(&f);
error(f);
}
}
quote! { builder.build_logical_type::<#ty>(#logical_type) }
Expand Down
4 changes: 2 additions & 2 deletions serde_avro_fast/src/de/deserializer/types/decimal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,8 @@ where
if buf.get(start).map_or(false, |&v| v & 0x80 != 0) {
			// This is a negative number in two's-complement representation; we need to
			// preserve the sign extension when widening to the larger number
for i in 0..start {
buf[i] = 0xFF;
for v in &mut buf[0..start] {
*v = 0xFF;
}
}
let unscaled = i128::from_be_bytes(buf);
Expand Down
6 changes: 2 additions & 4 deletions serde_avro_fast/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -149,9 +149,7 @@ pub fn from_datum_slice<'a, T>(slice: &'a [u8], schema: &Schema) -> Result<T, de
where
T: serde::Deserialize<'a>,
{
serde::Deserialize::deserialize(
de::DeserializerState::from_slice(slice, &schema).deserializer(),
)
serde::Deserialize::deserialize(de::DeserializerState::from_slice(slice, schema).deserializer())
}

/// Deserialize from an avro "datum" (raw data, no headers...) `impl BufRead`
Expand All @@ -168,7 +166,7 @@ where
R: std::io::BufRead,
{
serde::Deserialize::deserialize(
de::DeserializerState::from_reader(reader, &schema).deserializer(),
de::DeserializerState::from_reader(reader, schema).deserializer(),
)
}

Expand Down
5 changes: 3 additions & 2 deletions serde_avro_fast/src/object_container_file_encoding/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -68,10 +68,11 @@ impl CompressionLevel {
/// Specifies the compression level that will be used for the compression
/// algorithms
///
/// Panics if `level` is lower than `1` or greater than `9`
/// # Panics
/// If `level` is lower than `1` or greater than `9`
///
/// This is because all algorithms expect compression levels between `1`
/// (fast compression) and `9` (take as long as you'd like).
/// ("fast compression") and `9` ("take as long as you'd like").
pub const fn new(level: u8) -> Self {
match NonZeroU8::new(level) {
Some(n) if n.get() < 10 => Self { repr: n },
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ impl CompressionCodecState {
}
}

/// This is potentially a large enum due to the snap encoder's buffer
enum Kind {
Null,
#[cfg(feature = "deflate")]
Expand Down Expand Up @@ -124,7 +125,7 @@ impl CompressionCodecState {
return Err(error("Deflate", &"got BufError from flate2"));
}
flate2::Status::StreamEnd => {
assert_eq!(input.len(), written as usize);
assert_eq!(input.len(), written);
break;
}
}
Expand Down Expand Up @@ -166,7 +167,7 @@ impl CompressionCodecState {
));
}
bzip2::Status::FinishOk | bzip2::Status::StreamEnd => {
assert_eq!(input.len(), written as usize);
assert_eq!(input.len(), written);
*len = compress.total_out() as usize;
break;
}
Expand All @@ -181,8 +182,7 @@ impl CompressionCodecState {
.compress(input, &mut self.output_vec)
.map_err(|snappy_error| error("Snappy", &snappy_error))?;
self.output_vec.truncate(n);
self.output_vec
.extend(crc32fast::hash(&input).to_be_bytes());
self.output_vec.extend(crc32fast::hash(input).to_be_bytes());
}
#[cfg(feature = "xz")]
Kind::Xz { len, level } => {
Expand Down Expand Up @@ -220,7 +220,7 @@ impl CompressionCodecState {
));
}
xz2::stream::Status::StreamEnd => {
assert_eq!(input.len(), written as usize);
assert_eq!(input.len(), written);
*len = compress.total_out() as usize;
break;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -492,9 +492,10 @@ struct WriterInner<'c, 's> {
n_elements_in_block: u64,
aprox_block_size: u32,
sync_marker: [u8; 16],
compression_codec_state: CompressionCodecState,
block_header_buffer: [u8; 20],
block_header_size: Option<NonZeroUsize>,
/// This type goes at the end because it's potentially large
compression_codec_state: CompressionCodecState,
}

impl<'c, 's> WriterInner<'c, 's> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ fn write_all_vectored_inner<'a, W: Write>(
// to avoid calling write_vectored if there is no data to be written.
advance_slices(&mut slices, &mut bufs, 0);
while !bufs.is_empty() {
match writer.write_vectored(&bufs) {
match writer.write_vectored(bufs) {
Ok(0) => {
return Err(Error::new(
ErrorKind::WriteZero,
Expand Down
3 changes: 2 additions & 1 deletion serde_avro_fast/src/schema/safe/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,8 @@ impl SchemaMut {
///
/// It is the first node of the `nodes` `Vec`.
///
/// Panics if the `nodes` `Vec` is empty.
/// # Panics
/// If the `nodes` `Vec` is empty.
/// This can only happen if you have updated it through
/// [`nodes_mut`](Self::nodes_mut), as parsing otherwise guarantees that
/// this cannot happen.
Expand Down
4 changes: 2 additions & 2 deletions serde_avro_fast/src/schema/safe/parsing/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ impl<'a> SchemaConstructionState<'a> {
None => enclosing_namespace,
},

name: &name,
name,
}
};
if let Some(_) = self.names.insert(name_key, idx) {
Expand Down Expand Up @@ -408,7 +408,7 @@ impl<'a> SchemaConstructionState<'a> {
} else {
NameKey {
namespace: enclosing_namespace,
name: &reference,
name: reference,
}
};
match self.names.get(&name_key) {
Expand Down
2 changes: 1 addition & 1 deletion serde_avro_fast/src/schema/safe/rabin.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ mod tests {
fp = (fp >> 1)
^ (super::EMPTY64 & u64::from_ne_bytes((-((fp & 1) as i64)).to_ne_bytes()));
}
fp_table[i as usize] = fp as u64;
fp_table[i as usize] = fp;
println!("\t{:#018X},", fp);
}
assert!(super::FP_TABLE as &[_] == &fp_table as &[_]);
Expand Down
11 changes: 9 additions & 2 deletions serde_avro_fast/src/schema/self_referential.rs
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,11 @@ impl<'a, N> Clone for NodeRef<'a, N> {
*self
}
}
/// SAFETY: NonNull is !Send !Sync, but NodeRef is really just a reference, so
/// we can implement Sync and Send
unsafe impl<T: Sync> Sync for NodeRef<'_, T> {}
/// SAFETY: NonNull is !Send !Sync, but NodeRef is really just a reference, so
/// we can implement Sync and Send
unsafe impl<T: Sync> Send for NodeRef<'_, T> {}
impl<N> NodeRef<'static, N> {
const unsafe fn new(ptr: *mut N) -> Self {
Expand Down Expand Up @@ -351,7 +355,7 @@ impl TryFrom<super::safe::SchemaMut> for Schema {
// There cannot be nested logical types so there cannot be a second remapping
// Also we know the index is low enough because that has been checked
// when loading inner_type above
// But we're doing unsafe so let's still make sure that is true
// But we're doing unsafe so let's still make extra sure that is true
assert!(
idx < len,
"id should be low enough - bug in serde_avro_fast"
Expand Down Expand Up @@ -450,6 +454,7 @@ impl TryFrom<super::safe::SchemaMut> for Schema {
SafeSchemaType::Fixed(fixed) => SchemaNode::Fixed(fixed),
},
};
// SAFETY: see comment at beginning of loop
unsafe {
*curr_storage_node_ptr = new_node;
curr_storage_node_ptr = curr_storage_node_ptr.add(1);
Expand All @@ -458,7 +463,9 @@ impl TryFrom<super::safe::SchemaMut> for Schema {
		// Now that all the nodes have been partially initialized, we can set the
		// references to `Fixed` for `Decimal` nodes
for (i, to) in set_decimal_repr_to_fixed {
// SAFETY: indexes are valid
			// SAFETY: indexes are valid, and there are no live references
			// to this node (live `NodeRef`s don't count because they hold raw pointers,
			// and that is allowed)
unsafe {
match *storage_start_ptr.add(i) {
SchemaNode::Decimal(ref mut decimal) => {
Expand Down
2 changes: 2 additions & 0 deletions serde_avro_fast/src/ser/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -159,6 +159,8 @@ impl<'s> SerializerConfig<'s> {
/// That is the one that will be used when building a serializer from this
/// `SerializerConfig`.
pub fn schema(&self) -> &'s Schema {
// A SerializerConfig with no schema can only be built within this
// crate - in which case we don't call `.schema()`
self.schema.expect("Unknown schema in SerializerConfig")
}
}
Expand Down
2 changes: 1 addition & 1 deletion serde_avro_fast/src/ser/serializer/seq_or_tuple.rs
Original file line number Diff line number Diff line change
Expand Up @@ -164,7 +164,7 @@ impl<'r, 'c, 's, W: Write> SerializeSeqOrTupleOrTupleStruct<'r, 'c, 's, W> {
Kind::BufferedBytes {
ref mut serializer_state,
ref buffer,
} => serializer_state.write_length_delimited(&buffer),
} => serializer_state.write_length_delimited(buffer),
Kind::Fixed { expected_len, .. } => {
if expected_len != 0 {
Err(SerError::new(
Expand Down
2 changes: 1 addition & 1 deletion serde_avro_fast/tests/duration.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ fn test<'de, T: serde::Serialize + serde::Deserialize<'de> + PartialEq + std::fm
to_datum_vec(&rust_value, &mut SerializerConfig::new(schema)).unwrap(),
datum
);
let avro_value = apache_avro::types::Value::Duration(avro_value.clone());
let avro_value = apache_avro::types::Value::Duration(avro_value);
assert_eq!(
apache_avro::from_avro_datum(apache_schema, &mut &*datum, None).unwrap(),
avro_value
Expand Down
6 changes: 4 additions & 2 deletions serde_avro_fast/tests/round_trips.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
//! Port of tests from the apache-avro library
//! https://github.com/apache/avro/blob/5016cd5c3f2054ebacce7983785c228798e47f59/lang/rust/avro/tests/io.rs
#![allow(clippy::zero_prefixed_literal)]

use {
apache_avro::{types::Value, Schema},
lazy_static::lazy_static,
Expand Down Expand Up @@ -334,7 +336,7 @@ fn test_bytes_with_serde_json_value() {
let schema: serde_avro_fast::Schema = raw_schema.parse().unwrap();

let decoded: serde_json::Value = match value {
Value::Bytes(b) => b.into_iter().map(|b| *b as u64).collect(),
Value::Bytes(b) => b.iter().map(|&b| b as u64).collect(),
_ => unreachable!(),
};
let config = &mut serde_avro_fast::ser::SerializerConfig::new(&schema);
Expand All @@ -355,7 +357,7 @@ fn test_fixed_with_serde_json_value() {
let schema: serde_avro_fast::Schema = raw_schema.parse().unwrap();

let decoded: serde_json::Value = match value {
Value::Fixed(_, b) => b.into_iter().map(|b| *b as u64).collect(),
Value::Fixed(_, b) => b.iter().map(|&b| b as u64).collect(),
_ => unreachable!(),
};
let config = &mut serde_avro_fast::ser::SerializerConfig::new(&schema);
Expand Down
4 changes: 2 additions & 2 deletions serde_avro_fast/tests/single_object_encoding.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ fn avro_3642_test_single_object_reader_incomplete_reads() {
c: vec!["cat".into(), "dog".into()],
};
// The two-byte marker, to show that the message uses this single-record format
let to_read_1 = vec![0xC3, 0x01];
let to_read_1 = &[0xC3, 0x01];
let mut to_read_2 = Vec::<u8>::new();
to_read_2.extend_from_slice(
&APACHE_SCHEMA
Expand All @@ -106,7 +106,7 @@ fn avro_3642_test_single_object_reader_incomplete_reads() {
let mut to_read_3 = Vec::<u8>::new();
apache_encode(expected_value.clone(), &APACHE_SCHEMA, &mut to_read_3)
.expect("Encode should succeed");
let to_read = (&to_read_1[..]).chain(&to_read_2[..]).chain(&to_read_3[..]);
let to_read = (to_read_1).chain(&to_read_2[..]).chain(&to_read_3[..]);
let val: TestSingleObjectReader = from_single_object_reader(to_read, &SCHEMA).unwrap();
assert_eq!(expected_value, val);
}

0 comments on commit 970efb4

Please sign in to comment.