Fix Clippy in CI for Rust 1.87 release #7514

Merged
merged 4 commits on May 16, 2025

2 changes: 2 additions & 0 deletions arrow-array/src/array/dictionary_array.rs
@@ -485,6 +485,7 @@ impl<K: ArrowDictionaryKeyType> DictionaryArray<K> {

/// Returns `PrimitiveDictionaryBuilder` of this dictionary array for mutating
/// its keys and values if the underlying data buffer is not shared by others.
#[allow(clippy::result_large_err)]
Contributor Author:
This API purposely returns `self` on failure, which is large, so I allowed the Clippy lint.
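For illustration, here is a minimal sketch of the intended usage (a hypothetical caller, not part of this PR): on failure the caller gets the original array back, so the large `Err` value is the point.

```rust
use arrow_array::builder::PrimitiveDictionaryBuilder;
use arrow_array::types::{Int32Type, UInt8Type};
use arrow_array::{Array, DictionaryArray};

// Hypothetical caller: try to get a builder for in-place mutation and fall back
// to the original array when that is not possible (assumes Int32 values).
fn mutate_or_keep(array: DictionaryArray<UInt8Type>) -> DictionaryArray<UInt8Type> {
    match array.into_primitive_dict_builder::<Int32Type>() {
        // Buffers were uniquely owned: mutate via the builder, then rebuild.
        Ok(mut builder) => builder.finish(),
        // Buffers were shared: `Err` hands the whole array back unchanged,
        // which is exactly the large error value clippy::result_large_err flags.
        Err(original) => original,
    }
}

fn main() {
    let mut builder = PrimitiveDictionaryBuilder::<UInt8Type, Int32Type>::new();
    builder.append(10).unwrap();
    builder.append(10).unwrap();
    builder.append(20).unwrap();
    // Freshly built, so nothing else holds the buffers and the Ok path is taken.
    let array = mutate_or_keep(builder.finish());
    assert_eq!(array.len(), 3);
}
```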

pub fn into_primitive_dict_builder<V>(self) -> Result<PrimitiveDictionaryBuilder<K, V>, Self>
where
V: ArrowPrimitiveType,
@@ -541,6 +542,7 @@ impl<K: ArrowDictionaryKeyType> DictionaryArray<K> {
/// assert_eq!(typed.value(1), 11);
/// assert_eq!(typed.value(2), 21);
/// ```
#[allow(clippy::result_large_err)]
pub fn unary_mut<F, V>(self, op: F) -> Result<DictionaryArray<K>, DictionaryArray<K>>
where
V: ArrowPrimitiveType,
1 change: 1 addition & 0 deletions arrow-flight/examples/flight_sql_server.rs
@@ -112,6 +112,7 @@ static TABLES: Lazy<Vec<&'static str>> = Lazy::new(|| vec!["flight_sql.example.t
pub struct FlightSqlServiceImpl {}

impl FlightSqlServiceImpl {
#[allow(clippy::result_large_err)]
Contributor Author:
This is an example and follows tonic's own `Status`-returning examples, so I allowed the lint.
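For contrast, a minimal sketch (a hypothetical helper, not from this PR) of the alternative Clippy nudges toward, boxing `Status` so the `Err` variant stays pointer-sized, which example code does not bother with:

```rust
use tonic::{Request, Status};

// Hypothetical variant of a token check that satisfies clippy::result_large_err
// by boxing the error instead of allowing the lint.
fn check_token_boxed<T>(req: &Request<T>) -> Result<(), Box<Status>> {
    req.metadata()
        .get("authorization")
        .map(|_| ())
        .ok_or_else(|| Box::new(Status::unauthenticated("no authorization header")))
}
```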

fn check_token<T>(&self, req: &Request<T>) -> Result<(), Status> {
let metadata = req.metadata();
let auth = metadata.get("authorization").ok_or_else(|| {
@@ -76,7 +76,7 @@ pub async fn run_scenario(host: &str, port: u16) -> Result {
Ok(())
}

#[allow(clippy::unnecessary_wraps)]
#[allow(clippy::result_large_err)]
Contributor Author:
It seems the previous lint has been fixed, but now I need to add a new allow for `clippy::result_large_err`.
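For reference, a minimal standalone sketch (hypothetical functions, not from this file) of what each lint fires on: `unnecessary_wraps` when the `Result` can never be `Err`, and `result_large_err` when the `Err` type itself is oversized.

```rust
// unnecessary_wraps: the Result wrapper is pointless because Err is never returned.
fn always_ok(x: u32) -> Result<u32, String> {
    Ok(x + 1)
}

// result_large_err: every Result<(), BigError> is as big as BigError,
// well past Clippy's default 128-byte threshold.
struct BigError {
    context: [u8; 256],
}

fn may_fail(flag: bool) -> Result<(), BigError> {
    if flag {
        Err(BigError { context: [0; 256] })
    } else {
        Ok(())
    }
}

fn main() {
    assert_eq!(always_ok(1), Ok(2));
    assert!(may_fail(true).is_err());
}
```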

fn middleware_interceptor(mut req: Request<()>) -> Result<Request<()>, Status> {
let metadata = req.metadata_mut();
metadata.insert("x-middleware", "expected value".parse().unwrap());
1 change: 1 addition & 0 deletions arrow-string/src/binary_predicate.rs
@@ -21,6 +21,7 @@ use memchr::memmem::Finder;
use std::iter::zip;

/// A binary based predicate
#[allow(clippy::large_enum_variant)]
pub enum BinaryPredicate<'a> {
Contains(Finder<'a>),
StartsWith(&'a [u8]),
1 change: 1 addition & 0 deletions arrow-string/src/predicate.rs
@@ -24,6 +24,7 @@ use regex::{Regex, RegexBuilder};
use std::iter::zip;

/// A string based predicate
#[allow(clippy::large_enum_variant)]
pub(crate) enum Predicate<'a> {
Eq(&'a str),
Contains(Finder<'a>),
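In both predicate enums the lint most likely fires because one variant (the searcher state held by `memmem::Finder`) is much bigger than the rest. Below is a minimal sketch of the lint and the boxing fix it usually suggests (toy types, not from this crate); here the PR allows the lint instead of boxing.

```rust
// large_enum_variant: every value of Unbalanced is sized for its biggest
// variant, so the small variants pay for the big one.
enum Unbalanced {
    Small(u8),
    Big([u8; 512]),
}

// The usual fix is to box the oversized payload so the enum itself stays small.
enum Balanced {
    Small(u8),
    Big(Box<[u8; 512]>),
}

fn main() {
    // The boxed version is dramatically smaller.
    assert!(std::mem::size_of::<Balanced>() < std::mem::size_of::<Unbalanced>());
}
```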
2 changes: 1 addition & 1 deletion parquet/benches/arrow_reader_clickbench.rs
@@ -711,7 +711,7 @@ impl ReadTest {
.schema_descr();

// Determine the correct selection ("ProjectionMask")
let projection_mask = if projection_columns.iter().any(|&name| name == "*") {
let projection_mask = if projection_columns.contains(&"*") {
// * means all columns
ProjectionMask::all()
} else {
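This follows Clippy's suggestion that a slice membership test written as `iter().any(|&x| x == needle)` is simply `contains`. A minimal standalone sketch of the equivalence (made-up column names, not the benchmark's data):

```rust
fn main() {
    let projection_columns = ["EventTime", "*", "UserID"];

    // Hand-rolled membership test...
    let any_star = projection_columns.iter().any(|&name| name == "*");
    // ...and the `contains` form Clippy prefers; both answer the same question.
    let contains_star = projection_columns.contains(&"*");

    assert_eq!(any_star, contains_star);
}
```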
18 changes: 4 additions & 14 deletions parquet/src/compression.rs
@@ -702,26 +702,19 @@ mod lz4_hadoop_codec {
input_len -= PREFIX_LEN;

if input_len < expected_compressed_size as usize {
return Err(io::Error::new(
io::ErrorKind::Other,
"Not enough bytes for Hadoop frame",
));
return Err(io::Error::other("Not enough bytes for Hadoop frame"));
}

if output_len < expected_decompressed_size as usize {
return Err(io::Error::new(
io::ErrorKind::Other,
return Err(io::Error::other(
"Not enough bytes to hold advertised output",
));
}
let decompressed_size =
lz4_flex::decompress_into(&input[..expected_compressed_size as usize], output)
.map_err(|e| ParquetError::External(Box::new(e)))?;
if decompressed_size != expected_decompressed_size as usize {
return Err(io::Error::new(
io::ErrorKind::Other,
"Unexpected decompressed size",
));
return Err(io::Error::other("Unexpected decompressed size"));
}
input_len -= expected_compressed_size as usize;
output_len -= expected_decompressed_size as usize;
@@ -736,10 +729,7 @@
if input_len == 0 {
Ok(read_bytes)
} else {
Err(io::Error::new(
io::ErrorKind::Other,
"Not all input are consumed",
))
Err(io::Error::other("Not all input are consumed"))
}
}

2 changes: 1 addition & 1 deletion parquet/src/errors.rs
@@ -147,7 +147,7 @@ pub type Result<T, E = ParquetError> = result::Result<T, E>;

impl From<ParquetError> for io::Error {
fn from(e: ParquetError) -> Self {
io::Error::new(io::ErrorKind::Other, e)
io::Error::other(e)
}
}

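Both parquet changes replace the long-standing `io::Error::new(io::ErrorKind::Other, ...)` spelling with `io::Error::other(...)`, stable since Rust 1.74, which is what newer Clippy suggests. A minimal sketch of the equivalence (hypothetical messages, not the codec's):

```rust
use std::io;

fn fail_old() -> io::Error {
    // Older spelling: explicit ErrorKind::Other.
    io::Error::new(io::ErrorKind::Other, "something went wrong")
}

fn fail_new() -> io::Error {
    // Shorthand with identical behavior that newer Clippy prefers.
    io::Error::other("something went wrong")
}

fn main() {
    // Both produce an error of kind Other carrying the same message.
    assert_eq!(fail_old().kind(), fail_new().kind());
    assert_eq!(fail_old().to_string(), fail_new().to_string());
}
```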