Skip to content

Commit 6ac9e89

Browse files
Authored commit:
[AURON #1912] Clean up rust default lints (#2039)
<!-- - Start the PR title with the related issue ID, e.g. '[AURON #XXXX] Short summary...'. --> # Which issue does this PR close? Closes #1912 # Rationale for this change # What changes are included in this PR? # Are there any user-facing changes? # How was this patch tested?
1 parent cb4bc33 commit 6ac9e89

14 files changed

Lines changed: 24 additions & 30 deletions

File tree

Cargo.toml

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -27,14 +27,6 @@ members = [
2727
"native-engine/auron-memmgr",
2828
]
2929

30-
[workspace.lints.rust]
31-
# Pending processing (temporarily allow)
32-
unused_variables = "allow"
33-
dead_code = "allow"
34-
unused_imports = "allow"
35-
unused_must_use = "allow"
36-
deprecated = "allow"
37-
3830
[workspace.lints.clippy]
3931
unwrap_used = "deny"
4032
panic = "deny"

native-engine/auron-planner/src/planner.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ use arrow::{
2727
};
2828
use base64::{Engine, prelude::BASE64_URL_SAFE_NO_PAD};
2929
use datafusion::{
30-
common::{ExprSchema, Result, ScalarValue, stats::Precision},
30+
common::{Result, ScalarValue, stats::Precision},
3131
datasource::{
3232
file_format::file_compression_type::FileCompressionType,
3333
listing::{FileRange, PartitionedFile},
@@ -40,7 +40,7 @@ use datafusion::{
4040
expressions::{LikeExpr, SCAndExpr, SCOrExpr, in_list},
4141
},
4242
physical_plan::{
43-
ColumnStatistics, ExecutionPlan, PhysicalExpr, Statistics, expressions as phys_expr,
43+
ColumnStatistics, ExecutionPlan, Statistics, expressions as phys_expr,
4444
expressions::{
4545
BinaryExpr, CaseExpr, CastExpr, Column, IsNotNullExpr, IsNullExpr, Literal,
4646
NegativeExpr, NotExpr, PhysicalSortExpr,

native-engine/datafusion-ext-commons/src/arrow/eq_comparator.rs

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -743,11 +743,12 @@ pub mod tests {
743743
}
744744

745745
#[test]
746-
fn test_bytes() {
747-
test_bytes_impl::<Utf8Type>();
748-
test_bytes_impl::<LargeUtf8Type>();
749-
test_bytes_impl::<BinaryType>();
750-
test_bytes_impl::<LargeBinaryType>();
746+
fn test_bytes() -> Result<()> {
747+
let _ = test_bytes_impl::<Utf8Type>();
748+
let _ = test_bytes_impl::<LargeUtf8Type>();
749+
let _ = test_bytes_impl::<BinaryType>();
750+
let _ = test_bytes_impl::<LargeBinaryType>();
751+
Ok(())
751752
}
752753

753754
#[test]

native-engine/datafusion-ext-exprs/src/get_indexed_field.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ use std::{
1717
any::Any,
1818
convert::TryInto,
1919
fmt::{Debug, Formatter},
20-
hash::{Hash, Hasher},
20+
hash::Hash,
2121
sync::Arc,
2222
};
2323

native-engine/datafusion-ext-exprs/src/lib.rs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -31,6 +31,7 @@ pub mod string_contains;
3131
pub mod string_ends_with;
3232
pub mod string_starts_with;
3333

34+
#[allow(dead_code)]
3435
fn down_cast_any_ref(any: &dyn Any) -> &dyn Any {
3536
if any.is::<PhysicalExprRef>() {
3637
any.downcast_ref::<PhysicalExprRef>()

native-engine/datafusion-ext-exprs/src/spark_scalar_subquery_wrapper.rs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@ use std::{
1717
any::Any,
1818
fmt::{Debug, Display, Formatter},
1919
hash::{Hash, Hasher},
20-
io::Write,
2120
sync::Arc,
2221
};
2322

native-engine/datafusion-ext-functions/src/lib.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ mod spark_unscaled_value;
3838
#[allow(clippy::panic)] // Temporarily allow panic to refactor to Result later
3939
pub fn create_auron_ext_function(
4040
name: &str,
41-
spark_partition_id: usize,
41+
#[allow(unused_variables)] spark_partition_id: usize,
4242
) -> Result<ScalarFunctionImplementation> {
4343
// auron ext functions, if used for spark should be start with 'Spark_',
4444
// if used for flink should be start with 'Flink_',

native-engine/datafusion-ext-plans/src/agg/spark_udaf_wrapper.rs

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -16,15 +16,11 @@
1616
use std::{
1717
any::Any,
1818
fmt::{Debug, Display, Formatter},
19-
io::{Cursor, Read, Write},
2019
sync::Arc,
2120
};
2221

2322
use arrow::{
24-
array::{
25-
Array, ArrayAccessor, ArrayRef, BinaryArray, BinaryBuilder, StructArray, as_struct_array,
26-
make_array,
27-
},
23+
array::{Array, ArrayRef, StructArray, as_struct_array, make_array},
2824
datatypes::{DataType, Field, Schema, SchemaRef},
2925
ffi::{FFI_ArrowArray, FFI_ArrowSchema, from_ffi, from_ffi_and_data_type},
3026
record_batch::{RecordBatch, RecordBatchOptions},
@@ -39,7 +35,7 @@ use datafusion::{
3935
physical_expr::PhysicalExprRef,
4036
};
4137
use datafusion_ext_commons::{
42-
UninitializedInit, downcast_any,
38+
downcast_any,
4339
io::{read_len, write_len},
4440
};
4541
use jni::objects::{GlobalRef, JObject};

native-engine/datafusion-ext-plans/src/limit_exec.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ impl ExecutionPlan for LimitExec {
120120

121121
fn statistics(&self) -> Result<Statistics> {
122122
Statistics::with_fetch(
123-
self.input.statistics()?,
123+
self.input.partition_statistics(None)?,
124124
self.schema(),
125125
Some(self.limit),
126126
self.offset,
@@ -194,6 +194,7 @@ fn execute_limit_with_offset(
194194

195195
#[cfg(test)]
196196
mod test {
197+
#![allow(deprecated)]
197198
use std::sync::Arc;
198199

199200
use arrow::{

native-engine/datafusion-ext-plans/src/parquet_exec.rs

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717

1818
//! Execution plan for reading Parquet files
1919
20+
#![allow(deprecated)] // Deprecated method: fetch_parquet_metadata
2021
use std::{any::Any, fmt, fmt::Formatter, ops::Range, pin::Pin, sync::Arc};
2122

2223
use arrow::datatypes::SchemaRef;
@@ -465,6 +466,7 @@ impl AsyncFileReader for ParquetFileReaderRef {
465466
}
466467
}
467468

469+
#[allow(dead_code)]
468470
fn expr_contains_decimal_type(expr: &PhysicalExprRef, schema: &SchemaRef) -> Result<bool> {
469471
if matches!(expr.data_type(schema)?, DataType::Decimal128(..)) {
470472
return Ok(true);

0 commit comments

Comments (0)