refactor(cubesql): Fix clippy warnings in cubesql, part 2 (#9262)
* refactor(cubesql): Fix if_same_then_else warning

* refactor(cubesql): Fix into_iter_on_ref warning

* refactor(cubesql): Fix iter_cloned_collect warning

* refactor(cubesql): Fix iter_next_slice warning

* refactor(cubesql): Fix manual_flatten warning

* refactor(cubesql): Fix manual_range_contains warning

* refactor(cubesql): Fix map_clone warning

* refactor(cubesql): Fix map_flatten warning

* refactor(cubesql): Fix map_identity warning

* refactor(cubesql): Fix useless_vec warning

* refactor(cubesql): Fix useless_conversion warning

* refactor(cubesql): Fix unwrap_or_default warning

* refactor(cubesql): Fix clone_on_copy warning

* refactor(cubesql): Fix op_ref warning
mcheshkov authored Feb 24, 2025
1 parent e12dc44 commit 31a7a04
Showing 33 changed files with 531 additions and 620 deletions.
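
Not every fixed file is captured below. As a quick reference for a few of the lints named in the commit message whose diffs are not shown here, consider this hypothetical sketch (illustrative only, not code from this commit):

    fn main() {
        let words = vec![String::from("a"), String::from("b")];

        // clippy::map_clone: a closure that only clones is spelled `.cloned()`.
        let _copies: Vec<String> = words.iter().map(|w| w.clone()).collect(); // lint fires
        let _copies: Vec<String> = words.iter().cloned().collect(); // fixed

        // clippy::manual_flatten: a `for` loop that immediately matches `Some`
        // can iterate `.flatten()` instead.
        let maybe_nums = vec![Some(1), None, Some(3)];
        for n in maybe_nums.iter().flatten() {
            println!("{n}");
        }

        // clippy::manual_range_contains: prefer a range's `contains` over
        // chained comparisons.
        let x = 5;
        let _in_range = x >= 0 && x < 10; // lint fires
        let _in_range = (0..10).contains(&x); // fixed
    }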
14 changes: 0 additions & 14 deletions rust/cubesql/cubesql/Cargo.toml
@@ -93,25 +93,15 @@ harness = false
 # Feel free to remove any rule from here and fix all warnings with it
 # Or to write a comment why rule should stay disabled
 [lints.clippy]
-clone_on_copy = "allow"
 collapsible_if = "allow"
 collapsible_match = "allow"
 collapsible_else_if = "allow"
 comparison_chain = "allow"
 derive_ord_xor_partial_ord = "allow"
 field_reassign_with_default = "allow"
-if_same_then_else = "allow"
-into_iter_on_ref = "allow"
-iter_cloned_collect = "allow"
-iter_next_slice = "allow"
 len_without_is_empty = "allow"
 len_zero = "allow"
 let_and_return = "allow"
-manual_flatten = "allow"
-manual_range_contains = "allow"
-map_clone = "allow"
-map_flatten = "allow"
-map_identity = "allow"
 match_like_matches_macro = "allow"
 match_ref_pats = "allow"
 match_single_binding = "allow"
@@ -129,7 +119,6 @@ new_without_default = "allow"
 non_canonical_partial_ord_impl = "allow"
 nonminimal_bool = "allow"
 only_used_in_recursion = "allow"
-op_ref = "allow"
 option_as_ref_deref = "allow"
 partialeq_ne_impl = "allow"
 ptr_arg = "allow"
@@ -149,8 +138,5 @@ unnecessary_mut_passed = "allow"
 unnecessary_to_owned = "allow"
 unnecessary_unwrap = "allow"
 unused_unit = "allow"
-unwrap_or_default = "allow"
-useless_conversion = "allow"
 useless_format = "allow"
-useless_vec = "allow"
 wrong_self_convention = "allow"
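
With these fourteen `allow` entries removed, `cargo clippy` now enforces the corresponding lints across the crate. As a rough illustration of what two of them catch, here is a hypothetical sketch (not code from this commit):

    fn main() {
        // clippy::clone_on_copy: `i32` is `Copy`, so `.clone()` through a
        // reference is just a noisier way to spell a copy.
        let n = 42;
        let r = &n;
        let _before = r.clone(); // lint fires: use `*r` instead
        let _after = *r; // fixed

        // clippy::useless_vec: a stack array suffices when the value is only
        // iterated, avoiding a heap allocation.
        let _sum_before: i32 = vec![1, 2, 3].iter().sum(); // lint fires
        let _sum_after: i32 = [1, 2, 3].iter().sum(); // fixed
    }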
4 changes: 2 additions & 2 deletions rust/cubesql/cubesql/e2e/tests/postgres.rs
@@ -134,7 +134,7 @@ impl PostgresIntegrationTestSuite {
         for row in res.into_iter() {
             let mut values: Vec<String> = Vec::new();
 
-            for (idx, column) in row.columns().into_iter().enumerate() {
+            for (idx, column) in row.columns().iter().enumerate() {
                 if !description_done {
                     description.push(format!(
                         "{} type: {} ({})",
@@ -1272,7 +1272,7 @@ impl AsyncTestSuite for PostgresIntegrationTestSuite {
         let columns = rows.first().unwrap().columns();
         assert_eq!(
             columns
-                .into_iter()
+                .iter()
                 .map(|col| col.type_().oid())
                 .collect::<Vec<u32>>(),
             vec![1184, 1114]
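
Both hunks above are the into_iter_on_ref fix: calling .into_iter() through a shared reference (here the column slice returned by row.columns()) can only ever yield references, so it is .iter() with a misleading name. A minimal standalone sketch of the pattern, with hypothetical data in place of the Postgres rows:

    fn main() {
        let columns = vec!["oid", "typname"];

        // clippy::into_iter_on_ref: `(&columns).into_iter()` delegates to
        // `columns.iter()` and yields `&&str`, never owned values.
        for col in (&columns).into_iter() {
            println!("{col}");
        }

        // Equivalent, and explicit about yielding references:
        for col in columns.iter() {
            println!("{col}");
        }
    }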
2 changes: 1 addition & 1 deletion rust/cubesql/cubesql/e2e/tests/utils.rs
@@ -21,7 +21,7 @@ pub fn escape_snapshot_name(name: String) -> String {
 
     // Windows limit
     if name.len() > 200 {
-        name.chars().into_iter().take(200).collect()
+        name.chars().take(200).collect()
     } else {
         name
     }
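
str::chars() already returns an iterator, so the extra .into_iter() was the identity conversion that useless_conversion flags. The same shape in isolation (the 200-character cap mirrors the snapshot-name limit above):

    fn main() {
        let name = String::from("some_very_long_snapshot_name");

        // clippy::useless_conversion: `Chars` is already an `Iterator`,
        // so `.into_iter()` is a no-op identity conversion.
        let capped: String = name.chars().into_iter().take(200).collect();

        // Fixed: consume the `Chars` adapter directly.
        let fixed: String = name.chars().take(200).collect();
        assert_eq!(capped, fixed);
    }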
@@ -120,7 +120,7 @@ fn filter_push_down(
     // let predicates = split_predicates(predicate)
     let predicates = vec![predicate.clone()]
         .into_iter()
-        .chain(predicates.into_iter())
+        .chain(predicates)
         .collect::<Vec<_>>();
     let mut pushable_predicates = vec![];
     let mut non_pushable_predicates = vec![];
@@ -326,10 +326,10 @@ fn filter_push_down(
                     optimizer_config,
                 )?),
                 on: on.clone(),
-                join_type: join_type.clone(),
-                join_constraint: join_constraint.clone(),
+                join_type: *join_type,
+                join_constraint: *join_constraint,
                 schema: schema.clone(),
-                null_equals_null: null_equals_null.clone(),
+                null_equals_null: *null_equals_null,
             }),
         )
     }
@@ -483,8 +483,8 @@ fn filter_push_down(
             issue_filter(
                 predicates,
                 LogicalPlan::Limit(Limit {
-                    skip: skip.clone(),
-                    fetch: fetch.clone(),
+                    skip: *skip,
+                    fetch: *fetch,
                     input: Arc::new(filter_push_down(
                         optimizer,
                         input,
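
The first hunk above is another useless_conversion fix: Iterator::chain accepts any IntoIterator, so converting the argument by hand is redundant. (The join-field and skip/fetch hunks are clone_on_copy fixes, illustrated after the sort_push_down section below.) A sketch of the chain case, with plain integers standing in for DataFusion Expr predicates:

    fn main() {
        let predicate = 1;
        let predicates = vec![2, 3];

        // clippy::useless_conversion: `chain` takes `impl IntoIterator`,
        // so the explicit `.into_iter()` on its argument adds nothing.
        let noisy: Vec<i32> = vec![predicate]
            .into_iter()
            .chain(predicates.clone().into_iter())
            .collect();

        // Fixed: pass the collection and let `chain` do the conversion.
        let fixed: Vec<i32> = vec![predicate].into_iter().chain(predicates).collect();
        assert_eq!(noisy, fixed);
    }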
@@ -203,10 +203,10 @@ fn limit_push_down(
                     optimizer_config,
                 )?),
                 on: on.clone(),
-                join_type: join_type.clone(),
-                join_constraint: join_constraint.clone(),
+                join_type: *join_type,
+                join_constraint: *join_constraint,
                 schema: schema.clone(),
-                null_equals_null: null_equals_null.clone(),
+                null_equals_null: *null_equals_null,
             }),
         )
     }
@@ -72,8 +72,8 @@ fn sort_push_down(
         } => Ok(if is_column_expr(expr) {
             rewrite(expr, &rewrite_map)?.map(|expr| Expr::Sort {
                 expr: Box::new(expr),
-                asc: asc.clone(),
-                nulls_first: nulls_first.clone(),
+                asc: *asc,
+                nulls_first: *nulls_first,
             })
         } else {
             None
@@ -199,10 +199,10 @@ fn sort_push_down(
                 )?),
                 right: Arc::new(sort_push_down(optimizer, right, None, optimizer_config)?),
                 on: on.clone(),
-                join_type: join_type.clone(),
-                join_constraint: join_constraint.clone(),
+                join_type: *join_type,
+                join_constraint: *join_constraint,
                 schema: schema.clone(),
-                null_equals_null: null_equals_null.clone(),
+                null_equals_null: *null_equals_null,
             }));
         }
     }
@@ -213,10 +213,10 @@ fn sort_push_down(
                 left: Arc::new(sort_push_down(optimizer, left, None, optimizer_config)?),
                 right: Arc::new(sort_push_down(optimizer, right, None, optimizer_config)?),
                 on: on.clone(),
-                join_type: join_type.clone(),
-                join_constraint: join_constraint.clone(),
+                join_type: *join_type,
+                join_constraint: *join_constraint,
                 schema: schema.clone(),
-                null_equals_null: null_equals_null.clone(),
+                null_equals_null: *null_equals_null,
             }),
         )
     }
@@ -285,8 +285,8 @@ fn sort_push_down(
             issue_sort(
                 sort_expr,
                 LogicalPlan::Limit(Limit {
-                    skip: skip.clone(),
-                    fetch: fetch.clone(),
+                    skip: *skip,
+                    fetch: *fetch,
                     input: Arc::new(sort_push_down(optimizer, input, None, optimizer_config)?),
                 }),
            )
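
The repeated join_type, join_constraint, skip, fetch, asc, nulls_first, and null_equals_null edits across these optimizer passes are all the same clone_on_copy fix: match ergonomics bind those fields as references to Copy types, so a dereference copies them and .clone() only obscures that. A sketch with hypothetical stand-in types (not DataFusion's):

    // Hypothetical stand-ins for the `Copy` plan fields edited above.
    #[derive(Clone, Copy, Debug, PartialEq)]
    enum JoinType {
        Inner,
        Left,
    }

    struct Join {
        join_type: JoinType,
        null_equals_null: bool,
    }

    // Rebuilds a node from a borrowed one, copying the `Copy` fields.
    fn rebuild(join: &Join) -> Join {
        let Join {
            join_type,        // bound as `&JoinType`
            null_equals_null, // bound as `&bool`
        } = join;
        Join {
            join_type: *join_type,               // was: join_type.clone()
            null_equals_null: *null_equals_null, // was: null_equals_null.clone()
        }
    }

    fn main() {
        let inner = rebuild(&Join {
            join_type: JoinType::Inner,
            null_equals_null: false,
        });
        assert_eq!(inner.join_type, JoinType::Inner);

        let left = rebuild(&Join {
            join_type: JoinType::Left,
            null_equals_null: true,
        });
        assert!(left.null_equals_null);
    }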
47 changes: 23 additions & 24 deletions rust/cubesql/cubesql/src/compile/engine/df/optimizers/utils.rs
@@ -44,7 +44,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             };
             rewrites.map(|(left, right)| Expr::BinaryExpr {
                 left: Box::new(left),
-                op: op.clone(),
+                op: *op,
                 right: Box::new(right),
             })
         }
@@ -60,9 +60,9 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             };
             rewrites.map(|(left, right)| Expr::AnyExpr {
                 left: Box::new(left),
-                op: op.clone(),
+                op: *op,
                 right: Box::new(right),
-                all: all.clone(),
+                all: *all,
             })
         }
         Expr::Like(Like {
@@ -77,10 +77,10 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             };
             rewrites.map(|(expr, pattern)| {
                 Expr::Like(Like {
-                    negated: negated.clone(),
+                    negated: *negated,
                     expr: Box::new(expr),
                     pattern: Box::new(pattern),
-                    escape_char: escape_char.clone(),
+                    escape_char: *escape_char,
                 })
             })
         }
@@ -96,10 +96,10 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             };
             rewrites.map(|(expr, pattern)| {
                 Expr::ILike(Like {
-                    negated: negated.clone(),
+                    negated: *negated,
                     expr: Box::new(expr),
                     pattern: Box::new(pattern),
-                    escape_char: escape_char.clone(),
+                    escape_char: *escape_char,
                 })
             })
         }
@@ -115,10 +115,10 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             };
             rewrites.map(|(expr, pattern)| {
                 Expr::SimilarTo(Like {
-                    negated: negated.clone(),
+                    negated: *negated,
                     expr: Box::new(expr),
                     pattern: Box::new(pattern),
-                    escape_char: escape_char.clone(),
+                    escape_char: *escape_char,
                 })
             })
         }
@@ -148,7 +148,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             };
             rewrites.map(|(expr, low, high)| Expr::Between {
                 expr: Box::new(expr),
-                negated: negated.clone(),
+                negated: *negated,
                 low: Box::new(low),
                 high: Box::new(high),
             })
@@ -211,8 +211,8 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             nulls_first,
         } => rewrite(expr, map)?.map(|expr| Expr::Sort {
             expr: Box::new(expr),
-            asc: asc.clone(),
-            nulls_first: nulls_first.clone(),
+            asc: *asc,
+            nulls_first: *nulls_first,
         }),
         Expr::ScalarFunction { fun, args } => args
             .iter()
@@ -249,7 +249,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             .map(|args| Expr::AggregateFunction {
                 fun: fun.clone(),
                 args,
-                distinct: distinct.clone(),
+                distinct: *distinct,
             }),
         Expr::WindowFunction {
             fun,
@@ -283,7 +283,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
                 args,
                 partition_by,
                 order_by,
-                window_frame: window_frame.clone(),
+                window_frame: *window_frame,
             })
         }
         Expr::AggregateUDF { fun, args } => args
@@ -310,7 +310,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             .map(|list| Expr::InList {
                 expr: Box::new(expr),
                 list,
-                negated: negated.clone(),
+                negated: *negated,
             })
         }
         // As rewrites are used to push things down or up the plan, wildcards
@@ -329,7 +329,7 @@ pub fn rewrite(expr: &Expr, map: &HashMap<Column, Option<Expr>>) -> Result<Optio
             rewrites.map(|(expr, subquery)| Expr::InSubquery {
                 expr: Box::new(expr),
                 subquery: Box::new(subquery),
-                negated: negated.clone(),
+                negated: *negated,
             })
         }
     })
@@ -415,25 +415,25 @@ pub fn get_expr_columns(expr: &Expr) -> Vec<Column> {
         Expr::BinaryExpr { left, right, .. } | Expr::AnyExpr { left, right, .. } => {
             get_expr_columns(left)
                 .into_iter()
-                .chain(get_expr_columns(right).into_iter())
+                .chain(get_expr_columns(right))
                 .collect()
         }
         Expr::Like(Like { expr, pattern, .. })
         | Expr::ILike(Like { expr, pattern, .. })
         | Expr::SimilarTo(Like { expr, pattern, .. }) => get_expr_columns(expr)
             .into_iter()
-            .chain(get_expr_columns(pattern).into_iter())
+            .chain(get_expr_columns(pattern))
             .collect(),
         Expr::GetIndexedField { expr, key } => get_expr_columns(expr)
             .into_iter()
-            .chain(get_expr_columns(key).into_iter())
+            .chain(get_expr_columns(key))
             .collect(),
         Expr::Between {
             expr, low, high, ..
         } => get_expr_columns(expr)
             .into_iter()
-            .chain(get_expr_columns(low).into_iter())
-            .chain(get_expr_columns(high).into_iter())
+            .chain(get_expr_columns(low))
+            .chain(get_expr_columns(high))
             .collect(),
         Expr::Case {
             expr,
@@ -447,15 +447,14 @@ pub fn get_expr_columns(expr: &Expr) -> Vec<Column> {
             .chain(when_then_expr.iter().flat_map(|(when, then)| {
                 get_expr_columns(when)
                     .into_iter()
-                    .chain(get_expr_columns(then).into_iter())
+                    .chain(get_expr_columns(then))
                     .collect::<Vec<_>>()
             }))
             .chain(
                 else_expr
                     .as_ref()
                     .map(|else_expr| get_expr_columns(else_expr))
-                    .unwrap_or(vec![])
-                    .into_iter(),
+                    .unwrap_or(vec![]),
             )
             .collect(),
         Expr::ScalarFunction { args, .. }
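
The last hunk in get_expr_columns drops one more redundant .into_iter(): the Vec built for the optional ELSE branch can be handed straight to chain. The same shape in a self-contained sketch, with i32 standing in for Column:

    fn main() {
        let when_cols = vec![1, 2];
        let else_cols: Option<Vec<i32>> = Some(vec![3]);

        // `chain` takes `impl IntoIterator`, so the `Vec` coming out of
        // `unwrap_or` needs no explicit `.into_iter()`
        // (clippy::useless_conversion).
        let all: Vec<i32> = when_cols
            .into_iter()
            .chain(else_cols.unwrap_or(vec![]))
            .collect();
        assert_eq!(all, vec![1, 2, 3]);
    }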
(The remaining changed files are not included in this capture.)
