|
| 1 | +use super::MANUAL_MEMCPY; |
| 2 | +use crate::loops::manual_memcpy::{ |
| 3 | + apply_offset, get_assignment, get_assignments, get_details_from_idx, get_loop_counters, get_slice_like_element_ty, |
| 4 | + IndexExpr, MinifyingSugg, Start, StartKind, |
| 5 | +}; |
| 6 | +use clippy_utils::diagnostics::span_lint_and_sugg; |
| 7 | +use clippy_utils::source::snippet; |
| 8 | +use clippy_utils::sugg::Sugg; |
| 9 | +use clippy_utils::ty::is_copy; |
| 10 | +use clippy_utils::{higher, path_to_local, sugg}; |
| 11 | +use if_chain::if_chain; |
| 12 | +use rustc_ast::ast; |
| 13 | +use rustc_errors::Applicability; |
| 14 | +use rustc_hir::{BinOpKind, Expr, ExprKind, Pat, PatKind}; |
| 15 | +use rustc_lint::LateContext; |
| 16 | +use rustc_middle::ty; |
| 17 | +use rustc_span::symbol::sym; |
| 18 | +use std::iter::Iterator; |
| 19 | + |
| 20 | +/// Checks for for loops that sequentially copy items within a slice |
| 21 | +pub(super) fn check<'tcx>( |
| 22 | + cx: &LateContext<'tcx>, |
| 23 | + pat: &'tcx Pat<'_>, |
| 24 | + arg: &'tcx Expr<'_>, |
| 25 | + body: &'tcx Expr<'_>, |
| 26 | + expr: &'tcx Expr<'_>, |
| 27 | +) -> bool { |
| 28 | + if let Some(higher::Range { |
| 29 | + start: Some(start), |
| 30 | + end: Some(end), |
| 31 | + limits, |
| 32 | + }) = higher::Range::hir(arg) |
| 33 | + { |
| 34 | + // the var must be a single name |
| 35 | + if let PatKind::Binding(_, canonical_id, _, _) = pat.kind { |
| 36 | + let starts = &[Start { |
| 37 | + id: canonical_id, |
| 38 | + kind: StartKind::Range, |
| 39 | + }]; |
| 40 | + |
| 41 | + // This is one of few ways to return different iterators |
| 42 | + // derived from: https://stackoverflow.com/questions/29760668/conditionally-iterate-over-one-of-several-possible-iterators/52064434#52064434 |
| 43 | + let mut iter_a = None; |
| 44 | + let mut iter_b = None; |
| 45 | + |
| 46 | + if let ExprKind::Block(block, _) = body.kind { |
| 47 | + if let Some(mut loop_counters) = get_loop_counters(cx, block, expr) { |
| 48 | + // we currently do not support loop counters at all, as we would need to know |
| 49 | + // their initial value to assess whether the copy is safe to do (same reason we |
| 50 | + // require positive offsets in source) |
| 51 | + if loop_counters.next().is_some() { |
| 52 | + return false; |
| 53 | + } |
| 54 | + } |
| 55 | + iter_a = Some(get_assignments(block, starts)); |
| 56 | + } else { |
| 57 | + iter_b = Some(get_assignment(body)); |
| 58 | + } |
| 59 | + |
| 60 | + let assignments = iter_a.into_iter().flatten().chain(iter_b.into_iter()); |
| 61 | + |
| 62 | + let big_sugg = assignments |
| 63 | + // The only statements in the for loops can be indexed assignments from |
| 64 | + // indexed retrievals (except increments of loop counters). |
| 65 | + .map(|o| { |
| 66 | + o.and_then(|(lhs, rhs)| { |
| 67 | + if_chain! { |
| 68 | + if let ExprKind::Index(base_left, idx_left) = lhs.kind; |
| 69 | + if let ExprKind::Index(base_right, idx_right) = rhs.kind; |
| 70 | + // Source and destination must be same |
| 71 | + if let Some(base_left_local) = path_to_local(base_left); |
| 72 | + if let Some(base_right_local) = path_to_local(base_right); |
| 73 | + if base_left_local == base_right_local; |
| 74 | + if let Some(ty) = get_slice_like_element_ty(cx, cx.typeck_results().expr_ty(base_left)); |
| 75 | + if let Some((start_left, offset_left)) = get_details_from_idx(cx, idx_left, starts); |
| 76 | + if let Some((start_right, offset_right)) = get_details_from_idx(cx, idx_right, starts); |
| 77 | + |
| 78 | + if left_is_smaller_than_right(cx, idx_left, idx_right); |
| 79 | + |
| 80 | + if let StartKind::Range = start_left; |
| 81 | + if let StartKind::Range = start_right; |
| 82 | + |
| 83 | + if is_copy(cx, ty); |
| 84 | + |
| 85 | + then { |
| 86 | + Some((IndexExpr { base: base_left, idx: start_left, idx_offset: offset_left }, |
| 87 | + IndexExpr { base: base_right, idx: start_right, idx_offset: offset_right })) |
| 88 | + } else { |
| 89 | + None |
| 90 | + } |
| 91 | + } |
| 92 | + }) |
| 93 | + }) |
| 94 | + .map(|o| o.map(|(dst, src)| build_manual_memmove_suggestion(cx, start, end, limits, &dst, &src))) |
| 95 | + .collect::<Option<Vec<_>>>() |
| 96 | + .filter(|v| { |
| 97 | + // we currently do not support more than one assignment, as it's "too hard" to |
| 98 | + // prove that the to-be-moved slices (+ their destinations) are nonoverlapping |
| 99 | + v.len() == 1 |
| 100 | + }) |
| 101 | + .map(|v| v.join("\n ")); |
| 102 | + |
| 103 | + if let Some(big_sugg) = big_sugg { |
| 104 | + span_lint_and_sugg( |
| 105 | + cx, |
| 106 | + MANUAL_MEMCPY, |
| 107 | + expr.span, |
| 108 | + "it looks like you're manually copying within a slice", |
| 109 | + "try replacing the loop by", |
| 110 | + big_sugg, |
| 111 | + Applicability::Unspecified, |
| 112 | + ); |
| 113 | + return true; |
| 114 | + } |
| 115 | + } |
| 116 | + } |
| 117 | + false |
| 118 | +} |
| 119 | + |
/// Builds the replacement suggestion string, e.g. `arr.copy_within(2..5, 0);`,
/// from the loop's range bounds (`start`, `end`, `limits`) and the two indexed
/// expressions of the assignment.
///
/// NOTE(review): both `src.idx_offset` and `dst.idx_offset` are added to every
/// bound below, so the result is insensitive to which of the two index
/// expressions is passed as `src` vs `dst` — presumably because one of the two
/// offsets is always zero; TODO confirm this invariant against the caller.
fn build_manual_memmove_suggestion<'tcx>(
    cx: &LateContext<'tcx>,
    start: &Expr<'_>,
    end: &Expr<'_>,
    limits: ast::RangeLimits,
    src: &IndexExpr<'_>,
    dst: &IndexExpr<'_>,
) -> String {
    // Elide a literal `0` bound so the suggestion reads `..end` rather than `0..end`.
    fn print_offset(offset: MinifyingSugg<'static>) -> MinifyingSugg<'static> {
        if offset.to_string() == "0" {
            sugg::EMPTY.into()
        } else {
            offset
        }
    }

    // Renders the upper bound of the copied range:
    // - if `end` is `base.len()` and the computed bound equals it, elide the
    //   bound entirely (producing an open-ended `start..` range);
    // - otherwise add 1 for inclusive (`..=`) loop ranges, since `copy_within`
    //   takes a half-open range.
    let print_limit = |end: &Expr<'_>, end_str: &str, base: &Expr<'_>, sugg: MinifyingSugg<'static>| {
        if_chain! {
            if let ExprKind::MethodCall(method, _, len_args, _) = end.kind;
            if method.ident.name == sym::len;
            if len_args.len() == 1;
            if let Some(arg) = len_args.get(0);
            // the `len` receiver must be the same local as the indexed slice
            if path_to_local(arg) == path_to_local(base);
            then {
                if sugg.to_string() == end_str {
                    sugg::EMPTY.into()
                } else {
                    sugg
                }
            } else {
                match limits {
                    ast::RangeLimits::Closed => {
                        sugg + &sugg::ONE.into()
                    },
                    ast::RangeLimits::HalfOpen => sugg,
                }
            }
        }
    };

    let start_str = Sugg::hir(cx, start, "").into();
    let end_str: MinifyingSugg<'_> = Sugg::hir(cx, end, "").into();

    // Compute the copied range `src_offset..src_limit`, shifting the loop
    // bounds by both index offsets (see NOTE above).
    let (src_offset, src_limit) = match src.idx {
        StartKind::Range => (
            print_offset(apply_offset(
                &apply_offset(&start_str, &src.idx_offset),
                &dst.idx_offset,
            ))
            .into_sugg(),
            print_limit(
                end,
                end_str.to_string().as_str(),
                src.base,
                apply_offset(&apply_offset(&end_str, &src.idx_offset), &dst.idx_offset),
            )
            .into_sugg(),
        ),
        // Counter-based indices start at the counter's initializer instead of
        // the loop start, so the limit is rebased by `counter_start - start`.
        StartKind::Counter { initializer } => {
            let counter_start = Sugg::hir(cx, initializer, "").into();
            (
                print_offset(apply_offset(
                    &apply_offset(&counter_start, &src.idx_offset),
                    &dst.idx_offset,
                ))
                .into_sugg(),
                print_limit(
                    end,
                    end_str.to_string().as_str(),
                    src.base,
                    apply_offset(&apply_offset(&end_str, &src.idx_offset), &dst.idx_offset) + &counter_start
                        - &start_str,
                )
                .into_sugg(),
            )
        },
    };

    // "???" is the fallback text when the span's source is unavailable.
    let src_base_str = snippet(cx, src.base.span, "???");

    // `maybe_par` parenthesizes compound bound expressions where needed.
    format!(
        "{}.copy_within({}..{}, {});",
        src_base_str,
        src_offset.maybe_par(),
        src_limit.maybe_par(),
        start_str,
    )
}
| 208 | + |
| 209 | +fn left_is_smaller_than_right<'tcx>( |
| 210 | + cx: &LateContext<'tcx>, |
| 211 | + idx_left: &'tcx Expr<'_>, |
| 212 | + idx_right: &'tcx Expr<'_>, |
| 213 | +) -> bool { |
| 214 | + // in order to be a memmove-type loop, read indices must be iterated |
| 215 | + // over at a later point than written indices. we currently enforce |
| 216 | + // this by ensuring that: |
| 217 | + // |
| 218 | + // - rhs is `path + offset` with offset >= 0 |
| 219 | + // - lhs is just `path` (same path as rhs) |
| 220 | + if_chain! { |
| 221 | + if let ExprKind::Binary(idx_right_op, idx_right_lhs, idx_right_rhs) = idx_right.kind; |
| 222 | + if idx_right_op.node == BinOpKind::Add; |
| 223 | + if let Some(idx_left_local) = path_to_local(idx_left); |
| 224 | + if let Some(idx_right_local) = path_to_local(idx_right_lhs); |
| 225 | + if idx_right_local == idx_left_local; |
| 226 | + if let ty::Uint(_) = cx.typeck_results().expr_ty(idx_right_rhs).kind(); |
| 227 | + then { |
| 228 | + true |
| 229 | + } else { |
| 230 | + false |
| 231 | + } |
| 232 | + } |
| 233 | +} |
0 commit comments