use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::res::MaybeResPath;
use clippy_utils::source::SpanRangeExt;
use clippy_utils::{SpanlessEq, fulfill_or_allowed, hash_expr, is_lint_allowed, search_same};
use core::cmp::Ordering;
use core::{iter, slice};
use itertools::Itertools;
use rustc_arena::DroplessArena;
use rustc_ast::ast::LitKind;
use rustc_errors::Applicability;
use rustc_hir::def_id::DefId;
use rustc_hir::{Arm, Expr, HirId, HirIdMap, HirIdMapEntry, HirIdSet, Pat, PatExpr, PatExprKind, PatKind, RangeEnd};
use rustc_lint::builtin::NON_EXHAUSTIVE_OMITTED_PATTERNS;
use rustc_lint::{LateContext, LintContext};
use rustc_middle::ty::{self, TypeckResults};
use rustc_span::{ByteSymbol, ErrorGuaranteed, Span, Symbol};

use super::MATCH_SAME_ARMS;

/// Implementation of the `MATCH_SAME_ARMS` lint: finds groups of match arms whose
/// guards and bodies are semantically identical (up to renamed bindings) and which
/// could be reordered next to each other without changing match semantics, then
/// suggests either removing redundant arms (when the group ends in a wildcard) or
/// merging the patterns into a single `|` arm.
#[expect(clippy::too_many_lines)]
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) {
    // Hash only the arm body; `search_same` uses this to bucket candidate arms
    // before running the (expensive) `eq` comparison below.
    let hash = |&(_, arm): &(_, &Arm<'_>)| hash_expr(cx, arm.body);

    // One `NormalizedPat` per arm, allocated in `arena`. All three Vecs below
    // (`normalized_pats`, `forwards_blocking_idxs`, `backwards_blocking_idxs`)
    // have exactly `arms.len()` elements, so indexing them with arm indices
    // further down is in-bounds by construction.
    let arena = DroplessArena::default();
    let normalized_pats: Vec<_> = arms
        .iter()
        .map(|a| NormalizedPat::from_pat(cx, &arena, a.pat))
        .collect();

    // The furthest forwards a pattern can move without semantic changes:
    // the index of the first later arm whose pattern overlaps this one
    // (or `arms.len()` if none does).
    let forwards_blocking_idxs: Vec<_> = normalized_pats
        .iter()
        .enumerate()
        .map(|(i, pat)| {
            (normalized_pats[i + 1..].iter().enumerate())
                .find_map(|(j, other)| pat.has_overlapping_values(other).then_some(i + 1 + j))
                .unwrap_or(normalized_pats.len())
        })
        .collect();

    // The furthest backwards a pattern can move without semantic changes:
    // scan earlier arms in reverse, skipping ones that are themselves free to
    // move past position `i`, and stop at the first blocking/overlapping arm.
    let backwards_blocking_idxs: Vec<_> = normalized_pats
        .iter()
        .enumerate()
        .map(|(i, pat)| {
            iter::zip(
                normalized_pats[..i].iter().enumerate().rev(),
                forwards_blocking_idxs[..i].iter().copied().rev(),
            )
            .skip_while(|&(_, forward_block)| forward_block > i)
            .find_map(|((j, other), forward_block)| {
                (forward_block == i || pat.has_overlapping_values(other)).then_some(j)
            })
            .unwrap_or(0)
        })
        .collect();

    // Equality check for two indexed arms: they are "the same" when their guards
    // and bodies are spanless-equal (allowing consistently-renamed local bindings)
    // and neither is blocked from moving next to the other by an overlapping arm
    // in between.
    let eq = |&(lindex, lhs): &(usize, &Arm<'_>), &(rindex, rhs): &(usize, &Arm<'_>)| -> bool {
        let min_index = usize::min(lindex, rindex);
        let max_index = usize::max(lindex, rindex);

        let check_eq_with_pat = |expr_a: &Expr<'_>, expr_b: &Expr<'_>| {
            // Maps each local bound in `lhs.pat` to the local it must correspond
            // to in `rhs.pat`; populated lazily by `eq_fallback`.
            let mut local_map: HirIdMap<HirId> = HirIdMap::default();
            let eq_fallback = |a_typeck_results: &TypeckResults<'tcx>,
                               a: &Expr<'_>,
                               b_typeck_results: &TypeckResults<'tcx>,
                               b: &Expr<'_>| {
                if let Some(a_id) = a.res_local_id()
                    && let Some(b_id) = b.res_local_id()
                    && let entry = match local_map.entry(a_id) {
                        HirIdMapEntry::Vacant(entry) => entry,
                        // check if using the same bindings as before
                        HirIdMapEntry::Occupied(entry) => return *entry.get() == b_id,
                    }
                    // the names technically don't have to match; this makes the lint more conservative
                    && cx.tcx.hir_name(a_id) == cx.tcx.hir_name(b_id)
                    && a_typeck_results.expr_ty(a) == b_typeck_results.expr_ty(b)
                    && pat_contains_local(lhs.pat, a_id)
                    && pat_contains_local(rhs.pat, b_id)
                {
                    entry.insert(b_id);
                    true
                } else {
                    false
                }
            };

            SpanlessEq::new(cx)
                .expr_fallback(eq_fallback)
                .eq_expr(expr_a, expr_b)
                // these checks could be removed to allow unused bindings
                && bindings_eq(lhs.pat, local_map.keys().copied().collect())
                && bindings_eq(rhs.pat, local_map.values().copied().collect())
        };

        let check_same_guard = || match (&lhs.guard, &rhs.guard) {
            (None, None) => true,
            (Some(lhs_guard), Some(rhs_guard)) => check_eq_with_pat(lhs_guard, rhs_guard),
            _ => false,
        };

        let check_same_body = || check_eq_with_pat(lhs.body, rhs.body);

        // Arms with different guard are ignored, those can’t always be merged together
        // If both arms overlap with an arm in between then these can't be merged either.
        // (`min_index`/`max_index` come from `indexed_arms` below, so both are
        // valid indices into the blocking-index Vecs.)
        !(backwards_blocking_idxs[max_index] > min_index && forwards_blocking_idxs[min_index] < max_index)
            && check_same_guard()
            && check_same_body()
    };

    let indexed_arms: Vec<(usize, &Arm<'_>)> = arms.iter().enumerate().collect();
    for mut group in search_same(&indexed_arms, hash, eq) {
        // Filter out (and fulfill) `#[allow]`ed and `#[expect]`ed arms
        group.retain(|(_, arm)| !fulfill_or_allowed(cx, MATCH_SAME_ARMS, [arm.hir_id]));

        if group.len() < 2 {
            continue;
        }

        span_lint_and_then(
            cx,
            MATCH_SAME_ARMS,
            group.iter().map(|(_, arm)| arm.span).collect_vec(),
            "these match arms have identical bodies",
            |diag| {
                diag.help("if this is unintentional make the arms return different values");

                // Case 1: the last arm of the group is a wildcard — suggest simply
                // deleting the earlier, redundant arms. Only when the
                // `non_exhaustive_omitted_patterns` lint is allowed, since removing
                // arms could otherwise trigger it.
                if let [prev @ .., (_, last)] = group.as_slice()
                    && is_wildcard_arm(last.pat)
                    && is_lint_allowed(cx, NON_EXHAUSTIVE_OMITTED_PATTERNS, last.hir_id)
                {
                    diag.span_label(last.span, "the wildcard arm");

                    let s = if prev.len() > 1 { "s" } else { "" };
                    diag.multipart_suggestion(
                        format!("otherwise remove the non-wildcard arm{s}"),
                        prev.iter()
                            .map(|(_, arm)| (adjusted_arm_span(cx, arm.span), String::new()))
                            .collect(),
                        Applicability::MaybeIncorrect,
                    );
                } else if let &[&(first_idx, _), .., &(last_idx, _)] = group.as_slice() {
                    // Case 2: suggest merging all the group's patterns into one
                    // `|` pattern. Pick the destination arm (first or last) so
                    // that the merge doesn't move any arm past one it overlaps.
                    let back_block = backwards_blocking_idxs[last_idx];
                    let split = if back_block < first_idx
                        || (back_block == 0 && forwards_blocking_idxs[first_idx] <= last_idx)
                    {
                        group.split_first()
                    } else {
                        group.split_last()
                    };

                    if let Some(((_, dest), src)) = split
                        && let Some(pat_snippets) = group
                            .iter()
                            .map(|(_, arm)| arm.pat.span.get_source_text(cx))
                            .collect::<Option<Vec<_>>>()
                    {
                        // Delete every source arm and rewrite the destination
                        // arm's pattern as the `|`-join of all patterns.
                        let suggs = src
                            .iter()
                            .map(|(_, arm)| (adjusted_arm_span(cx, arm.span), String::new()))
                            .chain([(dest.pat.span, pat_snippets.iter().join(" | "))])
                            .collect_vec();

                        diag.multipart_suggestion(
                            "otherwise merge the patterns into a single arm",
                            suggs,
                            Applicability::MaybeIncorrect,
                        );
                    }
                }
            },
        );
    }
}

/// Extend arm's span to include the comma and whitespaces after it.
fn adjusted_arm_span(cx: &LateContext<'_>, span: Span) -> Span {
    let source_map = cx.sess().source_map();
    source_map
        .span_extend_while(span, |c| c == ',' || c.is_ascii_whitespace())
        .unwrap_or(span)
}

/// A simplified form of a pattern, arena-allocated, used only to answer
/// "can these two patterns match overlapping sets of values?"
/// (see [`NormalizedPat::has_overlapping_values`]). Patterns that can't be
/// normalized precisely are conservatively widened to `Wild`.
#[derive(Clone, Copy)]
enum NormalizedPat<'a> {
    Wild,
    Never,
    Struct(Option<DefId>, &'a [(Symbol, Self)]),
    Tuple(Option<DefId>, &'a [Self]),
    Or(&'a [Self]),
    Path(Option<DefId>),
    LitStr(Symbol),
    LitBytes(ByteSymbol),
    LitInt(u128),
    LitBool(bool),
    Range(PatRange),
    /// A slice pattern. If the second value is `None`, then this matches an exact size. Otherwise
    /// the first value contains everything before the `..` wildcard pattern, and the second value
    /// contains everything afterwards. Note that either side, or both sides, may contain zero
    /// patterns.
    Slice(&'a [Self], Option<&'a [Self]>),
    /// A placeholder for a pattern that wasn't well formed in some way.
    Err(ErrorGuaranteed),
}

/// A numeric range pattern with its endpoints widened to `u128`
/// (int/char/byte literals all normalize into this — see `from_pat`).
#[derive(Clone, Copy)]
struct PatRange {
    start: u128,
    end: u128,
    bounds: RangeEnd,
}
impl PatRange {
    /// Whether `x` lies within this range, honoring the inclusive/exclusive bound.
    fn contains(&self, x: u128) -> bool {
        x >= self.start
            && match self.bounds {
                RangeEnd::Included => x <= self.end,
                RangeEnd::Excluded => x < self.end,
            }
    }

    /// Whether the two ranges share at least one value.
    fn overlaps(&self, other: &Self) -> bool {
        // Note: Empty ranges are impossible, so this is correct even though it would return true if an
        // empty exclusive range were to reside within an inclusive range.
        (match self.bounds {
            RangeEnd::Included => self.end >= other.start,
            RangeEnd::Excluded => self.end > other.start,
        } && match other.bounds {
            RangeEnd::Included => self.start <= other.end,
            RangeEnd::Excluded => self.start < other.end,
        })
    }
}

/// Iterates over the pairs of fields with matching names.
fn iter_matching_struct_fields<'a>(
    left: &'a [(Symbol, NormalizedPat<'a>)],
    right: &'a [(Symbol, NormalizedPat<'a>)],
) -> impl Iterator<Item = (&'a NormalizedPat<'a>, &'a NormalizedPat<'a>)> + 'a {
    // Merge-join over two sorted field lists; only names present in both sides
    // produce a pair.
    struct Iter<'a>(
        slice::Iter<'a, (Symbol, NormalizedPat<'a>)>,
        slice::Iter<'a, (Symbol, NormalizedPat<'a>)>,
    );
    impl<'a> Iterator for Iter<'a> {
        type Item = (&'a NormalizedPat<'a>, &'a NormalizedPat<'a>);
        fn next(&mut self) -> Option<Self::Item> {
            // Note: all the fields in each slice are sorted by symbol value.
            let mut left = self.0.next()?;
            let mut right = self.1.next()?;
            loop {
                match left.0.cmp(&right.0) {
                    Ordering::Equal => return Some((&left.1, &right.1)),
                    Ordering::Less => left = self.0.next()?,
                    Ordering::Greater => right = self.1.next()?,
                }
            }
        }
    }
    Iter(left.iter(), right.iter())
}

#[expect(clippy::similar_names, clippy::too_many_lines)]
impl<'a> NormalizedPat<'a> {
    /// Lowers a HIR pattern into a `NormalizedPat`, allocating sub-patterns in
    /// `arena`. Bindings, boxes, derefs, refs and guards are stripped; anything
    /// that can't be represented (e.g. float literals, negative literals,
    /// untypeable patterns) falls back to `Self::Wild`.
    fn from_pat(cx: &LateContext<'_>, arena: &'a DroplessArena, pat: &'a Pat<'_>) -> Self {
        match pat.kind {
            PatKind::Missing => unreachable!(),
            PatKind::Wild | PatKind::Binding(.., None) => Self::Wild,
            PatKind::Binding(.., Some(pat))
            | PatKind::Box(pat)
            | PatKind::Deref(pat)
            | PatKind::Ref(pat, _, _)
            | PatKind::Guard(pat, _) => Self::from_pat(cx, arena, pat),
            PatKind::Never => Self::Never,
            PatKind::Struct(ref path, fields, _) => {
                // Sort fields by name so `iter_matching_struct_fields` can merge-join.
                let fields =
                    arena.alloc_from_iter(fields.iter().map(|f| (f.ident.name, Self::from_pat(cx, arena, f.pat))));
                fields.sort_by_key(|&(name, _)| name);
                Self::Struct(cx.qpath_res(path, pat.hir_id).opt_def_id(), fields)
            },
            PatKind::TupleStruct(ref path, pats, wild_idx) => {
                let Some(adt) = cx.typeck_results().pat_ty(pat).ty_adt_def() else {
                    return Self::Wild;
                };
                let (var_id, variant) = if adt.is_enum() {
                    match cx.qpath_res(path, pat.hir_id).opt_def_id() {
                        Some(x) => (Some(x), adt.variant_with_ctor_id(x)),
                        None => return Self::Wild,
                    }
                } else {
                    (None, adt.non_enum_variant())
                };
                // Expand a `..` rest pattern into explicit `Wild`s so the tuple
                // always has exactly `variant.fields.len()` sub-patterns.
                let (front, back) = match wild_idx.as_opt_usize() {
                    Some(i) => pats.split_at(i),
                    None => (pats, [].as_slice()),
                };
                let pats = arena.alloc_from_iter(
                    front
                        .iter()
                        .map(|pat| Self::from_pat(cx, arena, pat))
                        .chain(iter::repeat_with(|| Self::Wild).take(variant.fields.len() - pats.len()))
                        .chain(back.iter().map(|pat| Self::from_pat(cx, arena, pat))),
                );
                Self::Tuple(var_id, pats)
            },
            PatKind::Or(pats) => Self::Or(arena.alloc_from_iter(pats.iter().map(|pat| Self::from_pat(cx, arena, pat)))),
            PatKind::Expr(PatExpr {
                kind: PatExprKind::Path(path),
                hir_id,
                ..
            }) => Self::Path(cx.qpath_res(path, *hir_id).opt_def_id()),
            PatKind::Tuple(pats, wild_idx) => {
                let field_count = match cx.typeck_results().pat_ty(pat).kind() {
                    ty::Tuple(subs) => subs.len(),
                    _ => return Self::Wild,
                };
                // Same `..`-expansion as the `TupleStruct` arm above.
                let (front, back) = match wild_idx.as_opt_usize() {
                    Some(i) => pats.split_at(i),
                    None => (pats, [].as_slice()),
                };
                let pats = arena.alloc_from_iter(
                    front
                        .iter()
                        .map(|pat| Self::from_pat(cx, arena, pat))
                        .chain(iter::repeat_with(|| Self::Wild).take(field_count - pats.len()))
                        .chain(back.iter().map(|pat| Self::from_pat(cx, arena, pat))),
                );
                Self::Tuple(None, pats)
            },
            PatKind::Expr(e) => match &e.kind {
                // TODO: Handle negative integers. They're currently treated as a wild match.
                PatExprKind::Lit { lit, negated: false } => match lit.node {
                    LitKind::Str(sym, _) => Self::LitStr(sym),
                    LitKind::ByteStr(byte_sym, _) | LitKind::CStr(byte_sym, _) => Self::LitBytes(byte_sym),
                    LitKind::Byte(val) => Self::LitInt(val.into()),
                    LitKind::Char(val) => Self::LitInt(val.into()),
                    LitKind::Int(val, _) => Self::LitInt(val.get()),
                    LitKind::Bool(val) => Self::LitBool(val),
                    LitKind::Float(..) => Self::Wild,
                    LitKind::Err(guar) => Self::Err(guar),
                },
                _ => Self::Wild,
            },
            PatKind::Range(start, end, bounds) => {
                // TODO: Handle negative integers. They're currently treated as a wild match.
                let start = match start {
                    None => 0,
                    Some(e) => match &e.kind {
                        PatExprKind::Lit { lit, negated: false } => match lit.node {
                            LitKind::Int(val, _) => val.get(),
                            LitKind::Char(val) => val.into(),
                            LitKind::Byte(val) => val.into(),
                            _ => return Self::Wild,
                        },
                        _ => return Self::Wild,
                    },
                };
                // An open upper end (`start..`) normalizes to `..=u128::MAX`.
                let (end, bounds) = match end {
                    None => (u128::MAX, RangeEnd::Included),
                    Some(e) => match &e.kind {
                        PatExprKind::Lit { lit, negated: false } => match lit.node {
                            LitKind::Int(val, _) => (val.get(), bounds),
                            LitKind::Char(val) => (val.into(), bounds),
                            LitKind::Byte(val) => (val.into(), bounds),
                            _ => return Self::Wild,
                        },
                        _ => return Self::Wild,
                    },
                };
                Self::Range(PatRange { start, end, bounds })
            },
            PatKind::Slice(front, wild_pat, back) => Self::Slice(
                arena.alloc_from_iter(front.iter().map(|pat| Self::from_pat(cx, arena, pat))),
                wild_pat.map(|_| &*arena.alloc_from_iter(back.iter().map(|pat| Self::from_pat(cx, arena, pat)))),
            ),
            PatKind::Err(guar) => Self::Err(guar),
        }
    }

    /// Checks if two patterns overlap in the values they can match assuming they are for the same
    /// type.
    fn has_overlapping_values(&self, other: &Self) -> bool {
        match (*self, *other) {
            (Self::Wild, _) | (_, Self::Wild) | (Self::Never, Self::Never) => true,
            (Self::Or(pats), ref other) | (ref other, Self::Or(pats)) => {
                pats.iter().any(|pat| pat.has_overlapping_values(other))
            },
            (Self::Struct(lpath, lfields), Self::Struct(rpath, rfields)) => {
                if lpath != rpath {
                    return false;
                }
                iter_matching_struct_fields(lfields, rfields).all(|(lpat, rpat)| lpat.has_overlapping_values(rpat))
            },
            (Self::Tuple(lpath, lpats), Self::Tuple(rpath, rpats)) => {
                if lpath != rpath {
                    return false;
                }
                iter::zip(lpats, rpats).all(|(lpat, rpat)| lpat.has_overlapping_values(rpat))
            },
            (Self::Path(x), Self::Path(y)) => x == y,
            (Self::LitStr(x), Self::LitStr(y)) => x == y,
            (Self::LitBytes(x), Self::LitBytes(y)) => x == y,
            (Self::LitInt(x), Self::LitInt(y)) => x == y,
            (Self::LitBool(x), Self::LitBool(y)) => x == y,
            (Self::Range(ref x), Self::Range(ref y)) => x.overlaps(y),
            (Self::Range(ref range), Self::LitInt(x)) | (Self::LitInt(x), Self::Range(ref range)) => range.contains(x),
            (Self::Slice(lpats, None), Self::Slice(rpats, None)) => {
                lpats.len() == rpats.len() && iter::zip(lpats, rpats).all(|(x, y)| x.has_overlapping_values(y))
            },
            (Self::Slice(pats, None), Self::Slice(front, Some(back)))
            | (Self::Slice(front, Some(back)), Self::Slice(pats, None)) => {
                // Here `pats` is an exact size match. If the combined lengths of `front` and `back` are greater
                // then the minimum length required will be greater than the length of `pats`.
                // (That length check also keeps the slice indexing below in-bounds.)
                if pats.len() < front.len() + back.len() {
                    return false;
                }
                iter::zip(&pats[..front.len()], front)
                    .chain(iter::zip(&pats[pats.len() - back.len()..], back))
                    .all(|(x, y)| x.has_overlapping_values(y))
            },
            (Self::Slice(lfront, Some(lback)), Self::Slice(rfront, Some(rback))) => iter::zip(lfront, rfront)
                .chain(iter::zip(lback.iter().rev(), rback.iter().rev()))
                .all(|(x, y)| x.has_overlapping_values(y)),

            // Enums can mix unit variants with tuple/struct variants. These can never overlap.
            (Self::Path(_), Self::Tuple(..) | Self::Struct(..))
            | (Self::Tuple(..) | Self::Struct(..), Self::Path(_)) => false,

            // Tuples can be matched like a struct.
            (Self::Tuple(x, _), Self::Struct(y, _)) | (Self::Struct(x, _), Self::Tuple(y, _)) => {
                // TODO: check fields here.
                x == y
            },

            // TODO: Lit* with Path, Range with Path, LitBytes with Slice
            _ => true,
        }
    }
}

/// Returns true if `pat` contains a binding for the local `id`.
fn pat_contains_local(pat: &Pat<'_>, id: HirId) -> bool {
    let mut result = false;
    // `walk_short` stops descending as soon as the closure returns `false`,
    // i.e. as soon as the binding is found.
    pat.walk_short(|p| {
        result |= matches!(p.kind, PatKind::Binding(_, binding_id, ..) if binding_id == id);
        !result
    });
    result
}

/// Returns true if all the bindings in the `Pat` are in `ids` and vice versa
fn bindings_eq(pat: &Pat<'_>, mut ids: HirIdSet) -> bool {
    let mut result = true;
    // FIXME(rust/#120456) - is `swap_remove` correct?
    pat.each_binding_or_first(&mut |_, id, _, _| result &= ids.swap_remove(&id));
    result && ids.is_empty()
}

/// Returns true if the arm's pattern is `_`, or an or-pattern whose last
/// alternative is `_`.
fn is_wildcard_arm(pat: &Pat<'_>) -> bool {
    match pat.kind {
        PatKind::Wild => true,
        PatKind::Or([.., last]) => matches!(last.kind, PatKind::Wild),
        _ => false,
    }
}
Code quality findings: 25 (note: several "unchecked-indexing" entries below are false positives — they point at enum variant declarations and function signatures that contain no indexing; the remaining index expressions use indices produced by `enumerate()` over same-length vectors and cannot go out of bounds)
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>]) {
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
(normalized_pats[i + 1..].iter().enumerate())
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
normalized_pats[..i].iter().enumerate().rev(),
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
forwards_blocking_idxs[..i].iter().copied().rev(),
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
!(backwards_blocking_idxs[max_index] > min_index && forwards_blocking_idxs[min_index] < max_index)
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
if let [prev @ .., (_, last)] = group.as_slice()
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
let back_block = backwards_blocking_idxs[last_idx];
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
|| (back_block == 0 && forwards_blocking_idxs[first_idx] <= last_idx)
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
Struct(Option<DefId>, &'a [(Symbol, Self)]),
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
Tuple(Option<DefId>, &'a [Self]),
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
Or(&'a [Self]),
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
Slice(&'a [Self], Option<&'a [Self]>),
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
left: &'a [(Symbol, NormalizedPat<'a>)],
Warning: Direct indexing (e.g., `vec[i]`, `slice[i]`) panics on out-of-bounds access. Prefer using `.get(index)` or `.get_mut(index)` which return Option<&T>/Option<&mut T>.
warning
correctness
unchecked-indexing
right: &'a [(Symbol, NormalizedPat<'a>)],
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
let check_same_guard = || match (&lhs.guard, &rhs.guard) {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
let field_count = match cx.typeck_results().pat_ty(pat).kind() {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
let (front, back) = match wild_idx.as_opt_usize() {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
PatExprKind::Lit { lit, negated: false } => match lit.node {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
let start = match start {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
Some(e) => match &e.kind {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
PatExprKind::Lit { lit, negated: false } => match lit.node {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
let (end, bounds) = match end {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
Some(e) => match &e.kind {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
PatExprKind::Lit { lit, negated: false } => match lit.node {
Info: Ensure 'match' statements are exhaustive. If matching on enums, consider adding a wildcard arm `_ => {}` only if necessary and intentional, as it suppresses warnings about unhandled variants.
info
correctness
match-wildcard
match pat.kind {