compiler/rustc_borrowck/src/lib.rs (2 changes: 1 addition & 1 deletion)
@@ -528,7 +528,7 @@ impl<'cx, 'tcx> BorrowckInferCtxt<'cx, 'tcx> {
where
F: Fn() -> RegionCtxt,
{
-        let next_region = self.infcx.next_nll_region_var(origin.clone());
+        let next_region = self.infcx.next_nll_region_var(origin);
let vid = next_region.as_var();

if cfg!(debug_assertions) && !self.inside_canonicalization_ctxt() {
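Note: the change drops a `.clone()` on the `origin` argument passed by value to `next_nll_region_var`. A minimal sketch of the pattern, using a hypothetical `Copy` type rather than the compiler's `NllRegionVariableOrigin`: cloning a value immediately before a by-value call is redundant when the type is `Copy` or the original is not used afterwards (this is the pattern clippy's `redundant_clone` lint describes).

```rust
// Hypothetical stand-in for the origin type; not the compiler's
// `NllRegionVariableOrigin`. Cloning right before a by-value call
// adds nothing when the value is `Copy` or no longer needed.
#[derive(Clone, Copy, Debug)]
struct Origin(u32);

fn next_region_var(origin: Origin) -> u32 {
    origin.0 + 1
}

fn main() {
    let origin = Origin(7);
    // Before: next_region_var(origin.clone());
    // After: pass it directly.
    let vid = next_region_var(origin);
    println!("{vid}");
}
```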
compiler/rustc_hir_analysis/src/collect/predicates_of.rs (5 changes: 2 additions & 3 deletions)
@@ -645,9 +645,8 @@ pub(super) fn implied_predicates_with_filter(
};

// Combine the two lists to form the complete set of superbounds:
-    let implied_bounds = &*tcx
-        .arena
-        .alloc_from_iter(superbounds.predicates().into_iter().chain(where_bounds_that_match));
+    let implied_bounds =
+        &*tcx.arena.alloc_from_iter(superbounds.predicates().chain(where_bounds_that_match));
debug!(?implied_bounds);

// Now require that immediate supertraits are converted,
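Note: `superbounds.predicates()` is chained directly here, so it already yields an iterator, and calling `.into_iter()` on an iterator is an identity conversion. A rough sketch with ordinary `Vec`s standing in for the arena and predicate types (the pattern clippy's `useless_conversion` lint flags):

```rust
// Rough sketch: `iter()` already returns an iterator, so the extra
// `.into_iter()` is a no-op and can be dropped before `chain`.
fn main() {
    let superbounds = vec!["A: B", "A: C"];
    let where_bounds_that_match = vec!["A: D"];

    // Before: an identity conversion in the middle of the chain.
    let before: Vec<_> =
        superbounds.iter().into_iter().chain(where_bounds_that_match.iter()).collect();
    // After: chain the iterator directly.
    let after: Vec<_> = superbounds.iter().chain(where_bounds_that_match.iter()).collect();

    assert_eq!(before, after);
}
```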
compiler/rustc_hir_typeck/src/fn_ctxt/adjust_fulfillment_errors.rs (30 changes: 13 additions & 17 deletions)
@@ -164,24 +164,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
for param in
[param_to_point_at, fallback_param_to_point_at, self_param_to_point_at]
+        .into_iter()
+        .flatten()
{
-    if let Some(param) = param {
-        let refined_expr = self.point_at_field_if_possible(
-            def_id,
-            param,
-            variant_def_id,
-            fields,
-        );
-
-        match refined_expr {
-            None => {}
-            Some((refined_expr, _)) => {
-                error.obligation.cause.span = refined_expr
-                    .span
-                    .find_ancestor_in_same_ctxt(error.obligation.cause.span)
-                    .unwrap_or(refined_expr.span);
-                return true;
-            }
+    let refined_expr =
+        self.point_at_field_if_possible(def_id, param, variant_def_id, fields);
+
+    match refined_expr {
+        None => {}
+        Some((refined_expr, _)) => {
+            error.obligation.cause.span = refined_expr
+                .span
+                .find_ancestor_in_same_ctxt(error.obligation.cause.span)
+                .unwrap_or(refined_expr.span);
+            return true;
}
}
}
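Note: the rewrite replaces a manual `if let Some(param) = param` inside the loop with `.into_iter().flatten()` on the array of `Option`s, so the loop body only ever sees present values and loses one level of nesting. A minimal sketch with plain integers in place of the compiler's parameter types:

```rust
fn main() {
    let param_a = Some(1);
    let param_b: Option<i32> = None;
    let param_c = Some(3);

    // Before: iterate the Options and unwrap each one by hand.
    // for param in [param_a, param_b, param_c] {
    //     if let Some(param) = param {
    //         println!("{param}");
    //     }
    // }

    // After: `Option` implements `IntoIterator`, so `flatten` skips the `None`s.
    for param in [param_a, param_b, param_c].into_iter().flatten() {
        println!("{param}");
    }
}
```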
compiler/rustc_monomorphize/src/partitioning/default.rs (2 changes: 1 addition & 1 deletion)
@@ -89,7 +89,7 @@ impl<'tcx> Partitioner<'tcx> for DefaultPartitioning {
}

PreInliningPartitioning {
-            codegen_units: codegen_units.into_values().map(|codegen_unit| codegen_unit).collect(),
+            codegen_units: codegen_units.into_values().collect(),
roots,
internalization_candidates,
}
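Note: `map(|codegen_unit| codegen_unit)` is an identity closure, so the call can be removed outright (the pattern clippy's `map_identity` lint describes). A small sketch with a plain `BTreeMap` standing in for the partitioner's codegen-unit map:

```rust
use std::collections::BTreeMap;

fn main() {
    // Hypothetical stand-in for the map of codegen units.
    let codegen_units: BTreeMap<&str, u32> = BTreeMap::from([("cgu.0", 1), ("cgu.1", 2)]);

    // Before: codegen_units.into_values().map(|codegen_unit| codegen_unit).collect()
    // After: the identity `map` adds nothing, so collect the values directly.
    let collected: Vec<u32> = codegen_units.into_values().collect();
    assert_eq!(collected, vec![1, 2]);
}
```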
compiler/rustc_query_impl/src/profiling_support.rs (2 changes: 1 addition & 1 deletion)
@@ -231,7 +231,7 @@ pub(crate) fn alloc_self_profile_query_strings_for_query_cache<'tcx, C>(
// locked while doing so. Instead we copy out the
// `(query_key, dep_node_index)` pairs and release the lock again.
let mut query_keys_and_indices = Vec::new();
-    query_cache.iter(&mut |k, _, i| query_keys_and_indices.push((k.clone(), i)));
+    query_cache.iter(&mut |k, _, i| query_keys_and_indices.push((*k, i)));

// Now actually allocate the strings. If allocating the strings
// generates new entries in the query cache, we'll miss them but
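Note: the callback receives the query key `k` by reference, and since `*k` compiles here the key type must be `Copy`, so dereferencing copies it without the needless `k.clone()` (the pattern clippy's `clone_on_copy` lint flags). A sketch of the same pattern, assuming a `Copy` key type (`u64`) and `(key, index)` pairs like the profiling code collects:

```rust
fn main() {
    // Assumed stand-ins: a `Copy` key type (u64) and dep-node indices (usize).
    let cache: Vec<(u64, usize)> = vec![(100, 0), (200, 1)];
    let mut query_keys_and_indices: Vec<(u64, usize)> = Vec::new();

    cache.iter().for_each(|(k, i)| {
        // Before: query_keys_and_indices.push((k.clone(), *i));
        // After: a plain dereference copies the `Copy` key.
        query_keys_and_indices.push((*k, *i));
    });
    println!("{query_keys_and_indices:?}");
}
```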
src/librustdoc/passes/collect_intra_doc_links.rs (2 changes: 1 addition & 1 deletion)
@@ -1349,7 +1349,7 @@ impl LinkCollector<'_, '_> {
if has_derive_trait_collision {
candidates.macro_ns = None;
}
-            candidates.into_iter().filter_map(|res| res).flatten().collect::<Vec<_>>()
+            candidates.into_iter().flatten().flatten().collect::<Vec<_>>()
}
}
}
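Note: `filter_map(|res| res)` over an iterator of `Option`s is the identity filter-map, and `.flatten()` expresses it directly, which is why the chain becomes `.flatten().flatten()` (the second `flatten` still unnests the inner collections). A rough sketch with hypothetical candidate data in place of the per-namespace resolution results:

```rust
fn main() {
    // Hypothetical candidates: each namespace may or may not hold a list of results.
    let candidates: Vec<Option<Vec<i32>>> = vec![Some(vec![1, 2]), None, Some(vec![3])];

    // Before: filter_map with an identity closure, then flatten the inner lists.
    let before: Vec<i32> =
        candidates.clone().into_iter().filter_map(|res| res).flatten().collect();
    // After: the first `flatten` drops the `None`s, the second unnests the lists.
    let after: Vec<i32> = candidates.into_iter().flatten().flatten().collect();

    assert_eq!(before, after);
}
```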