Auto merge of #54859 - pietroalbini:rollup, r=pietroalbini
Rollup of 11 pull requests

Successful merges:

 - #54078 (Expand the documentation for the `std::sync` module)
 - #54717 (Cleanup rustc/ty part 1)
 - #54781 (Add examples to `TyKind::FnDef` and `TyKind::FnPtr` docs)
 - #54787 (Only warn about unused `mut` in user-written code)
 - #54804 (add suggestion for inverted function parameters)
 - #54812 (Regression test for #32382.)
 - #54833 (make `Parser::parse_foreign_item()` return a foreign item or error)
 - #54834 (rustdoc: overflow:auto doesn't work nicely on small screens)
 - #54838 (Fix typo in src/libsyntax/parse/parser.rs)
 - #54851 (Fix a regression in 1.30 by reverting #53564)
 - #54853 (Remove unneccessary error from test, revealing NLL error.)

Failed merges:

r? @ghost
bors committed Oct 6, 2018
2 parents fddcd31 + 51334c9 commit ac841e7
Showing 31 changed files with 796 additions and 503 deletions.
52 changes: 5 additions & 47 deletions src/liballoc/collections/vec_deque.rs
@@ -19,7 +19,6 @@

use core::cmp::Ordering;
use core::fmt;
use core::isize;
use core::iter::{repeat, FromIterator, FusedIterator};
use core::mem;
use core::ops::Bound::{Excluded, Included, Unbounded};
@@ -203,33 +202,6 @@ impl<T> VecDeque<T> {
len);
}

/// Copies all values from `src` to the back of `self`, wrapping around if needed.
///
/// # Safety
///
/// The capacity must be sufficient to hold self.len() + src.len() elements.
/// If so, this function never panics.
#[inline]
unsafe fn copy_slice(&mut self, src: &[T]) {
/// This is guaranteed by `RawVec`.
debug_assert!(self.capacity() <= isize::MAX as usize);

let expected_new_len = self.len() + src.len();
debug_assert!(self.capacity() >= expected_new_len);

let dst_high_ptr = self.ptr().add(self.head);
let dst_high_len = self.cap() - self.head;

let split = cmp::min(src.len(), dst_high_len);
let (src_high, src_low) = src.split_at(split);

ptr::copy_nonoverlapping(src_high.as_ptr(), dst_high_ptr, src_high.len());
ptr::copy_nonoverlapping(src_low.as_ptr(), self.ptr(), src_low.len());

self.head = self.wrap_add(self.head, src.len());
debug_assert!(self.len() == expected_new_len);
}

/// Copies a potentially wrapping block of memory len long from src to dest.
/// (abs(dst - src) + len) must be no larger than cap() (There must be at
/// most one continuous overlapping region between src and dest).
@@ -1052,7 +1024,7 @@ impl<T> VecDeque<T> {
iter: Iter {
tail: drain_tail,
head: drain_head,
ring: unsafe { self.buffer_as_slice() },
ring: unsafe { self.buffer_as_mut_slice() },
},
}
}
@@ -1862,22 +1834,8 @@ impl<T> VecDeque<T> {
#[inline]
#[stable(feature = "append", since = "1.4.0")]
pub fn append(&mut self, other: &mut Self) {
unsafe {
// Guarantees there is space in `self` for `other`.
self.reserve(other.len());

{
let (src_high, src_low) = other.as_slices();

// This is only safe because copy_slice never panics when capacity is sufficient.
self.copy_slice(src_low);
self.copy_slice(src_high);
}

// Some values now exist in both `other` and `self` but are made inaccessible
// in`other`.
other.tail = other.head;
}
// naive impl
self.extend(other.drain(..));
}

/// Retains only the elements specified by the predicate.
@@ -2635,8 +2593,8 @@ impl<T> From<VecDeque<T>> for Vec<T> {
let mut right_offset = 0;
for i in left_edge..right_edge {
right_offset = (i - left_edge) % (cap - right_edge);
let src = right_edge + right_offset;
ptr::swap(buf.add(i), buf.add(src));
let src: isize = (right_edge + right_offset) as isize;
ptr::swap(buf.add(i), buf.offset(src));
}
let n_ops = right_edge - left_edge;
left_edge += n_ops;
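For context on the `append` change above: #54851 reverts the optimized `copy_slice` path added in #53564, so `append` falls back to the naive `self.extend(other.drain(..))`. A minimal standalone sketch of the caller-visible behavior (illustrative values, not part of the diff):

use std::collections::VecDeque;

fn main() {
    let mut a: VecDeque<i32> = (1..=3).collect();
    let mut b: VecDeque<i32> = (4..=6).collect();

    // `append` moves every element of `b` onto the back of `a`,
    // leaving `b` empty; the reverted body does this via
    // `self.extend(other.drain(..))`.
    a.append(&mut b);

    assert_eq!(a, [1, 2, 3, 4, 5, 6]);
    assert!(b.is_empty());
}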
25 changes: 16 additions & 9 deletions src/librustc/hir/lowering.rs
@@ -4132,16 +4132,16 @@ impl<'a> LoweringContext<'a> {
// expand <head>
let head = self.lower_expr(head);
let head_sp = head.span;
let desugared_span = self.allow_internal_unstable(
CompilerDesugaringKind::ForLoop,
head_sp,
);

let iter = self.str_to_ident("iter");

let next_ident = self.str_to_ident("__next");
let next_sp = self.allow_internal_unstable(
CompilerDesugaringKind::ForLoop,
head_sp,
);
let next_pat = self.pat_ident_binding_mode(
next_sp,
desugared_span,
next_ident,
hir::BindingAnnotation::Mutable,
);
@@ -4170,8 +4170,11 @@ impl<'a> LoweringContext<'a> {
};

// `mut iter`
let iter_pat =
self.pat_ident_binding_mode(head_sp, iter, hir::BindingAnnotation::Mutable);
let iter_pat = self.pat_ident_binding_mode(
desugared_span,
iter,
hir::BindingAnnotation::Mutable
);

// `match ::std::iter::Iterator::next(&mut iter) { ... }`
let match_expr = {
@@ -4200,8 +4203,12 @@ impl<'a> LoweringContext<'a> {
let next_expr = P(self.expr_ident(head_sp, next_ident, next_pat.id));

// `let mut __next`
let next_let =
self.stmt_let_pat(head_sp, None, next_pat, hir::LocalSource::ForLoopDesugar);
let next_let = self.stmt_let_pat(
desugared_span,
None,
next_pat,
hir::LocalSource::ForLoopDesugar,
);

// `let <pat> = __next`
let pat = self.lower_pat(pat);
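The `desugared_span` introduced above marks the compiler-generated `mut iter` and `mut __next` bindings as products of the `for`-loop desugaring, which lets #54787 limit the unused-`mut` warning to user-written code. A rough hand-written approximation of that desugaring (a sketch for illustration, not the compiler's exact HIR output):

fn main() {
    let xs = vec![1, 2, 3];

    // Hand-written equivalent of `for x in xs { println!("{}", x) }`.
    // `mut iter` and `mut __next` are the bindings the compiler introduces;
    // their spans are the ones now built from `desugared_span`.
    match IntoIterator::into_iter(xs) {
        mut iter => loop {
            let mut __next;
            match Iterator::next(&mut iter) {
                Some(val) => __next = val,
                None => break,
            }
            let x = __next;
            println!("{}", x);
        },
    };
}

A hand-written `mut __next` like this may itself draw an unused-`mut` warning, which is the kind of noise the span change avoids for the compiler-generated bindings.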
22 changes: 17 additions & 5 deletions src/librustc/traits/error_reporting.rs
@@ -46,7 +46,7 @@ use ty::subst::Subst;
use ty::SubtypePredicate;
use util::nodemap::{FxHashMap, FxHashSet};

use syntax_pos::{DUMMY_SP, Span};
use syntax_pos::{DUMMY_SP, Span, ExpnInfo, ExpnFormat};

impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
pub fn report_fulfillment_errors(&self,
@@ -68,18 +68,30 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
}).collect();

for (index, error) in errors.iter().enumerate() {
error_map.entry(error.obligation.cause.span).or_default().push(
// We want to ignore desugarings here: spans are equivalent even
// if one is the result of a desugaring and the other is not.
let mut span = error.obligation.cause.span;
if let Some(ExpnInfo {
format: ExpnFormat::CompilerDesugaring(_),
def_site: Some(def_span),
..
}) = span.ctxt().outer().expn_info() {
span = def_span;
}

error_map.entry(span).or_default().push(
ErrorDescriptor {
predicate: error.obligation.predicate.clone(),
index: Some(index)
});
}
);

self.reported_trait_errors.borrow_mut()
.entry(error.obligation.cause.span).or_default()
.entry(span).or_default()
.push(error.obligation.predicate.clone());
}

// We do this in 2 passes because we want to display errors in order, tho
// We do this in 2 passes because we want to display errors in order, though
// maybe it *is* better to sort errors by span or something.
let mut is_suppressed = vec![false; errors.len()];
for (_, error_set) in error_map.iter() {
28 changes: 14 additions & 14 deletions src/librustc/ty/codec.rs
@@ -178,19 +178,19 @@ pub fn decode_predicates<'a, 'tcx, D>(decoder: &mut D)
Ok(ty::GenericPredicates {
parent: Decodable::decode(decoder)?,
predicates: (0..decoder.read_usize()?).map(|_| {
// Handle shorthands first, if we have an usize > 0x80.
let predicate = if decoder.positioned_at_shorthand() {
let pos = decoder.read_usize()?;
assert!(pos >= SHORTHAND_OFFSET);
let shorthand = pos - SHORTHAND_OFFSET;

decoder.with_position(shorthand, ty::Predicate::decode)
} else {
ty::Predicate::decode(decoder)
}?;
Ok((predicate, Decodable::decode(decoder)?))
})
.collect::<Result<Vec<_>, _>>()?,
// Handle shorthands first, if we have an usize > 0x80.
let predicate = if decoder.positioned_at_shorthand() {
let pos = decoder.read_usize()?;
assert!(pos >= SHORTHAND_OFFSET);
let shorthand = pos - SHORTHAND_OFFSET;

decoder.with_position(shorthand, ty::Predicate::decode)
} else {
ty::Predicate::decode(decoder)
}?;
Ok((predicate, Decodable::decode(decoder)?))
})
.collect::<Result<Vec<_>, _>>()?,
})
}
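The shorthand logic re-indented above is a back-reference scheme: a read value at or above `SHORTHAND_OFFSET` is not a literal but the position of an earlier, identical predicate, which is decoded by temporarily jumping to that position. A toy sketch of the same idea (names and layout here are assumptions for illustration, not rustc's `TyDecoder` API):

const SHORTHAND_OFFSET: usize = 0x80;

struct ToyDecoder<'a> {
    data: &'a [usize],
    pos: usize,
}

impl<'a> ToyDecoder<'a> {
    fn read_usize(&mut self) -> usize {
        let v = self.data[self.pos];
        self.pos += 1;
        v
    }

    // A value >= SHORTHAND_OFFSET encodes the position of a previously
    // decoded item (offset by SHORTHAND_OFFSET): jump there, decode it,
    // then restore the original position, mirroring `with_position` above.
    fn decode_item(&mut self) -> usize {
        let v = self.read_usize();
        if v >= SHORTHAND_OFFSET {
            let saved = self.pos;
            self.pos = v - SHORTHAND_OFFSET;
            let item = self.read_usize();
            self.pos = saved;
            item
        } else {
            v
        }
    }
}

fn main() {
    // Item 7 at position 0, then a shorthand (0x80 + 0) referring back to it.
    let data = [7usize, 0x80];
    let mut d = ToyDecoder { data: &data, pos: 0 };
    assert_eq!(d.decode_item(), 7);
    assert_eq!(d.decode_item(), 7);
}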

@@ -267,7 +267,7 @@ pub fn decode_const<'a, 'tcx, D>(decoder: &mut D)

#[inline]
pub fn decode_allocation<'a, 'tcx, D>(decoder: &mut D)
-> Result<&'tcx Allocation, D::Error>
-> Result<&'tcx Allocation, D::Error>
where D: TyDecoder<'a, 'tcx>,
'tcx: 'a,
{
44 changes: 20 additions & 24 deletions src/librustc/ty/context.rs
@@ -190,8 +190,8 @@ impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
// types/regions in the global interner
if local as *const _ as usize == global as *const _ as usize {
bug!("Attempted to intern `{:?}` which contains \
inference types/regions in the global type context",
&ty_struct);
inference types/regions in the global type context",
&ty_struct);
}

// Don't be &mut TyS.
@@ -272,9 +272,9 @@ fn validate_hir_id_for_typeck_tables(local_id_root: Option<DefId>,

bug!("node {} with HirId::owner {:?} cannot be placed in \
TypeckTables with local_id_root {:?}",
tcx.hir.node_to_string(node_id),
DefId::local(hir_id.owner),
local_id_root)
tcx.hir.node_to_string(node_id),
DefId::local(hir_id.owner),
local_id_root)
});
}
} else {
@@ -540,16 +540,13 @@ impl<'tcx> TypeckTables<'tcx> {
}

pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
match self.node_id_to_type_opt(id) {
Some(ty) => ty,
None => {
bug!("node_id_to_type: no type for node `{}`",
tls::with(|tcx| {
let id = tcx.hir.hir_to_node_id(id);
tcx.hir.node_to_string(id)
}))
}
}
self.node_id_to_type_opt(id).unwrap_or_else(||
bug!("node_id_to_type: no type for node `{}`",
tls::with(|tcx| {
let id = tcx.hir.hir_to_node_id(id);
tcx.hir.node_to_string(id)
}))
)
}

pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
@@ -686,7 +683,7 @@ impl<'tcx> TypeckTables<'tcx> {
}

pub fn pat_adjustments_mut(&mut self)
-> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
-> LocalTableInContextMut<'_, Vec<Ty<'tcx>>> {
LocalTableInContextMut {
local_id_root: self.local_id_root,
data: &mut self.pat_adjustments,
@@ -1199,8 +1196,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
let hir_id = hir.node_to_hir_id(k);
let map = trait_map.entry(hir_id.owner).or_default();
Lrc::get_mut(map).unwrap()
.insert(hir_id.local_id,
Lrc::new(StableVec::new(v)));
.insert(hir_id.local_id,
Lrc::new(StableVec::new(v)));
}

let gcx = &GlobalCtxt {
@@ -2188,7 +2185,6 @@ macro_rules! sty_debug_print {
};
$(let mut $variant = total;)*


for &Interned(t) in tcx.interners.type_.borrow().iter() {
let variant = match t.sty {
ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
@@ -2207,7 +2203,7 @@
}
println!("Ty interner total ty region both");
$(println!(" {:18}: {uses:6} {usespc:4.1}%, \
{ty:4.1}% {region:5.1}% {both:4.1}%",
{ty:4.1}% {region:5.1}% {both:4.1}%",
stringify!($variant),
uses = $variant.total,
usespc = $variant.total as f64 * 100.0 / total.total as f64,
@@ -2216,7 +2212,7 @@
both = $variant.both_infer as f64 * 100.0 / total.total as f64);
)*
println!(" total {uses:6} \
{ty:4.1}% {region:5.1}% {both:4.1}%",
{ty:4.1}% {region:5.1}% {both:4.1}%",
uses = total.total,
ty = total.ty_infer as f64 * 100.0 / total.total as f64,
region = total.region_infer as f64 * 100.0 / total.total as f64,
@@ -2653,7 +2649,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
}

pub fn mk_closure(self, closure_id: DefId, closure_substs: ClosureSubsts<'tcx>)
-> Ty<'tcx> {
-> Ty<'tcx> {
self.mk_ty(Closure(closure_id, closure_substs))
}

@@ -2686,8 +2682,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
}

pub fn mk_ty_param(self,
index: u32,
name: InternedString) -> Ty<'tcx> {
index: u32,
name: InternedString) -> Ty<'tcx> {
self.mk_ty(Param(ParamTy { idx: index, name: name }))
}

(Diffs for the remaining 26 changed files are not shown here.)