Auto merge of #80449 - m-ou-se:rollup-kp2e5n8, r=m-ou-se
Rollup of 11 pull requests

Successful merges:

 - #80383 (clarify wrapping ptr arithmetic docs)
 - #80390 (BTreeMap: rename the area access methods)
 - #80393 (Add links to the source for the rustc and rustdoc books.)
 - #80398 (Use raw version of align_of in rc data_offset)
 - #80402 (Document `InferTy` & co.)
 - #80403 (fix: small typo error in chalk/mod.rs)
 - #80410 (rustdoc book: fix example)
 - #80419 (Add regression test for #80375)
 - #80430 (Add "length" as doc alias to len methods)
 - #80431 (Add "chr" as doc alias to char::from_u32)
 - #80448 (Fix stabilization version of deque_range feature.)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors committed Dec 29, 2020
2 parents 2987785 + e3d26e0 commit d75f48e
Showing 25 changed files with 231 additions and 131 deletions.
33 changes: 30 additions & 3 deletions compiler/rustc_middle/src/ty/sty.rs
@@ -1424,28 +1424,33 @@ pub struct EarlyBoundRegion {
pub name: Symbol,
}

/// A **ty**pe **v**ariable **ID**.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
pub struct TyVid {
pub index: u32,
}

/// A **`const`** **v**ariable **ID**.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
pub struct ConstVid<'tcx> {
pub index: u32,
pub phantom: PhantomData<&'tcx ()>,
}

/// An **int**egral (`u32`, `i32`, `usize`, etc.) type **v**ariable **ID**.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
pub struct IntVid {
pub index: u32,
}

/// A **float**ing-point (`f32` or `f64`) type **v**ariable **ID**.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
pub struct FloatVid {
pub index: u32,
}

rustc_index::newtype_index! {
/// A **region** (lifetime) **v**ariable **ID**.
pub struct RegionVid {
DEBUG_FORMAT = custom,
}
@@ -1457,18 +1462,40 @@ impl Atom for RegionVid {
}
}

/// A placeholder for a type that hasn't been inferred yet.
///
/// E.g., if we have an empty array (`[]`), then we create a fresh
/// type variable for the element type since we won't know until it's
/// used what the element type is supposed to be.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub enum InferTy {
/// A type variable.
TyVar(TyVid),
/// An integral type variable (`{integer}`).
///
/// These are created when the compiler sees an integer literal like
/// `1` that could be several different types (`u8`, `i32`, `u32`, etc.).
/// We don't know until it's used what type it's supposed to be, so
/// we create a fresh type variable.
IntVar(IntVid),
/// A floating-point type variable (`{float}`).
///
/// These are created when the compiler sees a float literal like
/// `1.0` that could be either an `f32` or an `f64`.
/// We don't know until it's used what type it's supposed to be, so
/// we create a fresh type variable.
FloatVar(FloatVid),

/// A `FreshTy` is one that is generated as a replacement for an
/// unbound type variable. This is convenient for caching etc. See
/// `infer::freshen` for more details.
/// A [`FreshTy`][Self::FreshTy] is one that is generated as a replacement
/// for an unbound type variable. This is convenient for caching etc. See
/// `rustc_infer::infer::freshen` for more details.
///
/// Compare with [`TyVar`][Self::TyVar].
FreshTy(u32),
/// Like [`FreshTy`][Self::FreshTy], but as a replacement for [`IntVar`][Self::IntVar].
FreshIntTy(u32),
/// Like [`FreshTy`][Self::FreshTy], but as a replacement for [`FloatVar`][Self::FloatVar].
FreshFloatTy(u32),
}

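To make the new `InferTy` docs concrete, here is a small, self-contained sketch (not part of the diff) of the situations they describe: literals whose types begin as `{integer}` or `{float}` inference variables, and an empty collection whose element type gets a fresh type variable.

```rust
fn main() {
    // An integer literal starts as an integral inference variable
    // (shown as `{integer}` in diagnostics) until a use constrains it.
    let n = 1;
    let _m: u8 = n; // `n` is now resolved to `u8`.

    // A float literal starts as `{float}`: it could be `f32` or `f64`.
    let x = 1.0;
    let _y: f32 = x; // resolved to `f32`.

    // An empty collection gets a fresh type variable for its element type.
    let mut v = Vec::new();
    v.push("hello"); // the element type is now known to be `&str`.
}
```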
2 changes: 1 addition & 1 deletion compiler/rustc_traits/src/chalk/mod.rs
@@ -98,7 +98,7 @@ crate fn evaluate_goal<'tcx>(
let mut solver = chalk_engine::solve::SLGSolver::new(32, None);
let db = ChalkRustIrDatabase { interner, reempty_placeholder };
let solution = solver.solve(&db, &lowered_goal);
debug!(?obligation, ?solution, "evaluatate goal");
debug!(?obligation, ?solution, "evaluate goal");

// Ideally, the code to convert *back* to rustc types would live close to
// the code to convert *from* rustc types. Right now though, we don't
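As an aside (not part of the diff), the `?name` arguments in that `debug!` call are the `tracing` crate's shorthand for recording a field with its `Debug` formatting. A minimal standalone sketch, assuming `tracing` and `tracing-subscriber` as dependencies:

```rust
use tracing::{debug, Level};

fn main() {
    // Print events at DEBUG level and above to standard output.
    tracing_subscriber::fmt().with_max_level(Level::DEBUG).init();

    let obligation = vec!["goal A", "goal B"];
    let solution: Option<&str> = Some("unique");
    // `?obligation` is shorthand for `obligation = ?obligation` (Debug formatting).
    debug!(?obligation, ?solution, "evaluate goal");
}
```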
1 change: 1 addition & 0 deletions library/alloc/src/collections/binary_heap.rs
@@ -915,6 +915,7 @@ impl<T> BinaryHeap<T> {
///
/// assert_eq!(heap.len(), 2);
/// ```
#[doc(alias = "length")]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> usize {
self.data.len()
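The same `#[doc(alias = "length")]` attribute is added to the other `len` methods below. It only affects rustdoc's search index: searching the generated docs for "length" now also surfaces `len`. A minimal sketch of the mechanism on a hypothetical type (the `Bag` type and the extra "size" alias are illustrative, not from this PR):

```rust
/// A hypothetical container, used only to illustrate `#[doc(alias = "...")]`.
pub struct Bag {
    items: Vec<u32>,
}

impl Bag {
    /// Returns the number of items in the bag.
    ///
    /// In the rendered docs, searching for "length" or "size" also finds `len`.
    #[doc(alias = "length")]
    #[doc(alias = "size")]
    pub fn len(&self) -> usize {
        self.items.len()
    }

    /// Returns `true` if the bag holds no items.
    pub fn is_empty(&self) -> bool {
        self.items.is_empty()
    }
}
```

The alias only changes search behavior; it does not create a callable `length` method.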
1 change: 1 addition & 0 deletions library/alloc/src/collections/btree/map.rs
@@ -2132,6 +2132,7 @@ impl<K, V> BTreeMap<K, V> {
/// a.insert(1, "a");
/// assert_eq!(a.len(), 1);
/// ```
#[doc(alias = "length")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
pub const fn len(&self) -> usize {
98 changes: 48 additions & 50 deletions library/alloc/src/collections/btree/node.rs
@@ -489,7 +489,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
///
/// # Safety
/// `index` is in bounds of 0..CAPACITY
unsafe fn key_area_mut_at<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
unsafe fn key_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
where
I: SliceIndex<[MaybeUninit<K>], Output = Output>,
{
@@ -503,7 +503,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
///
/// # Safety
/// `index` is in bounds of 0..CAPACITY
unsafe fn val_area_mut_at<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
unsafe fn val_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
where
I: SliceIndex<[MaybeUninit<V>], Output = Output>,
{
@@ -519,7 +519,7 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
///
/// # Safety
/// `index` is in bounds of 0..CAPACITY + 1
unsafe fn edge_area_mut_at<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
unsafe fn edge_area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
where
I: SliceIndex<[MaybeUninit<BoxedNode<K, V>>], Output = Output>,
{
@@ -583,8 +583,8 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
assert!(idx < CAPACITY);
*len += 1;
unsafe {
self.key_area_mut_at(idx).write(key);
self.val_area_mut_at(idx).write(val);
self.key_area_mut(idx).write(key);
self.val_area_mut(idx).write(val);
}
}

@@ -593,8 +593,8 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
let new_len = self.len() + 1;
assert!(new_len <= CAPACITY);
unsafe {
slice_insert(self.key_area_mut_at(..new_len), 0, key);
slice_insert(self.val_area_mut_at(..new_len), 0, val);
slice_insert(self.key_area_mut(..new_len), 0, key);
slice_insert(self.val_area_mut(..new_len), 0, val);
*self.len_mut() = new_len as u16;
}
}
@@ -627,9 +627,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
assert!(idx < CAPACITY);
*len += 1;
unsafe {
self.key_area_mut_at(idx).write(key);
self.val_area_mut_at(idx).write(val);
self.edge_area_mut_at(idx + 1).write(edge.node);
self.key_area_mut(idx).write(key);
self.val_area_mut(idx).write(val);
self.edge_area_mut(idx + 1).write(edge.node);
Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
}
}
@@ -642,9 +642,9 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
assert!(new_len <= CAPACITY);

unsafe {
slice_insert(self.key_area_mut_at(..new_len), 0, key);
slice_insert(self.val_area_mut_at(..new_len), 0, val);
slice_insert(self.edge_area_mut_at(..new_len + 1), 0, edge.node);
slice_insert(self.key_area_mut(..new_len), 0, key);
slice_insert(self.val_area_mut(..new_len), 0, val);
slice_insert(self.edge_area_mut(..new_len + 1), 0, edge.node);
*self.len_mut() = new_len as u16;
}

@@ -662,12 +662,12 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
let idx = self.len() - 1;

unsafe {
let key = self.key_area_mut_at(idx).assume_init_read();
let val = self.val_area_mut_at(idx).assume_init_read();
let key = self.key_area_mut(idx).assume_init_read();
let val = self.val_area_mut(idx).assume_init_read();
let edge = match self.reborrow_mut().force() {
ForceResult::Leaf(_) => None,
ForceResult::Internal(mut internal) => {
let node = internal.edge_area_mut_at(idx + 1).assume_init_read();
let node = internal.edge_area_mut(idx + 1).assume_init_read();
let mut edge = Root { node, height: internal.height - 1, _marker: PhantomData };
// Currently, clearing the parent link is superfluous, because we will
// insert the node elsewhere and set its parent link again.
@@ -690,12 +690,12 @@ impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
let old_len = self.len();

unsafe {
let key = slice_remove(self.key_area_mut_at(..old_len), 0);
let val = slice_remove(self.val_area_mut_at(..old_len), 0);
let key = slice_remove(self.key_area_mut(..old_len), 0);
let val = slice_remove(self.val_area_mut(..old_len), 0);
let edge = match self.reborrow_mut().force() {
ForceResult::Leaf(_) => None,
ForceResult::Internal(mut internal) => {
let node = slice_remove(internal.edge_area_mut_at(..old_len + 1), 0);
let node = slice_remove(internal.edge_area_mut(..old_len + 1), 0);
let mut edge = Root { node, height: internal.height - 1, _marker: PhantomData };
// Currently, clearing the parent link is superfluous, because we will
// insert the node elsewhere and set its parent link again.
@@ -919,11 +919,11 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
let new_len = self.node.len() + 1;

unsafe {
slice_insert(self.node.key_area_mut_at(..new_len), self.idx, key);
slice_insert(self.node.val_area_mut_at(..new_len), self.idx, val);
slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
*self.node.len_mut() = new_len as u16;

self.node.val_area_mut_at(self.idx).assume_init_mut()
self.node.val_area_mut(self.idx).assume_init_mut()
}
}
}
@@ -978,9 +978,9 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
let new_len = self.node.len() + 1;

unsafe {
slice_insert(self.node.key_area_mut_at(..new_len), self.idx, key);
slice_insert(self.node.val_area_mut_at(..new_len), self.idx, val);
slice_insert(self.node.edge_area_mut_at(..new_len + 1), self.idx + 1, edge.node);
slice_insert(self.node.key_area_mut(..new_len), self.idx, key);
slice_insert(self.node.val_area_mut(..new_len), self.idx, val);
slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node);
*self.node.len_mut() = new_len as u16;

self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1);
@@ -1085,7 +1085,7 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeTyp

impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
pub fn key_mut(&mut self) -> &mut K {
unsafe { self.node.key_area_mut_at(self.idx).assume_init_mut() }
unsafe { self.node.key_area_mut(self.idx).assume_init_mut() }
}

pub fn into_val_mut(self) -> &'a mut V {
@@ -1127,16 +1127,16 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>
let new_len = self.node.len() - self.idx - 1;
new_node.len = new_len as u16;
unsafe {
let k = self.node.key_area_mut_at(self.idx).assume_init_read();
let v = self.node.val_area_mut_at(self.idx).assume_init_read();
let k = self.node.key_area_mut(self.idx).assume_init_read();
let v = self.node.val_area_mut(self.idx).assume_init_read();

ptr::copy_nonoverlapping(
self.node.key_area_mut_at(self.idx + 1..).as_ptr(),
self.node.key_area_mut(self.idx + 1..).as_ptr(),
new_node.keys.as_mut_ptr(),
new_len,
);
ptr::copy_nonoverlapping(
self.node.val_area_mut_at(self.idx + 1..).as_ptr(),
self.node.val_area_mut(self.idx + 1..).as_ptr(),
new_node.vals.as_mut_ptr(),
new_len,
);
@@ -1173,8 +1173,8 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, mark
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
let old_len = self.node.len();
unsafe {
let k = slice_remove(self.node.key_area_mut_at(..old_len), self.idx);
let v = slice_remove(self.node.val_area_mut_at(..old_len), self.idx);
let k = slice_remove(self.node.key_area_mut(..old_len), self.idx);
let v = slice_remove(self.node.val_area_mut(..old_len), self.idx);
*self.node.len_mut() = (old_len - 1) as u16;
((k, v), self.left_edge())
}
@@ -1195,7 +1195,7 @@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>,
let kv = self.split_leaf_data(&mut new_node.data);
let new_len = usize::from(new_node.data.len);
ptr::copy_nonoverlapping(
self.node.edge_area_mut_at(self.idx + 1..).as_ptr(),
self.node.edge_area_mut(self.idx + 1..).as_ptr(),
new_node.edges.as_mut_ptr(),
new_len + 1,
);
@@ -1321,25 +1321,23 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
unsafe {
*left_node.len_mut() = new_left_len as u16;

let parent_key =
slice_remove(parent_node.key_area_mut_at(..old_parent_len), parent_idx);
left_node.key_area_mut_at(old_left_len).write(parent_key);
let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx);
left_node.key_area_mut(old_left_len).write(parent_key);
ptr::copy_nonoverlapping(
right_node.key_area_mut_at(..).as_ptr(),
left_node.key_area_mut_at(old_left_len + 1..).as_mut_ptr(),
right_node.key_area_mut(..).as_ptr(),
left_node.key_area_mut(old_left_len + 1..).as_mut_ptr(),
right_len,
);

let parent_val =
slice_remove(parent_node.val_area_mut_at(..old_parent_len), parent_idx);
left_node.val_area_mut_at(old_left_len).write(parent_val);
let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx);
left_node.val_area_mut(old_left_len).write(parent_val);
ptr::copy_nonoverlapping(
right_node.val_area_mut_at(..).as_ptr(),
left_node.val_area_mut_at(old_left_len + 1..).as_mut_ptr(),
right_node.val_area_mut(..).as_ptr(),
left_node.val_area_mut(old_left_len + 1..).as_mut_ptr(),
right_len,
);

slice_remove(&mut parent_node.edge_area_mut_at(..old_parent_len + 1), parent_idx + 1);
slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1);
parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len);
*parent_node.len_mut() -= 1;

@@ -1349,8 +1347,8 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked();
let mut right_node = right_node.cast_to_internal_unchecked();
ptr::copy_nonoverlapping(
right_node.edge_area_mut_at(..).as_ptr(),
left_node.edge_area_mut_at(old_left_len + 1..).as_mut_ptr(),
right_node.edge_area_mut(..).as_ptr(),
left_node.edge_area_mut(old_left_len + 1..).as_mut_ptr(),
right_len + 1,
);

@@ -1458,7 +1456,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) {
(ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
// Make room for stolen edges.
let right_edges = right.edge_area_mut_at(..).as_mut_ptr();
let right_edges = right.edge_area_mut(..).as_mut_ptr();
ptr::copy(right_edges, right_edges.add(count), old_right_len + 1);
right.correct_childrens_parent_links(count..new_right_len + 1);

@@ -1518,7 +1516,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> {
move_edges(right.reborrow_mut(), 0, left, old_left_len + 1, count);

// Fill gap where stolen edges used to be.
let right_edges = right.edge_area_mut_at(..).as_mut_ptr();
let right_edges = right.edge_area_mut(..).as_mut_ptr();
ptr::copy(right_edges.add(count), right_edges, new_right_len + 1);
right.correct_childrens_parent_links(0..=new_right_len);
}
@@ -1551,8 +1549,8 @@ unsafe fn move_edges<'a, K: 'a, V: 'a>(
count: usize,
) {
unsafe {
let source_ptr = source.edge_area_mut_at(..).as_ptr();
let dest_ptr = dest.edge_area_mut_at(dest_offset..).as_mut_ptr();
let source_ptr = source.edge_area_mut(..).as_ptr();
let dest_ptr = dest.edge_area_mut(dest_offset..).as_mut_ptr();
ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr, count);
dest.correct_childrens_parent_links(dest_offset..dest_offset + count);
}
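The renamed `key_area_mut`/`val_area_mut`/`edge_area_mut` methods above all follow one pattern: an unsafe accessor into a `MaybeUninit` backing array that is generic over `SliceIndex`, so a single method serves both one-slot and range access. A standalone sketch of that pattern (the `Area` type and its capacity are illustrative, not the real node layout):

```rust
use std::mem::MaybeUninit;
use std::slice::SliceIndex;

/// A simplified stand-in for a node's key or value storage.
struct Area<T, const N: usize> {
    data: [MaybeUninit<T>; N],
}

impl<T, const N: usize> Area<T, N> {
    /// Mutable access to part of the backing storage. A `usize` index yields
    /// `&mut MaybeUninit<T>`; a range yields `&mut [MaybeUninit<T>]`.
    ///
    /// # Safety
    /// `index` must be in bounds of `0..N`.
    unsafe fn area_mut<I, Output: ?Sized>(&mut self, index: I) -> &mut Output
    where
        I: SliceIndex<[MaybeUninit<T>], Output = Output>,
    {
        // SAFETY: the caller guarantees `index` is in bounds.
        unsafe { self.data.get_unchecked_mut(index) }
    }
}

fn main() {
    let mut area: Area<u32, 8> = Area { data: [MaybeUninit::uninit(); 8] };
    unsafe {
        area.area_mut(0).write(42); // single slot
        let first_two = area.area_mut(..2); // sub-slice of length 2
        first_two[1].write(7);
    }
}
```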
1 change: 1 addition & 0 deletions library/alloc/src/collections/btree/set.rs
@@ -975,6 +975,7 @@ impl<T> BTreeSet<T> {
/// v.insert(1);
/// assert_eq!(v.len(), 1);
/// ```
#[doc(alias = "length")]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
pub const fn len(&self) -> usize {
1 change: 1 addition & 0 deletions library/alloc/src/collections/linked_list.rs
@@ -593,6 +593,7 @@ impl<T> LinkedList<T> {
/// dl.push_back(3);
/// assert_eq!(dl.len(), 3);
/// ```
#[doc(alias = "length")]
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn len(&self) -> usize {
