diff --git a/src/doc/unstable-book/src/SUMMARY.md b/src/doc/unstable-book/src/SUMMARY.md index 44eda204a3d29..7820d44920829 100644 --- a/src/doc/unstable-book/src/SUMMARY.md +++ b/src/doc/unstable-book/src/SUMMARY.md @@ -71,6 +71,7 @@ - [repr_simd](repr-simd.md) - [rustc_attrs](rustc-attrs.md) - [rustc_diagnostic_macros](rustc-diagnostic-macros.md) +- [rvalue_static_promotion](rvalue-static-promotion.md) - [sanitizer_runtime](sanitizer-runtime.md) - [simd](simd.md) - [simd_ffi](simd-ffi.md) diff --git a/src/doc/unstable-book/src/allocator.md b/src/doc/unstable-book/src/allocator.md index 7261641698f48..cfcf8e22d7088 100644 --- a/src/doc/unstable-book/src/allocator.md +++ b/src/doc/unstable-book/src/allocator.md @@ -51,6 +51,11 @@ pub extern fn __rust_allocate(size: usize, _align: usize) -> *mut u8 { unsafe { libc::malloc(size as libc::size_t) as *mut u8 } } +#[no_mangle] +pub extern fn __rust_allocate_zeroed(size: usize, _align: usize) -> *mut u8 { + unsafe { libc::calloc(size as libc::size_t, 1) as *mut u8 } +} + #[no_mangle] pub extern fn __rust_deallocate(ptr: *mut u8, _old_size: usize, _align: usize) { unsafe { libc::free(ptr as *mut libc::c_void) } diff --git a/src/doc/unstable-book/src/rvalue-static-promotion.md b/src/doc/unstable-book/src/rvalue-static-promotion.md new file mode 100644 index 0000000000000..2583d350ebe11 --- /dev/null +++ b/src/doc/unstable-book/src/rvalue-static-promotion.md @@ -0,0 +1,23 @@ +# `rvalue_static_promotion` + +The tracking issue for this feature is: [#38865] + +[#38865]: https://github.com/rust-lang/rust/issues/38865 + +------------------------ + +The `rvalue_static_promotion` feature allows directly creating `'static` references to +constant `rvalue`s, which in particular allows for more concise code in the common case +in which a `'static` reference is all that's needed. + + +## Examples + +```rust +#![feature(rvalue_static_promotion)] + +fn main() { + let DEFAULT_VALUE: &'static u32 = &42; + assert_eq!(*DEFAULT_VALUE, 42); +} +``` diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 38d843263ffda..1d616233881b4 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -287,17 +287,15 @@ impl Arc { /// # Examples /// /// ``` - /// #![feature(rc_raw)] - /// /// use std::sync::Arc; /// /// let x = Arc::new(10); /// let x_ptr = Arc::into_raw(x); /// assert_eq!(unsafe { *x_ptr }, 10); /// ``` - #[unstable(feature = "rc_raw", issue = "37197")] - pub fn into_raw(this: Self) -> *mut T { - let ptr = unsafe { &mut (**this.ptr).data as *mut _ }; + #[stable(feature = "rc_raw", since = "1.17.0")] + pub fn into_raw(this: Self) -> *const T { + let ptr = unsafe { &(**this.ptr).data as *const _ }; mem::forget(this); ptr } @@ -315,8 +313,6 @@ impl Arc { /// # Examples /// /// ``` - /// #![feature(rc_raw)] - /// /// use std::sync::Arc; /// /// let x = Arc::new(10); @@ -332,11 +328,14 @@ impl Arc { /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` - #[unstable(feature = "rc_raw", issue = "37197")] - pub unsafe fn from_raw(ptr: *mut T) -> Self { + #[stable(feature = "rc_raw", since = "1.17.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { // To find the corresponding pointer to the `ArcInner` we need to subtract the offset of the // `data` field from the pointer. 
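As a quick illustration of the `into_raw`/`from_raw` round trip being stabilized in the hunk above (now taking and returning `*const T` instead of `*mut T`), here is a minimal, self-contained usage sketch from the caller's side:

```rust
use std::sync::Arc;

fn main() {
    let x = Arc::new(10);

    // `into_raw` consumes the Arc and yields a `*const T`.
    let x_ptr: *const i32 = Arc::into_raw(x);
    assert_eq!(unsafe { *x_ptr }, 10);

    // Rebuild the Arc from the raw pointer so the allocation is eventually dropped.
    let x = unsafe { Arc::from_raw(x_ptr) };
    assert_eq!(*x, 10);
}
```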
- Arc { ptr: Shared::new((ptr as *mut u8).offset(-offset_of!(ArcInner, data)) as *mut _) } + let ptr = (ptr as *const u8).offset(-offset_of!(ArcInner, data)); + Arc { + ptr: Shared::new(ptr as *const _), + } } } @@ -448,7 +447,7 @@ impl Arc { // Non-inlined part of `drop`. #[inline(never)] unsafe fn drop_slow(&mut self) { - let ptr = *self.ptr; + let ptr = self.ptr.as_mut_ptr(); // Destroy the data at this time, even though we may not free the box // allocation itself (there may still be weak pointers lying around). @@ -461,17 +460,13 @@ impl Arc { } #[inline] - #[unstable(feature = "ptr_eq", - reason = "newly added", - issue = "36497")] + #[stable(feature = "ptr_eq", since = "1.17.0")] /// Returns true if the two `Arc`s point to the same value (not /// just values that compare as equal). /// /// # Examples /// /// ``` - /// #![feature(ptr_eq)] - /// /// use std::sync::Arc; /// /// let five = Arc::new(5); @@ -628,7 +623,7 @@ impl Arc { // As with `get_mut()`, the unsafety is ok because our reference was // either unique to begin with, or became one upon cloning the contents. unsafe { - let inner = &mut **this.ptr; + let inner = &mut *this.ptr.as_mut_ptr(); &mut inner.data } } @@ -671,7 +666,7 @@ impl Arc { // the Arc itself to be `mut`, so we're returning the only possible // reference to the inner data. unsafe { - let inner = &mut **this.ptr; + let inner = &mut *this.ptr.as_mut_ptr(); Some(&mut inner.data) } } else { diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs index 51e6f2f8bd7a6..9ec3535881341 100644 --- a/src/liballoc/heap.rs +++ b/src/liballoc/heap.rs @@ -23,6 +23,7 @@ use core::intrinsics::{min_align_of_val, size_of_val}; extern "C" { #[allocator] fn __rust_allocate(size: usize, align: usize) -> *mut u8; + fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8; fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize); fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8; fn __rust_reallocate_inplace(ptr: *mut u8, @@ -59,6 +60,20 @@ pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 { __rust_allocate(size, align) } +/// Return a pointer to `size` bytes of memory aligned to `align` and +/// initialized to zeroes. +/// +/// On failure, return a null pointer. +/// +/// Behavior is undefined if the requested size is 0 or the alignment is not a +/// power of 2. The alignment must be no larger than the largest supported page +/// size on the platform. +#[inline] +pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 { + check_size_and_alignment(size, align); + __rust_allocate_zeroed(size, align) +} + /// Resize the allocation referenced by `ptr` to `size` bytes. /// /// On failure, return a null pointer and leave the original allocation intact. 
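The `ptr_eq` stabilization in the hunk above compares allocations rather than values. A short sketch of how callers use it (assuming the method is `Arc::ptr_eq`, as the feature name and doc comment suggest):

```rust
use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    let same_five = five.clone();
    let other_five = Arc::new(5);

    // Same allocation: true. Equal values in different allocations: false.
    assert!(Arc::ptr_eq(&five, &same_five));
    assert!(!Arc::ptr_eq(&five, &other_five));
}
```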
@@ -162,6 +177,23 @@ mod tests { use boxed::Box; use heap; + #[test] + fn allocate_zeroed() { + unsafe { + let size = 1024; + let mut ptr = heap::allocate_zeroed(size, 1); + if ptr.is_null() { + ::oom() + } + let end = ptr.offset(size as isize); + while ptr < end { + assert_eq!(*ptr, 0); + ptr = ptr.offset(1); + } + heap::deallocate(ptr, size, 1); + } + } + #[test] fn basic_reallocate_inplace_noop() { unsafe { diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 357a2724e0020..54da527976715 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -82,6 +82,16 @@ impl RawVec { /// /// Aborts on OOM pub fn with_capacity(cap: usize) -> Self { + RawVec::allocate(cap, false) + } + + /// Like `with_capacity` but guarantees the buffer is zeroed. + pub fn with_capacity_zeroed(cap: usize) -> Self { + RawVec::allocate(cap, true) + } + + #[inline] + fn allocate(cap: usize, zeroed: bool) -> Self { unsafe { let elem_size = mem::size_of::(); @@ -93,7 +103,11 @@ impl RawVec { heap::EMPTY as *mut u8 } else { let align = mem::align_of::(); - let ptr = heap::allocate(alloc_size, align); + let ptr = if zeroed { + heap::allocate_zeroed(alloc_size, align) + } else { + heap::allocate(alloc_size, align) + }; if ptr.is_null() { oom() } diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index eb449b2660679..e9b59017692eb 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -364,17 +364,15 @@ impl Rc { /// # Examples /// /// ``` - /// #![feature(rc_raw)] - /// /// use std::rc::Rc; /// /// let x = Rc::new(10); /// let x_ptr = Rc::into_raw(x); /// assert_eq!(unsafe { *x_ptr }, 10); /// ``` - #[unstable(feature = "rc_raw", issue = "37197")] - pub fn into_raw(this: Self) -> *mut T { - let ptr = unsafe { &mut (**this.ptr).value as *mut _ }; + #[stable(feature = "rc_raw", since = "1.17.0")] + pub fn into_raw(this: Self) -> *const T { + let ptr = unsafe { &mut (*this.ptr.as_mut_ptr()).value as *const _ }; mem::forget(this); ptr } @@ -392,8 +390,6 @@ impl Rc { /// # Examples /// /// ``` - /// #![feature(rc_raw)] - /// /// use std::rc::Rc; /// /// let x = Rc::new(10); @@ -409,11 +405,11 @@ impl Rc { /// /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling! /// ``` - #[unstable(feature = "rc_raw", issue = "37197")] - pub unsafe fn from_raw(ptr: *mut T) -> Self { + #[stable(feature = "rc_raw", since = "1.17.0")] + pub unsafe fn from_raw(ptr: *const T) -> Self { // To find the corresponding pointer to the `RcBox` we need to subtract the offset of the // `value` field from the pointer. - Rc { ptr: Shared::new((ptr as *mut u8).offset(-offset_of!(RcBox, value)) as *mut _) } + Rc { ptr: Shared::new((ptr as *const u8).offset(-offset_of!(RcBox, value)) as *const _) } } } @@ -543,7 +539,7 @@ impl Rc { #[stable(feature = "rc_unique", since = "1.4.0")] pub fn get_mut(this: &mut Self) -> Option<&mut T> { if Rc::is_unique(this) { - let inner = unsafe { &mut **this.ptr }; + let inner = unsafe { &mut *this.ptr.as_mut_ptr() }; Some(&mut inner.value) } else { None @@ -551,17 +547,13 @@ impl Rc { } #[inline] - #[unstable(feature = "ptr_eq", - reason = "newly added", - issue = "36497")] + #[stable(feature = "ptr_eq", since = "1.17.0")] /// Returns true if the two `Rc`s point to the same value (not /// just values that compare as equal). 
/// /// # Examples /// /// ``` - /// #![feature(ptr_eq)] - /// /// use std::rc::Rc; /// /// let five = Rc::new(5); @@ -631,7 +623,7 @@ impl Rc { // reference count is guaranteed to be 1 at this point, and we required // the `Rc` itself to be `mut`, so we're returning the only possible // reference to the inner value. - let inner = unsafe { &mut **this.ptr }; + let inner = unsafe { &mut *this.ptr.as_mut_ptr() }; &mut inner.value } } @@ -677,7 +669,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc { /// ``` fn drop(&mut self) { unsafe { - let ptr = *self.ptr; + let ptr = self.ptr.as_mut_ptr(); self.dec_strong(); if self.strong() == 0 { diff --git a/src/liballoc_jemalloc/lib.rs b/src/liballoc_jemalloc/lib.rs index a7a67ef76d4f7..5b0aa176799c2 100644 --- a/src/liballoc_jemalloc/lib.rs +++ b/src/liballoc_jemalloc/lib.rs @@ -38,6 +38,10 @@ mod imp { target_os = "dragonfly", target_os = "windows"), link_name = "je_mallocx")] fn mallocx(size: size_t, flags: c_int) -> *mut c_void; + #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", + target_os = "dragonfly", target_os = "windows"), + link_name = "je_calloc")] + fn calloc(size: size_t, flags: c_int) -> *mut c_void; #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", target_os = "dragonfly", target_os = "windows"), link_name = "je_rallocx")] @@ -56,6 +60,8 @@ mod imp { fn nallocx(size: size_t, flags: c_int) -> size_t; } + const MALLOCX_ZERO: c_int = 0x40; + // The minimum alignment guaranteed by the architecture. This value is used to // add fast paths for low alignment values. In practice, the alignment is a // constant at the call site and the branch will be optimized out. @@ -91,6 +97,16 @@ mod imp { unsafe { mallocx(size as size_t, flags) as *mut u8 } } + #[no_mangle] + pub extern "C" fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8 { + if align <= MIN_ALIGN { + unsafe { calloc(size as size_t, 1) as *mut u8 } + } else { + let flags = align_to_flags(align) | MALLOCX_ZERO; + unsafe { mallocx(size as size_t, flags) as *mut u8 } + } + } + #[no_mangle] pub extern "C" fn __rust_reallocate(ptr: *mut u8, _old_size: usize, @@ -135,6 +151,11 @@ mod imp { bogus() } + #[no_mangle] + pub extern "C" fn __rust_allocate_zeroed(_size: usize, _align: usize) -> *mut u8 { + bogus() + } + #[no_mangle] pub extern "C" fn __rust_reallocate(_ptr: *mut u8, _old_size: usize, diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs index de2b75f62b68a..e012caf870a97 100644 --- a/src/liballoc_system/lib.rs +++ b/src/liballoc_system/lib.rs @@ -1,4 +1,4 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. 
// @@ -44,6 +44,11 @@ pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 { unsafe { imp::allocate(size, align) } } +#[no_mangle] +pub extern "C" fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8 { + unsafe { imp::allocate_zeroed(size, align) } +} + #[no_mangle] pub extern "C" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) { unsafe { imp::deallocate(ptr, old_size, align) } @@ -121,6 +126,18 @@ mod imp { } } + pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 { + if align <= MIN_ALIGN { + libc::calloc(size as libc::size_t, 1) as *mut u8 + } else { + let ptr = aligned_malloc(size, align); + if !ptr.is_null() { + ptr::write_bytes(ptr, 0, size); + } + ptr + } + } + pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 { if align <= MIN_ALIGN { libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8 @@ -173,6 +190,8 @@ mod imp { #[repr(C)] struct Header(*mut u8); + + const HEAP_ZERO_MEMORY: DWORD = 0x00000008; const HEAP_REALLOC_IN_PLACE_ONLY: DWORD = 0x00000010; unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header { @@ -185,11 +204,12 @@ mod imp { aligned } - pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 { + #[inline] + unsafe fn allocate_with_flags(size: usize, align: usize, flags: DWORD) -> *mut u8 { if align <= MIN_ALIGN { - HeapAlloc(GetProcessHeap(), 0, size as SIZE_T) as *mut u8 + HeapAlloc(GetProcessHeap(), flags, size as SIZE_T) as *mut u8 } else { - let ptr = HeapAlloc(GetProcessHeap(), 0, (size + align) as SIZE_T) as *mut u8; + let ptr = HeapAlloc(GetProcessHeap(), flags, (size + align) as SIZE_T) as *mut u8; if ptr.is_null() { return ptr; } @@ -197,6 +217,14 @@ mod imp { } } + pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 { + allocate_with_flags(size, align, 0) + } + + pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 { + allocate_with_flags(size, align, HEAP_ZERO_MEMORY) + } + pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 { if align <= MIN_ALIGN { HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, size as SIZE_T) as *mut u8 diff --git a/src/libcollections/btree/map.rs b/src/libcollections/btree/map.rs index 7218d15ded5f8..53fe6b4bc9f4f 100644 --- a/src/libcollections/btree/map.rs +++ b/src/libcollections/btree/map.rs @@ -338,6 +338,7 @@ pub struct ValuesMut<'a, K: 'a, V: 'a> { } /// An iterator over a sub-range of BTreeMap's entries. +#[stable(feature = "btree_range", since = "1.17.0")] pub struct Range<'a, K: 'a, V: 'a> { front: Handle, K, V, marker::Leaf>, marker::Edge>, back: Handle, K, V, marker::Leaf>, marker::Edge>, @@ -351,6 +352,7 @@ impl<'a, K: 'a + fmt::Debug, V: 'a + fmt::Debug> fmt::Debug for Range<'a, K, V> } /// A mutable iterator over a sub-range of BTreeMap's entries. 
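The platform-specific `__rust_allocate_zeroed` implementations above share one shape: use the platform's zeroing allocation when available (`calloc`, `MALLOCX_ZERO`, `HEAP_ZERO_MEMORY`) on the small-alignment fast path, and zero the buffer by hand otherwise. A hedged, glibc-only sketch in the spirit of the Unix version, using the `libc` crate; `posix_memalign` stands in for the platform-specific `aligned_malloc` helper and `MIN_ALIGN` is an assumed placeholder, not the real per-target constant:

```rust
extern crate libc;

use std::ptr;

// Assumed placeholder; the real value is chosen per target in the code above.
const MIN_ALIGN: usize = 8;

unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 {
    if align <= MIN_ALIGN {
        // calloc already hands back zero-initialized memory.
        libc::calloc(size as libc::size_t, 1) as *mut u8
    } else {
        // Over-aligned path: allocate, then zero the buffer ourselves.
        let mut raw = ptr::null_mut();
        if libc::posix_memalign(&mut raw, align as libc::size_t, size as libc::size_t) != 0 {
            return ptr::null_mut();
        }
        let p = raw as *mut u8;
        ptr::write_bytes(p, 0, size);
        p
    }
}

fn main() {
    let p = unsafe { allocate_zeroed(64, 32) };
    assert!(!p.is_null());
    assert_eq!(unsafe { *p }, 0);
    unsafe { libc::free(p as *mut libc::c_void) };
}
```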
+#[stable(feature = "btree_range", since = "1.17.0")] pub struct RangeMut<'a, K: 'a, V: 'a> { front: Handle, K, V, marker::Leaf>, marker::Edge>, back: Handle, K, V, marker::Leaf>, marker::Edge>, @@ -724,8 +726,6 @@ impl BTreeMap { /// Basic usage: /// /// ``` - /// #![feature(btree_range, collections_bound)] - /// /// use std::collections::BTreeMap; /// use std::collections::Bound::Included; /// @@ -738,9 +738,7 @@ impl BTreeMap { /// } /// assert_eq!(Some((&5, &"b")), map.range(4..).next()); /// ``` - #[unstable(feature = "btree_range", - reason = "matches collection reform specification, waiting for dust to settle", - issue = "27787")] + #[stable(feature = "btree_range", since = "1.17.0")] pub fn range(&self, range: R) -> Range where T: Ord, K: Borrow, R: RangeArgument { @@ -768,8 +766,6 @@ impl BTreeMap { /// Basic usage: /// /// ``` - /// #![feature(btree_range)] - /// /// use std::collections::BTreeMap; /// /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"].iter() @@ -782,9 +778,7 @@ impl BTreeMap { /// println!("{} => {}", name, balance); /// } /// ``` - #[unstable(feature = "btree_range", - reason = "matches collection reform specification, waiting for dust to settle", - issue = "27787")] + #[stable(feature = "btree_range", since = "1.17.0")] pub fn range_mut(&mut self, range: R) -> RangeMut where T: Ord, K: Borrow, R: RangeArgument { diff --git a/src/libcollections/btree/set.rs b/src/libcollections/btree/set.rs index e3c990c80decf..72d25f87bca95 100644 --- a/src/libcollections/btree/set.rs +++ b/src/libcollections/btree/set.rs @@ -113,6 +113,7 @@ pub struct IntoIter { /// [`BTreeSet`]: struct.BTreeSet.html /// [`range`]: struct.BTreeSet.html#method.range #[derive(Debug)] +#[stable(feature = "btree_range", since = "1.17.0")] pub struct Range<'a, T: 'a> { iter: ::btree_map::Range<'a, T, ()>, } @@ -264,8 +265,6 @@ impl BTreeSet { /// # Examples /// /// ``` - /// #![feature(btree_range, collections_bound)] - /// /// use std::collections::BTreeSet; /// use std::collections::Bound::Included; /// @@ -278,9 +277,7 @@ impl BTreeSet { /// } /// assert_eq!(Some(&5), set.range(4..).next()); /// ``` - #[unstable(feature = "btree_range", - reason = "matches collection reform specification, waiting for dust to settle", - issue = "27787")] + #[stable(feature = "btree_range", since = "1.17.0")] pub fn range(&self, range: R) -> Range where K: Ord, T: Borrow, R: RangeArgument { diff --git a/src/libcollections/fmt.rs b/src/libcollections/fmt.rs index 1ec1749c3aa64..62a8816462191 100644 --- a/src/libcollections/fmt.rs +++ b/src/libcollections/fmt.rs @@ -367,6 +367,10 @@ //! like `{:08}` would yield `00000001` for the integer `1`, while the //! same format would yield `-0000001` for the integer `-1`. Notice that //! the negative version has one fewer zero than the positive version. +//! Note that padding zeroes are always placed after the sign (if any) +//! and before the digits. When used together with the `#` flag, a similar +//! rule applies: padding zeroes are inserted after the prefix but before +//! the digits. //! //! ## Width //! diff --git a/src/libcollections/lib.rs b/src/libcollections/lib.rs index f88bdd0ecf382..fb07152909408 100644 --- a/src/libcollections/lib.rs +++ b/src/libcollections/lib.rs @@ -42,6 +42,7 @@ #![feature(fused)] #![feature(generic_param_attrs)] #![feature(heap_api)] +#![feature(i128_type)] #![feature(inclusive_range)] #![feature(lang_items)] #![feature(nonzero)] @@ -129,14 +130,17 @@ mod std { } /// An endpoint of a range of keys. 
-#[unstable(feature = "collections_bound", issue = "27787")] +#[stable(feature = "collections_bound", since = "1.17.0")] #[derive(Clone, Copy, Debug, Hash, PartialEq, Eq)] pub enum Bound { /// An inclusive bound. + #[stable(feature = "collections_bound", since = "1.17.0")] Included(T), /// An exclusive bound. + #[stable(feature = "collections_bound", since = "1.17.0")] Excluded(T), /// An infinite endpoint. Indicates that there is no bound in this direction. + #[stable(feature = "collections_bound", since = "1.17.0")] Unbounded, } diff --git a/src/libcollections/linked_list.rs b/src/libcollections/linked_list.rs index d4f77d625b361..f58c87b801f55 100644 --- a/src/libcollections/linked_list.rs +++ b/src/libcollections/linked_list.rs @@ -142,7 +142,7 @@ impl LinkedList { match self.head { None => self.tail = node, - Some(head) => (**head).prev = node, + Some(head) => (*head.as_mut_ptr()).prev = node, } self.head = node; @@ -154,12 +154,12 @@ impl LinkedList { #[inline] fn pop_front_node(&mut self) -> Option>> { self.head.map(|node| unsafe { - let node = Box::from_raw(*node); + let node = Box::from_raw(node.as_mut_ptr()); self.head = node.next; match self.head { None => self.tail = None, - Some(head) => (**head).prev = None, + Some(head) => (*head.as_mut_ptr()).prev = None, } self.len -= 1; @@ -177,7 +177,7 @@ impl LinkedList { match self.tail { None => self.head = node, - Some(tail) => (**tail).next = node, + Some(tail) => (*tail.as_mut_ptr()).next = node, } self.tail = node; @@ -189,12 +189,12 @@ impl LinkedList { #[inline] fn pop_back_node(&mut self) -> Option>> { self.tail.map(|node| unsafe { - let node = Box::from_raw(*node); + let node = Box::from_raw(node.as_mut_ptr()); self.tail = node.prev; match self.tail { None => self.head = None, - Some(tail) => (**tail).next = None, + Some(tail) => (*tail.as_mut_ptr()).next = None, } self.len -= 1; @@ -269,8 +269,8 @@ impl LinkedList { Some(tail) => { if let Some(other_head) = other.head.take() { unsafe { - (**tail).next = Some(other_head); - (**other_head).prev = Some(tail); + (*tail.as_mut_ptr()).next = Some(other_head); + (*other_head.as_mut_ptr()).prev = Some(tail); } self.tail = other.tail.take(); @@ -484,7 +484,7 @@ impl LinkedList { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn front_mut(&mut self) -> Option<&mut T> { - self.head.map(|node| unsafe { &mut (**node).element }) + self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element }) } /// Provides a reference to the back element, or `None` if the list is @@ -530,7 +530,7 @@ impl LinkedList { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn back_mut(&mut self) -> Option<&mut T> { - self.tail.map(|node| unsafe { &mut (**node).element }) + self.tail.map(|node| unsafe { &mut (*node.as_mut_ptr()).element }) } /// Adds an element first in the list. 
@@ -675,9 +675,9 @@ impl LinkedList { let second_part_head; unsafe { - second_part_head = (**split_node.unwrap()).next.take(); + second_part_head = (*split_node.unwrap().as_mut_ptr()).next.take(); if let Some(head) = second_part_head { - (**head).prev = None; + (*head.as_mut_ptr()).prev = None; } } @@ -816,7 +816,7 @@ impl<'a, T> Iterator for IterMut<'a, T> { None } else { self.head.map(|node| unsafe { - let node = &mut **node; + let node = &mut *node.as_mut_ptr(); self.len -= 1; self.head = node.next; &mut node.element @@ -838,7 +838,7 @@ impl<'a, T> DoubleEndedIterator for IterMut<'a, T> { None } else { self.tail.map(|node| unsafe { - let node = &mut **node; + let node = &mut *node.as_mut_ptr(); self.len -= 1; self.tail = node.prev; &mut node.element @@ -896,8 +896,8 @@ impl<'a, T> IterMut<'a, T> { element: element, }))); - (**prev).next = node; - (**head).prev = node; + (*prev.as_mut_ptr()).next = node; + (*head.as_mut_ptr()).prev = node; self.list.len += 1; }, @@ -929,7 +929,7 @@ impl<'a, T> IterMut<'a, T> { if self.len == 0 { None } else { - self.head.map(|node| unsafe { &mut (**node).element }) + self.head.map(|node| unsafe { &mut (*node.as_mut_ptr()).element }) } } } diff --git a/src/libcollections/range.rs b/src/libcollections/range.rs index e4b94a1d70ee4..31e4d001397bf 100644 --- a/src/libcollections/range.rs +++ b/src/libcollections/range.rs @@ -29,7 +29,6 @@ pub trait RangeArgument { /// ``` /// #![feature(collections)] /// #![feature(collections_range)] - /// #![feature(collections_bound)] /// /// extern crate collections; /// @@ -52,7 +51,6 @@ pub trait RangeArgument { /// ``` /// #![feature(collections)] /// #![feature(collections_range)] - /// #![feature(collections_bound)] /// /// extern crate collections; /// diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 11fc1d553f28e..7d3bd1cc569be 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -437,8 +437,8 @@ impl [T] { /// The caller must ensure that the slice outlives the pointer this /// function returns, or else it will end up pointing to garbage. /// - /// Modifying the slice may cause its buffer to be reallocated, which - /// would also make any pointers to it invalid. + /// Modifying the container referenced by this slice may cause its buffer + /// to be reallocated, which would also make any pointers to it invalid. /// /// # Examples /// @@ -463,8 +463,8 @@ impl [T] { /// The caller must ensure that the slice outlives the pointer this /// function returns, or else it will end up pointing to garbage. /// - /// Modifying the slice may cause its buffer to be reallocated, which - /// would also make any pointers to it invalid. + /// Modifying the container referenced by this slice may cause its buffer + /// to be reallocated, which would also make any pointers to it invalid. 
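The reworded `as_ptr`/`as_mut_ptr` docs above distinguish the slice itself from the container that owns its buffer. A short example of the caveat they describe, using `Vec` as the owning container:

```rust
fn main() {
    let mut v = vec![1u8, 2, 3];
    let p = v.as_ptr();
    assert_eq!(unsafe { *p }, 1);

    // Mutating the owning container (not the slice itself) may reallocate the
    // buffer and invalidate `p`, so re-acquire the pointer after mutation.
    for byte in 0..100u8 {
        v.push(byte);
    }
    let p = v.as_ptr();
    assert_eq!(unsafe { *p }, 1);
}
```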
/// /// # Examples /// diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index a717163f45ef5..e5fda685350c8 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -1369,11 +1369,72 @@ impl Vec { #[doc(hidden)] #[stable(feature = "rust1", since = "1.0.0")] pub fn from_elem(elem: T, n: usize) -> Vec { - let mut v = Vec::with_capacity(n); - v.extend_with_element(n, elem); - v + ::from_elem(elem, n) } +// Specialization trait used for Vec::from_elem +trait SpecFromElem: Sized { + fn from_elem(elem: Self, n: usize) -> Vec; +} + +impl SpecFromElem for T { + default fn from_elem(elem: Self, n: usize) -> Vec { + let mut v = Vec::with_capacity(n); + v.extend_with_element(n, elem); + v + } +} + +impl SpecFromElem for u8 { + #[inline] + fn from_elem(elem: u8, n: usize) -> Vec { + if elem == 0 { + return Vec { + buf: RawVec::with_capacity_zeroed(n), + len: n, + } + } + unsafe { + let mut v = Vec::with_capacity(n); + ptr::write_bytes(v.as_mut_ptr(), elem, n); + v.set_len(n); + v + } + } +} + +macro_rules! impl_spec_from_elem_int { + ($t: ty) => { + impl SpecFromElem for $t { + #[inline] + fn from_elem(elem: $t, n: usize) -> Vec<$t> { + if elem == 0 { + return Vec { + buf: RawVec::with_capacity_zeroed(n), + len: n, + } + } + let mut v = Vec::with_capacity(n); + v.extend_with_element(n, elem); + v + } + } + } +} + +impl_spec_from_elem_int!(i8); +impl_spec_from_elem_int!(i16); +impl_spec_from_elem_int!(i32); +impl_spec_from_elem_int!(i64); +impl_spec_from_elem_int!(i128); +impl_spec_from_elem_int!(isize); + +impl_spec_from_elem_int!(u16); +impl_spec_from_elem_int!(u32); +impl_spec_from_elem_int!(u64); +impl_spec_from_elem_int!(u128); +impl_spec_from_elem_int!(usize); + //////////////////////////////////////////////////////////////////////////////// // Common trait implementations for Vec //////////////////////////////////////////////////////////////////////////////// @@ -2120,7 +2181,7 @@ unsafe impl<#[may_dangle] T> Drop for IntoIter { for _x in self.by_ref() {} // RawVec handles deallocation - let _ = unsafe { RawVec::from_raw_parts(*self.buf, self.cap) }; + let _ = unsafe { RawVec::from_raw_parts(self.buf.as_mut_ptr(), self.cap) }; } } @@ -2185,7 +2246,7 @@ impl<'a, T> Drop for Drain<'a, T> { if self.tail_len > 0 { unsafe { - let source_vec = &mut **self.vec; + let source_vec = &mut *self.vec.as_mut_ptr(); // memmove back untouched tail, update to new length let start = source_vec.len(); let tail = self.tail_start; diff --git a/src/libcollections/vec_deque.rs b/src/libcollections/vec_deque.rs index 1985be7f901c6..6a04d47a345e8 100644 --- a/src/libcollections/vec_deque.rs +++ b/src/libcollections/vec_deque.rs @@ -2125,7 +2125,7 @@ impl<'a, T: 'a> Drop for Drain<'a, T> { fn drop(&mut self) { for _ in self.by_ref() {} - let source_deque = unsafe { &mut **self.deque }; + let source_deque = unsafe { &mut *self.deque.as_mut_ptr() }; // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head // diff --git a/src/libcollectionstest/lib.rs b/src/libcollectionstest/lib.rs index 98d0b1c8e1565..618eb386c0f4c 100644 --- a/src/libcollectionstest/lib.rs +++ b/src/libcollectionstest/lib.rs @@ -13,11 +13,9 @@ #![feature(binary_heap_extras)] #![feature(binary_heap_peek_mut_pop)] #![feature(box_syntax)] -#![feature(btree_range)] #![feature(inclusive_range_syntax)] #![feature(collection_placement)] #![feature(collections)] -#![feature(collections_bound)] #![feature(const_fn)] #![feature(exact_size_is_empty)] #![feature(pattern)] diff --git 
a/src/libcompiler_builtins/lib.rs b/src/libcompiler_builtins/lib.rs index fb42b915c7694..58aba11e4394f 100644 --- a/src/libcompiler_builtins/lib.rs +++ b/src/libcompiler_builtins/lib.rs @@ -34,8 +34,8 @@ pub mod reimpls { macro_rules! ashl { ($a:expr, $b:expr, $ty:ty) => {{ let (a, b) = ($a, $b); - let bits = (::core::mem::size_of::<$ty>() * 8) as $ty; - let half_bits = bits >> 1; + let bits = ::core::mem::size_of::<$ty>().wrapping_mul(8) as $ty; + let half_bits = bits.wrapping_shr(1); if b & half_bits != 0 { <$ty>::from_parts(0, a.low().wrapping_shl( b.wrapping_sub(half_bits) as u32)) @@ -58,8 +58,8 @@ pub mod reimpls { macro_rules! ashr { ($a: expr, $b: expr, $ty:ty) => {{ let (a, b) = ($a, $b); - let bits = (::core::mem::size_of::<$ty>() * 8) as $ty; - let half_bits = bits >> 1; + let bits = ::core::mem::size_of::<$ty>().wrapping_mul(8) as $ty; + let half_bits = bits.wrapping_shr(1); if b & half_bits != 0 { <$ty>::from_parts(a.high().wrapping_shr(b.wrapping_sub(half_bits) as u32) as <$ty as LargeInt>::LowHalf, @@ -83,8 +83,8 @@ pub mod reimpls { macro_rules! lshr { ($a: expr, $b: expr, $ty:ty) => {{ let (a, b) = ($a, $b); - let bits = (::core::mem::size_of::<$ty>() * 8) as $ty; - let half_bits = bits >> 1; + let bits = ::core::mem::size_of::<$ty>().wrapping_mul(8) as $ty; + let half_bits = bits.wrapping_shr(1); if b & half_bits != 0 { <$ty>::from_parts(a.high().wrapping_shr(b.wrapping_sub(half_bits) as u32), 0) } else if b == 0 { @@ -370,7 +370,7 @@ pub mod reimpls { macro_rules! mul { ($a:expr, $b:expr, $ty: ty, $tyh: ty) => {{ let (a, b) = ($a, $b); - let half_bits = ((::core::mem::size_of::<$tyh>() * 8) / 2) as u32; + let half_bits = ::core::mem::size_of::<$tyh>().wrapping_mul(4) as u32; let lower_mask = (!0u64).wrapping_shr(half_bits); let mut low = (a.low() & lower_mask).wrapping_mul(b.low() & lower_mask); let mut t = low.wrapping_shr(half_bits); @@ -478,7 +478,7 @@ pub mod reimpls { let mantissa_fraction = repr & <$fromty as FloatStuff>::MANTISSA_MASK; let mantissa = mantissa_fraction | <$fromty as FloatStuff>::MANTISSA_LEAD_BIT; if sign == -1.0 || exponent < 0 { return 0 as u128; } - if exponent > ::core::mem::size_of::<$outty>() as i32 * 8 { + if exponent > ::core::mem::size_of::<$outty>().wrapping_mul(8) as i32 { return !(0 as u128); } (if exponent < (<$fromty as FloatStuff>::MANTISSA_BITS) as i32 { @@ -503,7 +503,7 @@ pub mod reimpls { let mantissa = mantissa_fraction | <$fromty as FloatStuff>::MANTISSA_LEAD_BIT; if exponent < 0 { return 0 as i128; } - if exponent > ::core::mem::size_of::<$outty>() as i32 * 8 { + if exponent > ::core::mem::size_of::<$outty>().wrapping_mul(8) as i32 { let ret = if sign > 0.0 { <$outty>::max_value() } else { <$outty>::min_value() }; return ret } diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs index 736797d162b1d..0186d9727828d 100644 --- a/src/libcore/cell.rs +++ b/src/libcore/cell.rs @@ -394,7 +394,6 @@ impl Cell { /// # Examples /// /// ``` - /// #![feature(move_cell)] /// use std::cell::Cell; /// /// let c1 = Cell::new(5i32); @@ -404,7 +403,7 @@ impl Cell { /// assert_eq!(5, c2.get()); /// ``` #[inline] - #[unstable(feature = "move_cell", issue = "39264")] + #[stable(feature = "move_cell", since = "1.17.0")] pub fn swap(&self, other: &Self) { if ptr::eq(self, other) { return; @@ -419,7 +418,6 @@ impl Cell { /// # Examples /// /// ``` - /// #![feature(move_cell)] /// use std::cell::Cell; /// /// let c = Cell::new(5); @@ -427,7 +425,7 @@ impl Cell { /// /// assert_eq!(5, old); /// ``` - #[unstable(feature = "move_cell", issue = "39264")] 
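A combined usage sketch for the `Cell` methods stabilized in this hunk (`swap`, `replace`, `into_inner`, `take`), now callable without the `move_cell` gate:

```rust
use std::cell::Cell;

fn main() {
    let c1 = Cell::new(5i32);
    let c2 = Cell::new(10i32);

    c1.swap(&c2);
    assert_eq!((c1.get(), c2.get()), (10, 5));

    let old = c1.replace(7);
    assert_eq!(old, 10);

    let taken = c2.take(); // leaves Default::default() behind
    assert_eq!(taken, 5);
    assert_eq!(c2.get(), 0);

    assert_eq!(c1.into_inner(), 7);
}
```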
+ #[stable(feature = "move_cell", since = "1.17.0")] pub fn replace(&self, val: T) -> T { mem::replace(unsafe { &mut *self.value.get() }, val) } @@ -437,7 +435,6 @@ impl Cell { /// # Examples /// /// ``` - /// #![feature(move_cell)] /// use std::cell::Cell; /// /// let c = Cell::new(5); @@ -445,7 +442,7 @@ impl Cell { /// /// assert_eq!(five, 5); /// ``` - #[unstable(feature = "move_cell", issue = "39264")] + #[stable(feature = "move_cell", since = "1.17.0")] pub fn into_inner(self) -> T { unsafe { self.value.into_inner() } } @@ -457,7 +454,6 @@ impl Cell { /// # Examples /// /// ``` - /// #![feature(move_cell)] /// use std::cell::Cell; /// /// let c = Cell::new(5); @@ -466,7 +462,7 @@ impl Cell { /// assert_eq!(five, 5); /// assert_eq!(c.into_inner(), 0); /// ``` - #[unstable(feature = "move_cell", issue = "39264")] + #[stable(feature = "move_cell", since = "1.17.0")] pub fn take(&self) -> T { self.replace(Default::default()) } diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index b43fe757d8473..cb39796eecd7d 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -255,8 +255,6 @@ impl Ordering { /// # Examples /// /// ``` - /// #![feature(ordering_chaining)] - /// /// use std::cmp::Ordering; /// /// let result = Ordering::Equal.then(Ordering::Less); @@ -278,7 +276,7 @@ impl Ordering { /// assert_eq!(result, Ordering::Less); /// ``` #[inline] - #[unstable(feature = "ordering_chaining", issue = "37053")] + #[stable(feature = "ordering_chaining", since = "1.17.0")] pub fn then(self, other: Ordering) -> Ordering { match self { Equal => other, @@ -294,8 +292,6 @@ impl Ordering { /// # Examples /// /// ``` - /// #![feature(ordering_chaining)] - /// /// use std::cmp::Ordering; /// /// let result = Ordering::Equal.then_with(|| Ordering::Less); @@ -317,7 +313,7 @@ impl Ordering { /// assert_eq!(result, Ordering::Less); /// ``` #[inline] - #[unstable(feature = "ordering_chaining", issue = "37053")] + #[stable(feature = "ordering_chaining", since = "1.17.0")] pub fn then_with Ordering>(self, f: F) -> Ordering { match self { Equal => f(), diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 1657342ff6ac6..0bfab92fa5d51 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -1045,6 +1045,7 @@ impl<'a> Formatter<'a> { // is zero Some(min) if self.sign_aware_zero_pad() => { self.fill = '0'; + self.align = rt::v1::Alignment::Right; write_prefix(self)?; self.with_padding(min - width, rt::v1::Alignment::Right, |f| { f.buf.write_str(buf) @@ -1153,8 +1154,9 @@ impl<'a> Formatter<'a> { // for the sign-aware zero padding, we render the sign first and // behave as if we had no sign from the beginning. let mut formatted = formatted.clone(); - let mut align = self.align; let old_fill = self.fill; + let old_align = self.align; + let mut align = old_align; if self.sign_aware_zero_pad() { // a sign always goes first let sign = unsafe { str::from_utf8_unchecked(formatted.sign) }; @@ -1165,6 +1167,7 @@ impl<'a> Formatter<'a> { width = if width < sign.len() { 0 } else { width - sign.len() }; align = rt::v1::Alignment::Right; self.fill = '0'; + self.align = rt::v1::Alignment::Right; } // remaining parts go through the ordinary padding process. 
@@ -1177,6 +1180,7 @@ impl<'a> Formatter<'a> { }) }; self.fill = old_fill; + self.align = old_align; ret } else { // this is the common case and we take a shortcut diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 12410c08f399b..f8d067e9696fd 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -1238,6 +1238,15 @@ extern "rust-intrinsic" { /// undefined behavior where y = 0 or x = `T::min_value()` and y = -1 pub fn unchecked_rem(x: T, y: T) -> T; + /// Performs an unchecked left shift, resulting in undefined behavior when + /// y < 0 or y >= N, where N is the width of T in bits. + #[cfg(not(stage0))] + pub fn unchecked_shl(x: T, y: T) -> T; + /// Performs an unchecked right shift, resulting in undefined behavior when + /// y < 0 or y >= N, where N is the width of T in bits. + #[cfg(not(stage0))] + pub fn unchecked_shr(x: T, y: T) -> T; + /// Returns (a + b) mod 2^N, where N is the width of T in bits. /// The stabilized versions of this intrinsic are available on the integer /// primitives via the `wrapping_add` method. For example, diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 81c80ba51152d..804baac3257db 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -177,7 +177,7 @@ macro_rules! checked_op { // `Int` + `SignedInt` implemented for signed integers macro_rules! int_impl { - ($ActualT:ident, $UnsignedT:ty, $BITS:expr, + ($SelfT:ty, $ActualT:ident, $UnsignedT:ty, $BITS:expr, $add_with_overflow:path, $sub_with_overflow:path, $mul_with_overflow:path) => { @@ -850,6 +850,17 @@ macro_rules! int_impl { /// ``` #[stable(feature = "num_wrapping", since = "1.2.0")] #[inline(always)] + #[cfg(not(stage0))] + pub fn wrapping_shl(self, rhs: u32) -> Self { + unsafe { + intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT) + } + } + + /// Stage 0 + #[stable(feature = "num_wrapping", since = "1.2.0")] + #[inline(always)] + #[cfg(stage0)] pub fn wrapping_shl(self, rhs: u32) -> Self { self.overflowing_shl(rhs).0 } @@ -875,6 +886,17 @@ macro_rules! int_impl { /// ``` #[stable(feature = "num_wrapping", since = "1.2.0")] #[inline(always)] + #[cfg(not(stage0))] + pub fn wrapping_shr(self, rhs: u32) -> Self { + unsafe { + intrinsics::unchecked_shr(self, (rhs & ($BITS - 1)) as $SelfT) + } + } + + /// Stage 0 + #[stable(feature = "num_wrapping", since = "1.2.0")] + #[inline(always)] + #[cfg(stage0)] pub fn wrapping_shr(self, rhs: u32) -> Self { self.overflowing_shr(rhs).0 } @@ -1089,6 +1111,15 @@ macro_rules! int_impl { /// ``` #[inline] #[stable(feature = "wrapping", since = "1.7.0")] + #[cfg(not(stage0))] + pub fn overflowing_shl(self, rhs: u32) -> (Self, bool) { + (self.wrapping_shl(rhs), (rhs > ($BITS - 1))) + } + + /// Stage 0 + #[inline] + #[stable(feature = "wrapping", since = "1.7.0")] + #[cfg(stage0)] pub fn overflowing_shl(self, rhs: u32) -> (Self, bool) { (self << (rhs & ($BITS - 1)), (rhs > ($BITS - 1))) } @@ -1111,6 +1142,15 @@ macro_rules! int_impl { /// ``` #[inline] #[stable(feature = "wrapping", since = "1.7.0")] + #[cfg(not(stage0))] + pub fn overflowing_shr(self, rhs: u32) -> (Self, bool) { + (self.wrapping_shr(rhs), (rhs > ($BITS - 1))) + } + + /// Stage 0 + #[inline] + #[stable(feature = "wrapping", since = "1.7.0")] + #[cfg(stage0)] pub fn overflowing_shr(self, rhs: u32) -> (Self, bool) { (self >> (rhs & ($BITS - 1)), (rhs > ($BITS - 1))) } @@ -1268,7 +1308,7 @@ macro_rules! int_impl { #[lang = "i8"] impl i8 { - int_impl! { i8, u8, 8, + int_impl! 
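The new `unchecked_shl`/`unchecked_shr` intrinsics only change how `wrapping_shl`/`wrapping_shr` and the `overflowing_*` variants are implemented; the observable behavior, masking the shift amount to the type's bit width, is unchanged. For reference:

```rust
fn main() {
    // The shift amount is taken modulo the bit width (8 for u8).
    assert_eq!(1u8.wrapping_shl(7), 128);
    assert_eq!(1u8.wrapping_shl(8), 1);    // 8 % 8 == 0
    assert_eq!(128u8.wrapping_shr(9), 64); // 9 % 8 == 1

    // `overflowing_*` additionally reports whether masking happened.
    assert_eq!(1u8.overflowing_shl(8), (1, true));
    assert_eq!(1u8.overflowing_shl(3), (8, false));
}
```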
{ i8, i8, u8, 8, intrinsics::add_with_overflow, intrinsics::sub_with_overflow, intrinsics::mul_with_overflow } @@ -1276,7 +1316,7 @@ impl i8 { #[lang = "i16"] impl i16 { - int_impl! { i16, u16, 16, + int_impl! { i16, i16, u16, 16, intrinsics::add_with_overflow, intrinsics::sub_with_overflow, intrinsics::mul_with_overflow } @@ -1284,7 +1324,7 @@ impl i16 { #[lang = "i32"] impl i32 { - int_impl! { i32, u32, 32, + int_impl! { i32, i32, u32, 32, intrinsics::add_with_overflow, intrinsics::sub_with_overflow, intrinsics::mul_with_overflow } @@ -1292,7 +1332,7 @@ impl i32 { #[lang = "i64"] impl i64 { - int_impl! { i64, u64, 64, + int_impl! { i64, i64, u64, 64, intrinsics::add_with_overflow, intrinsics::sub_with_overflow, intrinsics::mul_with_overflow } @@ -1300,7 +1340,7 @@ impl i64 { #[lang = "i128"] impl i128 { - int_impl! { i128, u128, 128, + int_impl! { i128, i128, u128, 128, intrinsics::add_with_overflow, intrinsics::sub_with_overflow, intrinsics::mul_with_overflow } @@ -1309,7 +1349,7 @@ impl i128 { #[cfg(target_pointer_width = "16")] #[lang = "isize"] impl isize { - int_impl! { i16, u16, 16, + int_impl! { isize, i16, u16, 16, intrinsics::add_with_overflow, intrinsics::sub_with_overflow, intrinsics::mul_with_overflow } @@ -1318,7 +1358,7 @@ impl isize { #[cfg(target_pointer_width = "32")] #[lang = "isize"] impl isize { - int_impl! { i32, u32, 32, + int_impl! { isize, i32, u32, 32, intrinsics::add_with_overflow, intrinsics::sub_with_overflow, intrinsics::mul_with_overflow } @@ -1327,7 +1367,7 @@ impl isize { #[cfg(target_pointer_width = "64")] #[lang = "isize"] impl isize { - int_impl! { i64, u64, 64, + int_impl! { isize, i64, u64, 64, intrinsics::add_with_overflow, intrinsics::sub_with_overflow, intrinsics::mul_with_overflow } @@ -1335,7 +1375,7 @@ impl isize { // `Int` + `UnsignedInt` implemented for unsigned integers macro_rules! uint_impl { - ($ActualT:ty, $BITS:expr, + ($SelfT:ty, $ActualT:ty, $BITS:expr, $ctpop:path, $ctlz:path, $cttz:path, @@ -1978,6 +2018,17 @@ macro_rules! uint_impl { /// ``` #[stable(feature = "num_wrapping", since = "1.2.0")] #[inline(always)] + #[cfg(not(stage0))] + pub fn wrapping_shl(self, rhs: u32) -> Self { + unsafe { + intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT) + } + } + + /// Stage 0 + #[stable(feature = "num_wrapping", since = "1.2.0")] + #[inline(always)] + #[cfg(stage0)] pub fn wrapping_shl(self, rhs: u32) -> Self { self.overflowing_shl(rhs).0 } @@ -2003,6 +2054,17 @@ macro_rules! uint_impl { /// ``` #[stable(feature = "num_wrapping", since = "1.2.0")] #[inline(always)] + #[cfg(not(stage0))] + pub fn wrapping_shr(self, rhs: u32) -> Self { + unsafe { + intrinsics::unchecked_shr(self, (rhs & ($BITS - 1)) as $SelfT) + } + } + + /// Stage 0 + #[stable(feature = "num_wrapping", since = "1.2.0")] + #[inline(always)] + #[cfg(stage0)] pub fn wrapping_shr(self, rhs: u32) -> Self { self.overflowing_shr(rhs).0 } @@ -2170,6 +2232,15 @@ macro_rules! uint_impl { /// ``` #[inline] #[stable(feature = "wrapping", since = "1.7.0")] + #[cfg(not(stage0))] + pub fn overflowing_shl(self, rhs: u32) -> (Self, bool) { + (self.wrapping_shl(rhs), (rhs > ($BITS - 1))) + } + + /// Stage 0 + #[inline] + #[stable(feature = "wrapping", since = "1.7.0")] + #[cfg(stage0)] pub fn overflowing_shl(self, rhs: u32) -> (Self, bool) { (self << (rhs & ($BITS - 1)), (rhs > ($BITS - 1))) } @@ -2192,6 +2263,16 @@ macro_rules! 
uint_impl { /// ``` #[inline] #[stable(feature = "wrapping", since = "1.7.0")] + #[cfg(not(stage0))] + pub fn overflowing_shr(self, rhs: u32) -> (Self, bool) { + (self.wrapping_shr(rhs), (rhs > ($BITS - 1))) + + } + + /// Stage 0 + #[inline] + #[stable(feature = "wrapping", since = "1.7.0")] + #[cfg(stage0)] pub fn overflowing_shr(self, rhs: u32) -> (Self, bool) { (self >> (rhs & ($BITS - 1)), (rhs > ($BITS - 1))) } @@ -2292,7 +2373,7 @@ macro_rules! uint_impl { #[lang = "u8"] impl u8 { - uint_impl! { u8, 8, + uint_impl! { u8, u8, 8, intrinsics::ctpop, intrinsics::ctlz, intrinsics::cttz, @@ -2304,7 +2385,7 @@ impl u8 { #[lang = "u16"] impl u16 { - uint_impl! { u16, 16, + uint_impl! { u16, u16, 16, intrinsics::ctpop, intrinsics::ctlz, intrinsics::cttz, @@ -2316,7 +2397,7 @@ impl u16 { #[lang = "u32"] impl u32 { - uint_impl! { u32, 32, + uint_impl! { u32, u32, 32, intrinsics::ctpop, intrinsics::ctlz, intrinsics::cttz, @@ -2328,7 +2409,7 @@ impl u32 { #[lang = "u64"] impl u64 { - uint_impl! { u64, 64, + uint_impl! { u64, u64, 64, intrinsics::ctpop, intrinsics::ctlz, intrinsics::cttz, @@ -2340,7 +2421,7 @@ impl u64 { #[lang = "u128"] impl u128 { - uint_impl! { u128, 128, + uint_impl! { u128, u128, 128, intrinsics::ctpop, intrinsics::ctlz, intrinsics::cttz, @@ -2353,7 +2434,7 @@ impl u128 { #[cfg(target_pointer_width = "16")] #[lang = "usize"] impl usize { - uint_impl! { u16, 16, + uint_impl! { usize, u16, 16, intrinsics::ctpop, intrinsics::ctlz, intrinsics::cttz, @@ -2365,7 +2446,7 @@ impl usize { #[cfg(target_pointer_width = "32")] #[lang = "usize"] impl usize { - uint_impl! { u32, 32, + uint_impl! { usize, u32, 32, intrinsics::ctpop, intrinsics::ctlz, intrinsics::cttz, @@ -2378,7 +2459,7 @@ impl usize { #[cfg(target_pointer_width = "64")] #[lang = "usize"] impl usize { - uint_impl! { u64, 64, + uint_impl! 
{ usize, u64, 64, intrinsics::ctpop, intrinsics::ctlz, intrinsics::cttz, diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index 0b7aa4fa9117c..909e44df20abb 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -161,8 +161,6 @@ pub unsafe fn read(src: *const T) -> T { /// Basic usage: /// /// ``` -/// #![feature(ptr_unaligned)] -/// /// let x = 12; /// let y = &x as *const i32; /// @@ -171,7 +169,7 @@ pub unsafe fn read(src: *const T) -> T { /// } /// ``` #[inline(always)] -#[unstable(feature = "ptr_unaligned", issue = "37955")] +#[stable(feature = "ptr_unaligned", since = "1.17.0")] pub unsafe fn read_unaligned(src: *const T) -> T { let mut tmp: T = mem::uninitialized(); copy_nonoverlapping(src as *const u8, @@ -243,8 +241,6 @@ pub unsafe fn write(dst: *mut T, src: T) { /// Basic usage: /// /// ``` -/// #![feature(ptr_unaligned)] -/// /// let mut x = 0; /// let y = &mut x as *mut i32; /// let z = 12; @@ -255,7 +251,7 @@ pub unsafe fn write(dst: *mut T, src: T) { /// } /// ``` #[inline] -#[unstable(feature = "ptr_unaligned", issue = "37955")] +#[stable(feature = "ptr_unaligned", since = "1.17.0")] pub unsafe fn write_unaligned(dst: *mut T, src: T) { copy_nonoverlapping(&src as *const T as *const u8, dst as *mut u8, @@ -662,7 +658,6 @@ impl Eq for *mut T {} /// # Examples /// /// ``` -/// #![feature(ptr_eq)] /// use std::ptr; /// /// let five = 5; @@ -677,7 +672,7 @@ impl Eq for *mut T {} /// assert!(ptr::eq(five_ref, same_five_ref)); /// assert!(!ptr::eq(five_ref, other_five_ref)); /// ``` -#[unstable(feature = "ptr_eq", reason = "newly added", issue = "36497")] +#[stable(feature = "ptr_eq", since = "1.17.0")] #[inline] pub fn eq(a: *const T, b: *const T) -> bool { a == b @@ -972,11 +967,19 @@ impl Shared { /// # Safety /// /// `ptr` must be non-null. - pub unsafe fn new(ptr: *mut T) -> Self { + pub unsafe fn new(ptr: *const T) -> Self { Shared { pointer: NonZero::new(ptr), _marker: PhantomData } } } +#[unstable(feature = "shared", issue = "27730")] +impl Shared { + /// Acquires the underlying pointer as a `*mut` pointer. 
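The `ptr_unaligned` and `ptr::eq` stabilizations above also drop their feature gates; a compact demonstration of both, along the lines of the doc examples in this hunk:

```rust
use std::ptr;

fn main() {
    // `read_unaligned` / `write_unaligned` behave like `read` / `write` but
    // without assuming the pointer is properly aligned.
    let mut x = 0i32;
    let p = &mut x as *mut i32;
    unsafe {
        ptr::write_unaligned(p, 12);
        assert_eq!(ptr::read_unaligned(p), 12);
    }

    // `ptr::eq` compares addresses, not pointees.
    let five = 5;
    let other_five = 5;
    assert!(ptr::eq(&five, &five));
    assert!(!ptr::eq(&five, &other_five));
}
```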
+ pub unsafe fn as_mut_ptr(&self) -> *mut T { + **self as _ + } +} + #[unstable(feature = "shared", issue = "27730")] impl Clone for Shared { fn clone(&self) -> Self { @@ -992,10 +995,10 @@ impl CoerceUnsized> for Shared where T: Unsiz #[unstable(feature = "shared", issue = "27730")] impl Deref for Shared { - type Target = *mut T; + type Target = *const T; #[inline] - fn deref(&self) -> &*mut T { + fn deref(&self) -> &*const T { unsafe { mem::transmute(&*self.pointer) } } } diff --git a/src/libcore/result.rs b/src/libcore/result.rs index a05db9b489ca1..00ff2fd2ce5ef 100644 --- a/src/libcore/result.rs +++ b/src/libcore/result.rs @@ -803,12 +803,11 @@ impl Result { /// Basic usage: /// /// ```{.should_panic} - /// # #![feature(result_expect_err)] /// let x: Result = Ok(10); /// x.expect_err("Testing expect_err"); // panics with `Testing expect_err: 10` /// ``` #[inline] - #[unstable(feature = "result_expect_err", issue = "39041")] + #[stable(feature = "result_expect_err", since = "1.17.0")] pub fn expect_err(self, msg: &str) -> E { match self { Ok(t) => unwrap_failed(msg, t), diff --git a/src/libcoretest/lib.rs b/src/libcoretest/lib.rs index e06b757691e5a..d84a1e227560e 100644 --- a/src/libcoretest/lib.rs +++ b/src/libcoretest/lib.rs @@ -23,7 +23,6 @@ #![feature(nonzero)] #![feature(rand)] #![feature(raw)] -#![feature(result_expect_err)] #![feature(sip_hash_13)] #![feature(slice_patterns)] #![feature(step_by)] @@ -31,9 +30,6 @@ #![feature(try_from)] #![feature(unicode)] #![feature(unique)] -#![feature(ordering_chaining)] -#![feature(ptr_unaligned)] -#![feature(move_cell)] #![feature(fmt_internals)] extern crate core; diff --git a/src/libproc_macro_plugin/lib.rs b/src/libproc_macro_plugin/lib.rs index e904290957619..a6dad64125331 100644 --- a/src/libproc_macro_plugin/lib.rs +++ b/src/libproc_macro_plugin/lib.rs @@ -13,62 +13,64 @@ //! A library for procedural macro writers. //! //! ## Usage -//! This crate provides the `qquote!` macro for syntax creation. +//! This crate provides the `quote!` macro for syntax creation. //! -//! The `qquote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;` +//! The `quote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;` //! at the crate root. This is a temporary solution until we have better hygiene. //! //! ## Quasiquotation //! //! The quasiquoter creates output that, when run, constructs the tokenstream specified as -//! input. For example, `qquote!(5 + 5)` will produce a program, that, when run, will +//! input. For example, `quote!(5 + 5)` will produce a program, that, when run, will //! construct the TokenStream `5 | + | 5`. //! //! ### Unquoting //! -//! Unquoting is currently done as `unquote`, and works by taking the single next -//! TokenTree in the TokenStream as the unquoted term. Ergonomically, `unquote(foo)` works -//! fine, but `unquote foo` is also supported. +//! Unquoting is done with `$`, and works by taking the single next ident as the unquoted term. +//! To quote `$` itself, use `$$`. //! -//! A simple example might be: +//! A simple example is: //! //!``` //!fn double(tmp: TokenStream) -> TokenStream { -//! qquote!(unquote(tmp) * 2) +//! quote!($tmp * 2) //!} //!``` //! -//! ### Large Example: Implementing Scheme's `cond` +//! ### Large example: Scheme's `cond` //! -//! Below is the full implementation of Scheme's `cond` operator. +//! Below is an example implementation of Scheme's `cond`. //! //! ``` -//! fn cond_rec(input: TokenStream) -> TokenStream { -//! 
if input.is_empty() { return quote!(); } -//! -//! let next = input.slice(0..1); -//! let rest = input.slice_from(1..); -//! -//! let clause : TokenStream = match next.maybe_delimited() { -//! Some(ts) => ts, -//! _ => panic!("Invalid input"), -//! }; -//! -//! // clause is ([test]) [rhs] -//! if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } -//! -//! let test: TokenStream = clause.slice(0..1); -//! let rhs: TokenStream = clause.slice_from(1..); -//! -//! if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { -//! quote!({unquote(rhs)}) -//! } else { -//! quote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) -//! } +//! fn cond(input: TokenStream) -> TokenStream { +//! let mut conds = Vec::new(); +//! let mut input = input.trees().peekable(); +//! while let Some(tree) = input.next() { +//! let mut cond = match tree { +//! TokenTree::Delimited(_, ref delimited) => delimited.stream(), +//! _ => panic!("Invalid input"), +//! }; +//! let mut trees = cond.trees(); +//! let test = trees.next(); +//! let rhs = trees.collect::(); +//! if rhs.is_empty() { +//! panic!("Invalid macro usage in cond: {}", cond); +//! } +//! let is_else = match test { +//! Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true, +//! _ => false, +//! }; +//! conds.push(if is_else || input.peek().is_none() { +//! quote!({ $rhs }) +//! } else { +//! let test = test.unwrap(); +//! quote!(if $test { $rhs } else) +//! }); +//! } +//! +//! conds.into_iter().collect() //! } //! ``` -//! - #![crate_name = "proc_macro_plugin"] #![unstable(feature = "rustc_private", issue = "27812")] #![feature(plugin_registrar)] @@ -87,8 +89,8 @@ extern crate rustc_plugin; extern crate syntax; extern crate syntax_pos; -mod qquote; -use qquote::qquote; +mod quote; +use quote::quote; use rustc_plugin::Registry; use syntax::ext::base::SyntaxExtension; @@ -99,6 +101,6 @@ use syntax::symbol::Symbol; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(Symbol::intern("qquote"), - SyntaxExtension::ProcMacro(Box::new(qquote))); + reg.register_syntax_extension(Symbol::intern("quote"), + SyntaxExtension::ProcMacro(Box::new(quote))); } diff --git a/src/libproc_macro_plugin/qquote.rs b/src/libproc_macro_plugin/quote.rs similarity index 86% rename from src/libproc_macro_plugin/qquote.rs rename to src/libproc_macro_plugin/quote.rs index 0276587ed52b1..ad71584b61a0f 100644 --- a/src/libproc_macro_plugin/qquote.rs +++ b/src/libproc_macro_plugin/quote.rs @@ -19,7 +19,7 @@ use syntax_pos::DUMMY_SP; use std::iter; -pub fn qquote<'cx>(stream: TokenStream) -> TokenStream { +pub fn quote<'cx>(stream: TokenStream) -> TokenStream { stream.quote() } @@ -72,28 +72,32 @@ impl Quote for TokenStream { return quote!(::syntax::tokenstream::TokenStream::empty()); } - struct Quote(iter::Peekable); + struct Quoter(iter::Peekable); - impl Iterator for Quote { + impl Iterator for Quoter { type Item = TokenStream; fn next(&mut self) -> Option { - let is_unquote = match self.0.peek() { - Some(&TokenTree::Token(_, Token::Ident(ident))) if ident.name == "unquote" => { - self.0.next(); - true + let quoted_tree = if let Some(&TokenTree::Token(_, Token::Dollar)) = self.0.peek() { + self.0.next(); + match self.0.next() { + Some(tree @ TokenTree::Token(_, Token::Ident(..))) => Some(tree.into()), + Some(tree @ TokenTree::Token(_, Token::Dollar)) => Some(tree.quote()), + // FIXME(jseyfried): improve these diagnostics + Some(..) 
=> panic!("`$` must be followed by an ident or `$` in `quote!`"), + None => panic!("unexpected trailing `$` in `quote!`"), } - _ => false, + } else { + self.0.next().as_ref().map(Quote::quote) }; - self.0.next().map(|tree| { - let quoted_tree = if is_unquote { tree.into() } else { tree.quote() }; + quoted_tree.map(|quoted_tree| { quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),) }) } } - let quoted = Quote(self.trees().peekable()).collect::(); + let quoted = Quoter(self.trees().peekable()).collect::(); quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>()) } } diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index 6f5f548aa7802..54ae947214091 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -120,11 +120,12 @@ impl<'a> CheckAttrVisitor<'a> { } fn check_attribute(&self, attr: &ast::Attribute, target: Target) { - let name: &str = &attr.name().as_str(); - match name { - "inline" => self.check_inline(attr, target), - "repr" => self.check_repr(attr, target), - _ => (), + if let Some(name) = attr.name() { + match &*name.as_str() { + "inline" => self.check_inline(attr, target), + "repr" => self.check_repr(attr, target), + _ => (), + } } } } diff --git a/src/librustc/hir/def.rs b/src/librustc/hir/def.rs index aedb8fef2885c..7bab4a8d725dc 100644 --- a/src/librustc/hir/def.rs +++ b/src/librustc/hir/def.rs @@ -12,6 +12,7 @@ use hir::def_id::DefId; use util::nodemap::NodeMap; use syntax::ast; use syntax::ext::base::MacroKind; +use syntax_pos::Span; use hir; #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] @@ -116,6 +117,7 @@ pub type ExportMap = NodeMap>; pub struct Export { pub name: ast::Name, // The name of the target. pub def: Def, // The definition of the target. + pub span: Span, // The span of the target definition. 
} impl CtorKind { diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index d92eaee6f0c8e..d966f4899e50f 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -1296,7 +1296,7 @@ impl<'a> LoweringContext<'a> { let attrs = self.lower_attrs(&i.attrs); let mut vis = self.lower_visibility(&i.vis); if let ItemKind::MacroDef(ref tts) = i.node { - if i.attrs.iter().any(|attr| attr.name() == "macro_export") { + if i.attrs.iter().any(|attr| attr.path == "macro_export") { self.exported_macros.push(hir::MacroDef { name: name, attrs: attrs, id: i.id, span: i.span, body: tts.clone().into(), }); diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 708a74c791af7..843f3a53f33e5 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -408,14 +408,14 @@ pub fn gather_attrs(attrs: &[ast::Attribute]) -> Vec Vec> { let mut out = vec![]; - let level = match Level::from_str(&attr.name().as_str()) { + let level = match attr.name().and_then(|name| Level::from_str(&name.as_str())) { None => return out, Some(lvl) => lvl, }; + let meta = unwrap_or!(attr.meta(), return out); attr::mark_used(attr); - let meta = &attr.value; let metas = if let Some(metas) = meta.meta_item_list() { metas } else { diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index b0c85e2ef4cd4..0cf53826dd427 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -195,6 +195,21 @@ pub struct cmt_<'tcx> { pub type cmt<'tcx> = Rc>; impl<'tcx> cmt_<'tcx> { + pub fn get_def(&self) -> Option { + match self.cat { + Categorization::Deref(ref cmt, ..) | + Categorization::Interior(ref cmt, _) | + Categorization::Downcast(ref cmt, _) => { + if let Categorization::Local(nid) = cmt.cat { + Some(nid) + } else { + None + } + } + _ => None + } + } + pub fn get_field(&self, name: ast::Name) -> Option { match self.cat { Categorization::Deref(ref cmt, ..) | @@ -843,11 +858,10 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { let promotable = self.tcx().rvalue_promotable_to_static.borrow().get(&id).cloned() .unwrap_or(false); - // Only promote `[T; 0]` before an RFC for rvalue promotions - // is accepted. + // When the corresponding feature isn't toggled, only promote `[T; 0]`. let promotable = match expr_ty.sty { ty::TyArray(_, 0) => true, - _ => promotable & false + _ => promotable && self.tcx().sess.features.borrow().rvalue_static_promotion, }; // Compute maximum lifetime of this rvalue. This is 'static if diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index baa22d7061435..1fb5371402574 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -197,7 +197,7 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> { } else { // Emit errors for non-staged-api crates. 
for attr in attrs { - let tag = attr.name(); + let tag = unwrap_or!(attr.name(), continue); if tag == "unstable" || tag == "stable" || tag == "rustc_deprecated" { attr::mark_used(attr); self.tcx.sess.span_err(attr.span(), "stability attributes may not be used \ @@ -402,7 +402,7 @@ impl<'a, 'tcx> Index<'tcx> { let mut is_staged_api = false; for attr in &krate.attrs { - if attr.name() == "stable" || attr.name() == "unstable" { + if attr.path == "stable" || attr.path == "unstable" { is_staged_api = true; break } diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index d7a765fb82215..62be2cdbcc42c 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -636,6 +636,8 @@ macro_rules! options { Some("either `panic` or `abort`"); pub const parse_sanitizer: Option<&'static str> = Some("one of: `address`, `leak`, `memory` or `thread`"); + pub const parse_optimization_fuel: Option<&'static str> = + Some("crate=integer"); } #[allow(dead_code)] @@ -772,6 +774,21 @@ macro_rules! options { } true } + + fn parse_optimization_fuel(slot: &mut Option<(String, u64)>, v: Option<&str>) -> bool { + match v { + None => false, + Some(s) => { + let parts = s.split('=').collect::>(); + if parts.len() != 2 { return false; } + let crate_name = parts[0].to_string(); + let fuel = parts[1].parse::(); + if fuel.is_err() { return false; } + *slot = Some((crate_name, fuel.unwrap())); + true + } + } + } } ) } @@ -974,6 +991,10 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "pass `-install_name @rpath/...` to the macOS linker"), sanitizer: Option = (None, parse_sanitizer, [TRACKED], "Use a sanitizer"), + fuel: Option<(String, u64)> = (None, parse_optimization_fuel, [TRACKED], + "Set the optimization fuel quota for a crate."), + print_fuel: Option = (None, parse_opt_string, [TRACKED], + "Make Rustc print the total optimization fuel used by a crate."), } pub fn default_lib_output() -> CrateType { @@ -1766,11 +1787,13 @@ mod dep_tracking { impl_dep_tracking_hash_via_hash!(bool); impl_dep_tracking_hash_via_hash!(usize); + impl_dep_tracking_hash_via_hash!(u64); impl_dep_tracking_hash_via_hash!(String); impl_dep_tracking_hash_via_hash!(lint::Level); impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(Option); + impl_dep_tracking_hash_via_hash!(Option<(String, u64)>); impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(Option); @@ -1792,6 +1815,7 @@ mod dep_tracking { impl_dep_tracking_hash_for_sortable_vec_of!((String, lint::Level)); impl_dep_tracking_hash_for_sortable_vec_of!((String, Option, Option)); + impl_dep_tracking_hash_for_sortable_vec_of!((String, u64)); impl DepTrackingHash for SearchPaths { fn hash(&self, hasher: &mut DefaultHasher, _: ErrorOutputType) { let mut elems: Vec<_> = self diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 3ba82f34c3266..072301eeda5aa 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -123,6 +123,20 @@ pub struct Session { pub code_stats: RefCell, next_node_id: Cell, + + /// If -zfuel=crate=n is specified, Some(crate). + optimization_fuel_crate: Option, + /// If -zfuel=crate=n is specified, initially set to n. Otherwise 0. + optimization_fuel_limit: Cell, + /// We're rejecting all further optimizations. + out_of_fuel: Cell, + + // The next two are public because the driver needs to read them. + + /// If -zprint-fuel=crate, Some(crate). 
+ pub print_fuel_crate: Option, + /// Always set to zero and incremented so that we can print fuel expended by a crate. + pub print_fuel: Cell, } pub struct PerfStats { @@ -504,6 +518,33 @@ impl Session { println!("Total time spent decoding DefPath tables: {}", duration_to_secs_str(self.perf_stats.decode_def_path_tables_time.get())); } + + /// We want to know if we're allowed to do an optimization for crate foo from -z fuel=foo=n. + /// This expends fuel if applicable, and records fuel if applicable. + pub fn consider_optimizing String>(&self, crate_name: &str, msg: T) -> bool { + let mut ret = true; + match self.optimization_fuel_crate { + Some(ref c) if c == crate_name => { + let fuel = self.optimization_fuel_limit.get(); + ret = fuel != 0; + if fuel == 0 && !self.out_of_fuel.get(){ + println!("optimization-fuel-exhausted: {}", msg()); + self.out_of_fuel.set(true); + } + else if fuel > 0{ + self.optimization_fuel_limit.set(fuel-1); + } + } + _ => {} + } + match self.print_fuel_crate { + Some(ref c) if c == crate_name=> { + self.print_fuel.set(self.print_fuel.get()+1); + }, + _ => {} + } + ret + } } pub fn build_session(sopts: config::Options, @@ -599,6 +640,12 @@ pub fn build_session_(sopts: config::Options, } ); + let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone()); + let optimization_fuel_limit = Cell::new(sopts.debugging_opts.fuel.as_ref() + .map(|i| i.1).unwrap_or(0)); + let print_fuel_crate = sopts.debugging_opts.print_fuel.clone(); + let print_fuel = Cell::new(0); + let sess = Session { dep_graph: dep_graph.clone(), target: target_cfg, @@ -640,6 +687,11 @@ pub fn build_session_(sopts: config::Options, decode_def_path_tables_time: Cell::new(Duration::from_secs(0)), }, code_stats: RefCell::new(CodeStats::new()), + optimization_fuel_crate: optimization_fuel_crate, + optimization_fuel_limit: optimization_fuel_limit, + print_fuel_crate: print_fuel_crate, + print_fuel: print_fuel, + out_of_fuel: Cell::new(false), }; init_llvm(&sess); diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 0e5c786cd8dcf..27525d550ff20 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -274,7 +274,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { .filter(|a| a.check_name("rustc_on_unimplemented")) .next() { - let err_sp = item.meta().span.substitute_dummy(span); + let err_sp = item.span.substitute_dummy(span); let trait_str = self.tcx.item_path_str(trait_ref.def_id); if let Some(istring) = item.value_str() { let istring = &*istring.as_str(); diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index a0aeb4107c156..47801c3921d2a 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -728,6 +728,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { ast_ty_to_ty_cache: RefCell::new(NodeMap()), }, f) } + + pub fn consider_optimizing String>(&self, msg: T) -> bool { + let cname = self.crate_name(LOCAL_CRATE).as_str(); + self.sess.consider_optimizing(&cname, msg) + } } impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> { diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index 123db6e89476c..ade67e3efbe07 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -555,7 +555,6 @@ enum StructKind { } impl<'a, 'gcx, 'tcx> Struct { - // FIXME(camlorn): reprs need a better representation to deal with multiple reprs on one type. 
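To make the new `-Z fuel` machinery concrete: `Session::consider_optimizing` above burns one unit of fuel per approved optimization for the crate named in `-Z fuel=CRATE=N`, answers `false` once the quota is spent (printing a single `optimization-fuel-exhausted:` line), and separately counts approvals when `-Z print-fuel=CRATE` is given; `TyCtxt::consider_optimizing` just supplies the local crate name. A minimal sketch of a caller, modelled on the `ReprOptions` use a little further down in this patch (the function name and message are illustrative):

```rust
use rustc::hir::def_id::DefId;
use rustc::ty::TyCtxt;

// Sketch: a pass asks for permission before applying a layout optimization.
fn may_reorder_fields<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, did: DefId) -> bool {
    // `true` while fuel remains for the local crate (or when no `-Z fuel`
    // limit was given at all); `false` once the quota is exhausted.
    tcx.consider_optimizing(|| format!("reorder fields of {:?}", did))
}
```

Bisecting a miscompilation then amounts to re-running with different values of `N`, while `-Z print-fuel=CRATE` (reported by the driver callback added in librustc_driver below) shows how much fuel an unrestricted build would consume.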
fn new(dl: &TargetDataLayout, fields: &Vec<&'a Layout>, repr: &ReprOptions, kind: StructKind, scapegoat: Ty<'gcx>) -> Result> { @@ -573,12 +572,8 @@ impl<'a, 'gcx, 'tcx> Struct { // Neither do 1-member and 2-member structs. // In addition, code in trans assume that 2-element structs can become pairs. // It's easier to just short-circuit here. - let mut can_optimize = (fields.len() > 2 || StructKind::EnumVariant == kind) - && ! (repr.c || repr.packed); - - // Disable field reordering until we can decide what to do. - // The odd pattern here avoids a warning about the value never being read. - if can_optimize { can_optimize = false; } + let can_optimize = (fields.len() > 2 || StructKind::EnumVariant == kind) + && ! (repr.c || repr.packed || repr.linear || repr.simd); let (optimize, sort_ascending) = match kind { StructKind::AlwaysSizedUnivariant => (can_optimize, false), diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 3c37c7353d683..c2d977a793bba 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -1376,6 +1376,8 @@ pub struct ReprOptions { pub packed: bool, pub simd: bool, pub int: Option, + // Internal only for now. If true, don't reorder fields. + pub linear: bool, } impl ReprOptions { @@ -1398,6 +1400,9 @@ impl ReprOptions { ret.simd = true; } + // This is here instead of layout because the choice must make it into metadata. + ret.linear = !tcx.consider_optimizing(|| format!("Reorder fields of {:?}", + tcx.item_path_str(did))); ret } diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index b441a231874a6..20d495976b05f 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -662,6 +662,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { pub fn bckerr_to_diag(&self, err: &BckError<'tcx>) -> DiagnosticBuilder<'a> { let span = err.span.clone(); let mut immutable_field = None; + let mut local_def = None; let msg = &match err.code { err_mutbl => { @@ -711,6 +712,14 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } None }); + local_def = err.cmt.get_def() + .and_then(|nid| { + if !self.tcx.hir.is_argument(nid) { + Some(self.tcx.hir.span(nid)) + } else { + None + } + }); format!("cannot borrow {} as mutable", descr) } @@ -741,6 +750,11 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { if let Some((span, msg)) = immutable_field { db.span_label(span, &msg); } + if let Some(let_span) = local_def { + if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(let_span) { + db.span_label(let_span, &format!("consider changing this to `mut {}`", snippet)); + } + } db } @@ -1109,6 +1123,11 @@ before rustc 1.16, this temporary lived longer - see issue #39283 \ } else { db.span_label(*error_span, &format!("cannot borrow mutably")); } + } else if let Categorization::Interior(ref cmt, _) = err.cmt.cat { + if let mc::MutabilityCategory::McImmutable = cmt.mutbl { + db.span_label(*error_span, + &"cannot mutably borrow immutable field"); + } } } } diff --git a/src/librustc_data_structures/array_vec.rs b/src/librustc_data_structures/array_vec.rs index 51e6e09ab5003..29fbcb70756ba 100644 --- a/src/librustc_data_structures/array_vec.rs +++ b/src/librustc_data_structures/array_vec.rs @@ -248,7 +248,7 @@ impl<'a, A: Array> Drop for Drain<'a, A> { if self.tail_len > 0 { unsafe { - let source_array_vec = &mut **self.array_vec; + let source_array_vec = &mut *self.array_vec.as_mut_ptr(); // memmove back untouched tail, update to new length let start = source_array_vec.len(); let tail = self.tail_start; @@ -317,4 +317,3 @@ 
impl Default for ManuallyDrop { ManuallyDrop::new() } } - diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index f278325ebec74..8ecfd75dc95a9 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -27,7 +27,6 @@ #![feature(shared)] #![feature(collections_range)] -#![feature(collections_bound)] #![cfg_attr(stage0,feature(field_init_shorthand))] #![feature(nonzero)] #![feature(rustc_private)] diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 62d7512655728..d30183f01f54b 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -514,6 +514,14 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { control.make_glob_map = resolve::MakeGlobMap::Yes; } + if sess.print_fuel_crate.is_some() { + control.compilation_done.callback = box |state| { + let sess = state.session; + println!("Fuel used by {}: {}", + sess.print_fuel_crate.as_ref().unwrap(), + sess.print_fuel.get()); + } + } control } } diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index d0eedcac0c06a..fac49b29598aa 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -18,16 +18,15 @@ use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId}; use syntax::attr; use syntax::parse::token; -use syntax::symbol::{Symbol, InternedString}; +use syntax::symbol::InternedString; use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; use syntax::tokenstream; use rustc::hir; use rustc::hir::*; use rustc::hir::def::Def; use rustc::hir::def_id::DefId; -use rustc::hir::intravisit as visit; +use rustc::hir::intravisit::{self as visit, Visitor}; use rustc::ty::TyCtxt; -use rustc_data_structures::fnv; use std::hash::{Hash, Hasher}; use super::def_path_hash::DefPathHashes; @@ -559,7 +558,7 @@ macro_rules! 
hash_span { }); } -impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> { +impl<'a, 'hash, 'tcx> Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> { fn nested_visit_map<'this>(&'this mut self) -> visit::NestedVisitorMap<'this, 'tcx> { if self.hash_bodies { visit::NestedVisitorMap::OnlyBodies(&self.tcx.hir) @@ -960,50 +959,24 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { } } - fn hash_meta_item(&mut self, meta_item: &ast::MetaItem) { - debug!("hash_meta_item: st={:?}", self.st); - - // ignoring span information, it doesn't matter here - self.hash_discriminant(&meta_item.node); - meta_item.name.as_str().len().hash(self.st); - meta_item.name.as_str().hash(self.st); - - match meta_item.node { - ast::MetaItemKind::Word => {} - ast::MetaItemKind::NameValue(ref lit) => saw_lit(lit).hash(self.st), - ast::MetaItemKind::List(ref items) => { - // Sort subitems so the hash does not depend on their order - let indices = self.indices_sorted_by(&items, |p| { - (p.name().map(Symbol::as_str), fnv::hash(&p.literal().map(saw_lit))) - }); - items.len().hash(self.st); - for (index, &item_index) in indices.iter().enumerate() { - index.hash(self.st); - let nested_meta_item: &ast::NestedMetaItemKind = &items[item_index].node; - self.hash_discriminant(nested_meta_item); - match *nested_meta_item { - ast::NestedMetaItemKind::MetaItem(ref meta_item) => { - self.hash_meta_item(meta_item); - } - ast::NestedMetaItemKind::Literal(ref lit) => { - saw_lit(lit).hash(self.st); - } - } - } - } - } - } - pub fn hash_attributes(&mut self, attributes: &[ast::Attribute]) { debug!("hash_attributes: st={:?}", self.st); let indices = self.indices_sorted_by(attributes, |attr| attr.name()); for i in indices { let attr = &attributes[i]; - if !attr.is_sugared_doc && - !IGNORED_ATTRIBUTES.contains(&&*attr.value.name().as_str()) { + match attr.name() { + Some(name) if IGNORED_ATTRIBUTES.contains(&&*name.as_str()) => continue, + _ => {} + }; + if !attr.is_sugared_doc { SawAttribute(attr.style).hash(self.st); - self.hash_meta_item(&attr.value); + for segment in &attr.path.segments { + SawIdent(segment.identifier.name.as_str()).hash(self.st); + } + for tt in attr.tokens.trees() { + self.hash_token_tree(&tt); + } } } } diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs index 156f8b9e7c489..929249df0b173 100644 --- a/src/librustc_incremental/persist/dirty_clean.rs +++ b/src/librustc_incremental/persist/dirty_clean.rs @@ -104,9 +104,9 @@ pub struct DirtyCleanVisitor<'a, 'tcx:'a> { impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> { fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode { - for item in attr.meta_item_list().unwrap_or(&[]) { + for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(LABEL) { - let value = expect_associated_value(self.tcx, item); + let value = expect_associated_value(self.tcx, &item); match DepNode::from_label_string(&value.as_str(), def_id) { Ok(def_id) => return def_id, Err(()) => { @@ -331,9 +331,9 @@ fn check_config(tcx: TyCtxt, attr: &Attribute) -> bool { debug!("check_config(attr={:?})", attr); let config = &tcx.sess.parse_sess.config; debug!("check_config: config={:?}", config); - for item in attr.meta_item_list().unwrap_or(&[]) { + for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(CFG) { - let value = expect_associated_value(tcx, item); + let value = expect_associated_value(tcx, &item); debug!("check_config: searching for cfg 
{:?}", value); return config.contains(&(value, None)); } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 58336f939d122..f0276f90f274d 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -312,7 +312,7 @@ impl MissingDoc { } } - let has_doc = attrs.iter().any(|a| a.is_value_str() && a.name() == "doc"); + let has_doc = attrs.iter().any(|a| a.is_value_str() && a.check_name("doc")); if !has_doc { cx.span_lint(MISSING_DOCS, sp, @@ -635,7 +635,7 @@ impl LintPass for DeprecatedAttr { impl EarlyLintPass for DeprecatedAttr { fn check_attribute(&mut self, cx: &EarlyContext, attr: &ast::Attribute) { - let name = attr.name(); + let name = unwrap_or!(attr.name(), return); for &&(n, _, ref g) in &self.depr_attrs { if name == n { if let &AttributeGate::Gated(Stability::Deprecated(link), @@ -1121,8 +1121,8 @@ impl LintPass for UnstableFeatures { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnstableFeatures { fn check_attribute(&mut self, ctx: &LateContext, attr: &ast::Attribute) { - if attr.meta().check_name("feature") { - if let Some(items) = attr.meta().meta_item_list() { + if attr.check_name("feature") { + if let Some(items) = attr.meta_item_list() { for item in items { ctx.span_lint(UNSTABLE_FEATURES, item.span(), "unstable feature"); } diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index 443a219928f1c..05dbbc0987025 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -38,6 +38,7 @@ #![feature(slice_patterns)] #![feature(staged_api)] +#[macro_use] extern crate syntax; #[macro_use] extern crate rustc; diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index f9b7c68587678..abba8afd9da86 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -269,6 +269,7 @@ impl LintPass for UnusedAttributes { impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedAttributes { fn check_attribute(&mut self, cx: &LateContext, attr: &ast::Attribute) { debug!("checking attribute: {:?}", attr); + let name = unwrap_or!(attr.name(), return); // Note that check_name() marks the attribute as used if it matches. for &(ref name, ty, _) in BUILTIN_ATTRIBUTES { @@ -294,13 +295,13 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for UnusedAttributes { cx.span_lint(UNUSED_ATTRIBUTES, attr.span, "unused attribute"); // Is it a builtin attribute that must be used at the crate level? let known_crate = BUILTIN_ATTRIBUTES.iter() - .find(|&&(name, ty, _)| attr.name() == name && ty == AttributeType::CrateLevel) + .find(|&&(builtin, ty, _)| name == builtin && ty == AttributeType::CrateLevel) .is_some(); // Has a plugin registered this attribute as one which must be used at // the crate level? 
let plugin_crate = plugin_attributes.iter() - .find(|&&(ref x, t)| attr.name() == &**x && AttributeType::CrateLevel == t) + .find(|&&(ref x, t)| name == &**x && AttributeType::CrateLevel == t) .is_some(); if known_crate || plugin_crate { let msg = match attr.style { diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 63c14a0035f1a..5af4db6041115 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -973,9 +973,11 @@ impl<'a> CrateLoader<'a> { impl<'a> CrateLoader<'a> { pub fn preprocess(&mut self, krate: &ast::Crate) { - for attr in krate.attrs.iter().filter(|m| m.name() == "link_args") { - if let Some(linkarg) = attr.value_str() { - self.cstore.add_used_link_args(&linkarg.as_str()); + for attr in &krate.attrs { + if attr.path == "link_args" { + if let Some(linkarg) = attr.value_str() { + self.cstore.add_used_link_args(&linkarg.as_str()); + } } } } diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index bb30245df5f56..17a6a706e0aaa 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -269,9 +269,12 @@ impl CrateMetadata { } pub fn is_staged_api(&self) -> bool { - self.get_item_attrs(CRATE_DEF_INDEX) - .iter() - .any(|attr| attr.name() == "stable" || attr.name() == "unstable") + for attr in self.get_item_attrs(CRATE_DEF_INDEX) { + if attr.path == "stable" || attr.path == "unstable" { + return true; + } + } + false } pub fn is_allocator(&self) -> bool { diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index b4b9966cbe47b..c2ad598b0c503 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -683,7 +683,7 @@ impl<'a, 'tcx> CrateMetadata { }, ext.kind() ); - callback(def::Export { name: name, def: def }); + callback(def::Export { name: name, def: def, span: DUMMY_SP }); } } return @@ -720,6 +720,7 @@ impl<'a, 'tcx> CrateMetadata { callback(def::Export { def: def, name: self.item_name(child_index), + span: self.entry(child_index).span.decode(self), }); } } @@ -732,12 +733,10 @@ impl<'a, 'tcx> CrateMetadata { } let def_key = self.def_key(child_index); + let span = child.span.decode(self); if let (Some(def), Some(name)) = (self.get_def(child_index), def_key.disambiguated_data.data.get_opt_name()) { - callback(def::Export { - def: def, - name: name, - }); + callback(def::Export { def: def, name: name, span: span }); // For non-reexport structs and variants add their constructors to children. // Reexport lists automatically contain constructors when necessary. match def { @@ -745,10 +744,7 @@ impl<'a, 'tcx> CrateMetadata { if let Some(ctor_def_id) = self.get_struct_ctor_def_id(child_index) { let ctor_kind = self.get_ctor_kind(child_index); let ctor_def = Def::StructCtor(ctor_def_id, ctor_kind); - callback(def::Export { - def: ctor_def, - name: name, - }); + callback(def::Export { def: ctor_def, name: name, span: span }); } } Def::Variant(def_id) => { @@ -756,10 +752,7 @@ impl<'a, 'tcx> CrateMetadata { // value namespace, they are reserved for possible future use. 
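The `attr.path == "link_args"` and `attr.path == "stable"`/`"unstable"` comparisons above rely on the `impl<'a> PartialEq<&'a str> for Path` added to libsyntax near the end of this patch: a path equals a string only when it has exactly one segment with that name, so multi-segment attribute paths simply compare unequal rather than needing a separate guard. Roughly (the construction below is illustrative, not taken from the patch):

```rust
extern crate syntax;
extern crate syntax_pos;

use syntax::ast::{self, Ident};
use syntax_pos::DUMMY_SP;

fn main() {
    // One segment with a matching name: equal.
    let single = ast::Path::from_ident(DUMMY_SP, Ident::from_str("stable"));
    assert!(single == "stable");
    // A path like `foo::stable` has two segments, so it never equals
    // "stable"; callers can therefore skip path attributes without panicking.
}
```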
let ctor_kind = self.get_ctor_kind(child_index); let ctor_def = Def::VariantCtor(def_id, ctor_kind); - callback(def::Export { - def: ctor_def, - name: name, - }); + callback(def::Export { def: ctor_def, name: name, span: span }); } _ => {} } diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 0933fdfd357cd..8c45a66694533 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -241,12 +241,10 @@ impl<'a> Visitor<'a> for AstValidator<'a> { ItemKind::Mod(_) => { // Ensure that `path` attributes on modules are recorded as used (c.f. #35584). attr::first_attr_value_str_by_name(&item.attrs, "path"); - if let Some(attr) = - item.attrs.iter().find(|attr| attr.name() == "warn_directory_ownership") { + if item.attrs.iter().any(|attr| attr.check_name("warn_directory_ownership")) { let lint = lint::builtin::LEGACY_DIRECTORY_OWNERSHIP; let msg = "cannot declare a new module at this location"; self.session.add_lint(lint, item.id, item.span, msg.to_string()); - attr::mark_used(attr); } } ItemKind::Union(ref vdata, _) => { diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 03c61067d64c2..c33d5b9b6e16b 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -23,7 +23,7 @@ use {resolve_error, resolve_struct_error, ResolutionError}; use rustc::middle::cstore::LoadedMacro; use rustc::hir::def::*; -use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId}; +use rustc::hir::def_id::{CrateNum, BUILTIN_MACROS_CRATE, CRATE_DEF_INDEX, DefId}; use rustc::ty; use std::cell::Cell; @@ -496,6 +496,9 @@ impl<'a> Resolver<'a> { let def_id = self.macro_defs[&expansion]; if let Some(id) = self.definitions.as_local_node_id(def_id) { self.local_macro_def_scopes[&id] + } else if def_id.krate == BUILTIN_MACROS_CRATE { + // FIXME(jseyfried): This happens when `include!()`ing a `$crate::` path, c.f, #40469. 
+ self.graph_root } else { let module_def_id = ty::DefIdTree::parent(&*self, def_id).unwrap(); self.get_extern_crate_root(module_def_id.krate) @@ -602,7 +605,7 @@ impl<'a> Resolver<'a> { let ident = Ident::with_empty_ctxt(name); let result = self.resolve_ident_in_module(module, ident, MacroNS, false, None); if let Ok(binding) = result { - self.macro_exports.push(Export { name: name, def: binding.def() }); + self.macro_exports.push(Export { name: name, def: binding.def(), span: span }); } else { span_err!(self.session, span, E0470, "reexported macro not found"); } diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 104cc78597a4a..f832e0f9a4811 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -1165,6 +1165,7 @@ pub struct Resolver<'a> { privacy_errors: Vec>, ambiguity_errors: Vec>, + gated_errors: FxHashSet, disallowed_shadowing: Vec<&'a LegacyBinding<'a>>, arenas: &'a ResolverArenas<'a>, @@ -1355,6 +1356,7 @@ impl<'a> Resolver<'a> { privacy_errors: Vec::new(), ambiguity_errors: Vec::new(), + gated_errors: FxHashSet(), disallowed_shadowing: Vec::new(), arenas: arenas, @@ -3359,8 +3361,9 @@ impl<'a> Resolver<'a> { if self.proc_macro_enabled { return; } for attr in attrs { - let maybe_binding = self.builtin_macros.get(&attr.name()).cloned().or_else(|| { - let ident = Ident::with_empty_ctxt(attr.name()); + let name = unwrap_or!(attr.name(), continue); + let maybe_binding = self.builtin_macros.get(&name).cloned().or_else(|| { + let ident = Ident::with_empty_ctxt(name); self.resolve_lexical_macro_path_segment(ident, MacroNS, None).ok() }); diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index dbdf3a0b21e69..f72c2eadb04a8 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -28,8 +28,11 @@ use syntax::ext::placeholders::placeholder; use syntax::ext::tt::macro_rules; use syntax::feature_gate::{self, emit_feature_err, GateIssue}; use syntax::fold::{self, Folder}; +use syntax::parse::parser::PathStyle; +use syntax::parse::token::{self, Token}; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; +use syntax::tokenstream::{TokenStream, TokenTree, Delimited}; use syntax::util::lev_distance::find_best_match_for_name; use syntax_pos::{Span, DUMMY_SP}; @@ -179,12 +182,14 @@ impl<'a> base::Resolver for Resolver<'a> { fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec) -> Option { for i in 0..attrs.len() { + let name = unwrap_or!(attrs[i].name(), continue); + if self.session.plugin_attributes.borrow().iter() - .any(|&(ref attr_nm, _)| attrs[i].name() == &**attr_nm) { + .any(|&(ref attr_nm, _)| name == &**attr_nm) { attr::mark_known(&attrs[i]); } - match self.builtin_macros.get(&attrs[i].name()).cloned() { + match self.builtin_macros.get(&name).cloned() { Some(binding) => match *binding.get_macro(self) { MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) 
=> { return Some(attrs.remove(i)) @@ -197,17 +202,25 @@ impl<'a> base::Resolver for Resolver<'a> { // Check for legacy derives for i in 0..attrs.len() { - if attrs[i].name() == "derive" { - let mut traits = match attrs[i].meta_item_list() { - Some(traits) if !traits.is_empty() => traits.to_owned(), - _ => continue, + let name = unwrap_or!(attrs[i].name(), continue); + + if name == "derive" { + let result = attrs[i].parse_list(&self.session.parse_sess, + |parser| parser.parse_path(PathStyle::Mod)); + let mut traits = match result { + Ok(traits) => traits, + Err(mut e) => { + e.cancel(); + continue + } }; for j in 0..traits.len() { - let legacy_name = Symbol::intern(&match traits[j].word() { - Some(..) => format!("derive_{}", traits[j].name().unwrap()), - None => continue, - }); + if traits[j].segments.len() > 1 { + continue + } + let trait_name = traits[j].segments[0].identifier.name; + let legacy_name = Symbol::intern(&format!("derive_{}", trait_name)); if !self.builtin_macros.contains_key(&legacy_name) { continue } @@ -216,18 +229,27 @@ impl<'a> base::Resolver for Resolver<'a> { if traits.is_empty() { attrs.remove(i); } else { - attrs[i].value = ast::MetaItem { - name: attrs[i].name(), - span: attrs[i].span, - node: ast::MetaItemKind::List(traits), - }; + let mut tokens = Vec::new(); + for (i, path) in traits.iter().enumerate() { + if i > 0 { + tokens.push(TokenTree::Token(attrs[i].span, Token::Comma).into()); + } + for (j, segment) in path.segments.iter().enumerate() { + if j > 0 { + tokens.push(TokenTree::Token(path.span, Token::ModSep).into()); + } + let tok = Token::Ident(segment.identifier); + tokens.push(TokenTree::Token(path.span, tok).into()); + } + } + attrs[i].tokens = TokenTree::Delimited(attrs[i].span, Delimited { + delim: token::Paren, + tts: TokenStream::concat(tokens).into(), + }).into(); } return Some(ast::Attribute { - value: ast::MetaItem { - name: legacy_name, - span: span, - node: ast::MetaItemKind::Word, - }, + path: ast::Path::from_ident(span, Ident::with_empty_ctxt(legacy_name)), + tokens: TokenStream::empty(), id: attr::mk_attr_id(), style: ast::AttrStyle::Outer, is_sugared_doc: false, @@ -267,28 +289,27 @@ impl<'a> Resolver<'a> { InvocationKind::Bang { ref mac, .. } => { return self.resolve_macro_to_def(scope, &mac.node.path, MacroKind::Bang, force); } - InvocationKind::Derive { name, span, .. } => { - let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name)); - return self.resolve_macro_to_def(scope, &path, MacroKind::Derive, force); + InvocationKind::Derive { ref path, .. 
} => { + return self.resolve_macro_to_def(scope, path, MacroKind::Derive, force); } }; - let (attr_name, path) = { - let attr = attr.as_ref().unwrap(); - (attr.name(), ast::Path::from_ident(attr.span, Ident::with_empty_ctxt(attr.name()))) - }; - let mut determined = true; + let path = attr.as_ref().unwrap().path.clone(); + let mut determinacy = Determinacy::Determined; match self.resolve_macro_to_def(scope, &path, MacroKind::Attr, force) { Ok(def) => return Ok(def), - Err(Determinacy::Undetermined) => determined = false, + Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined, Err(Determinacy::Determined) if force => return Err(Determinacy::Determined), Err(Determinacy::Determined) => {} } - for &(name, span) in traits { - let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name)); - match self.resolve_macro(scope, &path, MacroKind::Derive, force) { + let attr_name = match path.segments.len() { + 1 => path.segments[0].identifier.name, + _ => return Err(determinacy), + }; + for path in traits { + match self.resolve_macro(scope, path, MacroKind::Derive, force) { Ok(ext) => if let SyntaxExtension::ProcMacroDerive(_, ref inert_attrs) = *ext { if inert_attrs.contains(&attr_name) { // FIXME(jseyfried) Avoid `mem::replace` here. @@ -307,12 +328,12 @@ impl<'a> Resolver<'a> { } return Err(Determinacy::Undetermined); }, - Err(Determinacy::Undetermined) => determined = false, + Err(Determinacy::Undetermined) => determinacy = Determinacy::Undetermined, Err(Determinacy::Determined) => {} } } - Err(if determined { Determinacy::Determined } else { Determinacy::Undetermined }) + Err(determinacy) } fn resolve_macro_to_def(&mut self, scope: Mark, path: &ast::Path, kind: MacroKind, force: bool) @@ -331,7 +352,7 @@ impl<'a> Resolver<'a> { self.current_module = invocation.module.get(); if path.len() > 1 { - if !self.use_extern_macros { + if !self.use_extern_macros && self.gated_errors.insert(span) { let msg = "non-ident macro paths are experimental"; let feature = "use_extern_macros"; emit_feature_err(&self.session.parse_sess, feature, span, GateIssue::Language, msg); @@ -632,7 +653,7 @@ impl<'a> Resolver<'a> { if attr::contains_name(&item.attrs, "macro_export") { let def = Def::Macro(def_id, MacroKind::Bang); - self.macro_exports.push(Export { name: ident.name, def: def }); + self.macro_exports.push(Export { name: ident.name, def: def, span: item.span }); } } diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index dbc8bca548b76..2f4ac12cd7363 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -21,7 +21,7 @@ use rustc::ty; use rustc::lint::builtin::PRIVATE_IN_PUBLIC; use rustc::hir::def_id::DefId; use rustc::hir::def::*; -use rustc::util::nodemap::FxHashSet; +use rustc::util::nodemap::FxHashMap; use syntax::ast::{Ident, NodeId}; use syntax::ext::base::Determinacy::{self, Determined, Undetermined}; @@ -763,10 +763,11 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> { *module.globs.borrow_mut() = Vec::new(); let mut reexports = Vec::new(); + let mut exported_macro_names = FxHashMap(); if module as *const _ == self.graph_root as *const _ { - let mut exported_macro_names = FxHashSet(); - for export in mem::replace(&mut self.macro_exports, Vec::new()).into_iter().rev() { - if exported_macro_names.insert(export.name) { + let macro_exports = mem::replace(&mut self.macro_exports, Vec::new()); + for export in macro_exports.into_iter().rev() { + if exported_macro_names.insert(export.name, 
export.span).is_none() { reexports.push(export); } } @@ -786,7 +787,17 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> { if !def.def_id().is_local() { self.session.cstore.export_macros(def.def_id().krate); } - reexports.push(Export { name: ident.name, def: def }); + if let Def::Macro(..) = def { + if let Some(&span) = exported_macro_names.get(&ident.name) { + let msg = + format!("a macro named `{}` has already been exported", ident); + self.session.struct_span_err(span, &msg) + .span_label(span, &format!("`{}` already exported", ident)) + .span_note(binding.span, "previous macro export here") + .emit(); + } + } + reexports.push(Export { name: ident.name, def: def, span: binding.span }); } } diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index 61956e5cd9d66..6437533e8cd8a 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -1375,15 +1375,6 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'l> for DumpVisitor<'l, 'tcx, 'll, debug!("visit_expr {:?}", ex.node); self.process_macro_use(ex.span, ex.id); match ex.node { - ast::ExprKind::Call(ref _f, ref _args) => { - // Don't need to do anything for function calls, - // because just walking the callee path does what we want. - visit::walk_expr(self, ex); - } - ast::ExprKind::Path(_, ref path) => { - self.process_path(ex.id, path, None); - visit::walk_expr(self, ex); - } ast::ExprKind::Struct(ref path, ref fields, ref base) => { let hir_expr = self.save_ctxt.tcx.hir.expect_expr(ex.id); let adt = match self.save_ctxt.tables.expr_ty_opt(&hir_expr) { @@ -1481,6 +1472,8 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'l> for DumpVisitor<'l, 'tcx, 'll, self.visit_expr(element); self.nest_tables(count.id, |v| v.visit_expr(count)); } + // In particular, we take this branch for call and path expressions, + // where we'll index the idents involved just by continuing to walk. 
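Stepping back from the save-analysis hunk for a moment: the reason `def::Export` grew a `span` field is visible in the resolve_imports change above, where the map of already-exported macro names keeps the previous export's span so the new "a macro named `...` has already been exported" error can point at it with a "previous macro export here" note; the metadata decoder changes earlier feed real spans through for the same reason. For any code constructing exports, the shape is now simply (the bindings here are placeholders):

```rust
// Sketch: `Export` now records where the exported item was defined.
let export = def::Export {
    name: ident.name,  // exported name
    def: def,          // its resolution, e.g. `Def::Macro(def_id, MacroKind::Bang)`
    span: item.span,   // new field: the definition's span, for diagnostics
};
```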
_ => { visit::walk_expr(self, ex) } @@ -1580,4 +1573,39 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'l> for DumpVisitor<'l, 'tcx, 'll, walk_list!(self, visit_ty, &l.ty); walk_list!(self, visit_expr, &l.init); } + + fn visit_foreign_item(&mut self, item: &'l ast::ForeignItem) { + match item.node { + ast::ForeignItemKind::Fn(ref decl, ref generics) => { + if let Some(fn_data) = self.save_ctxt.get_extern_item_data(item) { + down_cast_data!(fn_data, FunctionData, item.span); + if !self.span.filter_generated(Some(fn_data.span), item.span) { + self.dumper.function(fn_data.clone().lower(self.tcx)); + } + + self.nest_tables(item.id, |v| v.process_formals(&decl.inputs, + &fn_data.qualname)); + self.process_generic_params(generics, item.span, &fn_data.qualname, item.id); + } + + for arg in &decl.inputs { + self.visit_ty(&arg.ty); + } + + if let ast::FunctionRetTy::Ty(ref ret_ty) = decl.output { + self.visit_ty(&ret_ty); + } + } + ast::ForeignItemKind::Static(ref ty, _) => { + if let Some(var_data) = self.save_ctxt.get_extern_item_data(item) { + down_cast_data!(var_data, VariableData, item.span); + if !self.span.filter_generated(Some(var_data.span), item.span) { + self.dumper.variable(var_data.lower(self.tcx)); + } + } + + self.visit_ty(ty); + } + } + } } diff --git a/src/librustc_save_analysis/external_data.rs b/src/librustc_save_analysis/external_data.rs index 41658dc5b1b48..f038c2dc298ad 100644 --- a/src/librustc_save_analysis/external_data.rs +++ b/src/librustc_save_analysis/external_data.rs @@ -14,7 +14,6 @@ use rustc::ty::TyCtxt; use syntax::ast::{self, NodeId}; use syntax::codemap::CodeMap; use syntax::print::pprust; -use syntax::symbol::Symbol; use syntax_pos::Span; use data::{self, Visibility, SigElement}; @@ -77,10 +76,9 @@ impl Lower for Vec { type Target = Vec; fn lower(self, tcx: TyCtxt) -> Vec { - let doc = Symbol::intern("doc"); self.into_iter() // Only retain real attributes. Doc comments are lowered separately. - .filter(|attr| attr.name() != doc) + .filter(|attr| attr.path != "doc") .map(|mut attr| { // Remove the surrounding '#[..]' or '#![..]' of the pretty printed // attribute. 
First normalize all inner attribute (#![..]) to outer diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 111c8370be2b1..8ac550c3d85b8 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -54,7 +54,7 @@ use std::path::{Path, PathBuf}; use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; use syntax::parse::lexer::comments::strip_doc_comment_decoration; use syntax::parse::token; -use syntax::symbol::{Symbol, keywords}; +use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{ty_to_string, arg_to_string}; use syntax::codemap::MacroAttribute; @@ -116,6 +116,48 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { result } + pub fn get_extern_item_data(&self, item: &ast::ForeignItem) -> Option { + let qualname = format!("::{}", self.tcx.node_path_str(item.id)); + match item.node { + ast::ForeignItemKind::Fn(ref decl, ref generics) => { + let sub_span = self.span_utils.sub_span_after_keyword(item.span, keywords::Fn); + Some(Data::FunctionData(FunctionData { + id: item.id, + name: item.ident.to_string(), + qualname: qualname, + declaration: None, + span: sub_span.unwrap(), + scope: self.enclosing_scope(item.id), + value: make_signature(decl, generics), + visibility: From::from(&item.vis), + parent: None, + docs: docs_for_attrs(&item.attrs), + sig: self.sig_base_extern(item), + attributes: item.attrs.clone(), + })) + } + ast::ForeignItemKind::Static(ref ty, m) => { + let keyword = if m { keywords::Mut } else { keywords::Static }; + let sub_span = self.span_utils.sub_span_after_keyword(item.span, keyword); + Some(Data::VariableData(VariableData { + id: item.id, + kind: VariableKind::Static, + name: item.ident.to_string(), + qualname: qualname, + span: sub_span.unwrap(), + scope: self.enclosing_scope(item.id), + parent: None, + value: String::new(), + type_value: ty_to_string(ty), + visibility: From::from(&item.vis), + docs: docs_for_attrs(&item.attrs), + sig: Some(self.sig_base_extern(item)), + attributes: item.attrs.clone(), + })) + } + } + } + pub fn get_item_data(&self, item: &ast::Item) -> Option { match item.node { ast::ItemKind::Fn(ref decl, .., ref generics, _) => { @@ -748,6 +790,21 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { } } + fn sig_base_extern(&self, item: &ast::ForeignItem) -> Signature { + let text = self.span_utils.signature_string_for_span(item.span); + let name = item.ident.to_string(); + let ident_start = text.find(&name).expect("Name not in signature?"); + let ident_end = ident_start + name.len(); + Signature { + span: mk_sp(item.span.lo, item.span.lo + BytePos(text.len() as u32)), + text: text, + ident_start: ident_start, + ident_end: ident_end, + defs: vec![], + refs: vec![], + } + } + #[inline] pub fn enclosing_scope(&self, id: NodeId) -> NodeId { self.tcx.hir.get_enclosing_scope(id).unwrap_or(CRATE_NODE_ID) @@ -829,11 +886,10 @@ impl<'a> Visitor<'a> for PathCollector { } fn docs_for_attrs(attrs: &[Attribute]) -> String { - let doc = Symbol::intern("doc"); let mut result = String::new(); for attr in attrs { - if attr.name() == doc { + if attr.check_name("doc") { if let Some(val) = attr.value_str() { if attr.is_sugared_doc { result.push_str(&strip_doc_comment_decoration(&val.as_str())); diff --git a/src/librustc_trans/assert_module_sources.rs b/src/librustc_trans/assert_module_sources.rs index 7a41f8341099b..8528482c7856c 100644 --- a/src/librustc_trans/assert_module_sources.rs +++ b/src/librustc_trans/assert_module_sources.rs @@ -113,7 +113,7 @@ impl<'a, 
'tcx> AssertModuleSource<'a, 'tcx> { } fn field(&self, attr: &ast::Attribute, name: &str) -> ast::Name { - for item in attr.meta_item_list().unwrap_or(&[]) { + for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name(name) { if let Some(value) = item.value_str() { return value; diff --git a/src/librustc_trans/intrinsic.rs b/src/librustc_trans/intrinsic.rs index b7aedb742db02..762bf8592ffcc 100644 --- a/src/librustc_trans/intrinsic.rs +++ b/src/librustc_trans/intrinsic.rs @@ -261,7 +261,7 @@ pub fn trans_intrinsic_call<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, "ctlz" | "cttz" | "ctpop" | "bswap" | "add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" | "overflowing_add" | "overflowing_sub" | "overflowing_mul" | - "unchecked_div" | "unchecked_rem" => { + "unchecked_div" | "unchecked_rem" | "unchecked_shl" | "unchecked_shr" => { let sty = &arg_tys[0].sty; match int_type_width_signed(sty, ccx) { Some((width, signed)) => @@ -311,6 +311,13 @@ pub fn trans_intrinsic_call<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, } else { bcx.urem(llargs[0], llargs[1]) }, + "unchecked_shl" => bcx.shl(llargs[0], llargs[1]), + "unchecked_shr" => + if signed { + bcx.ashr(llargs[0], llargs[1]) + } else { + bcx.lshr(llargs[0], llargs[1]) + }, _ => bug!(), }, None => { diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index 4b88f5acf42da..529ee107c46ce 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -254,7 +254,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Call the generic checker. let expected_arg_tys = - self.expected_types_for_fn_args(call_expr.span, + self.expected_inputs_for_expected_output(call_expr.span, expected, fn_sig.output(), fn_sig.inputs()); @@ -280,7 +280,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // do know the types expected for each argument and the return // type. - let expected_arg_tys = self.expected_types_for_fn_args(call_expr.span, + let expected_arg_tys = self.expected_inputs_for_expected_output(call_expr.span, expected, fn_sig.output().clone(), fn_sig.inputs()); diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index 28996b40cfdfe..2861fd288326b 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -273,6 +273,8 @@ pub fn check_intrinsic_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, "unchecked_div" | "unchecked_rem" => (1, vec![param(0), param(0)], param(0)), + "unchecked_shl" | "unchecked_shr" => + (1, vec![param(0), param(0)], param(0)), "overflowing_add" | "overflowing_sub" | "overflowing_mul" => (1, vec![param(0), param(0)], param(0)), diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 5a582a523ea1c..f36254a8a10ea 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -2292,7 +2292,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { match method_fn_ty.sty { ty::TyFnDef(def_id, .., ref fty) => { // HACK(eddyb) ignore self in the definition (see above). - let expected_arg_tys = self.expected_types_for_fn_args( + let expected_arg_tys = self.expected_inputs_for_expected_output( sp, expected, fty.0.output(), @@ -2645,14 +2645,14 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { TypeAndSubsts { substs: substs, ty: substd_ty } } - /// Unifies the return type with the expected type early, for more coercions - /// and forward type information on the argument expressions. 
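The `unchecked_shl`/`unchecked_shr` intrinsics added above follow the same pattern as `unchecked_div`/`unchecked_rem`: one type parameter, two operands of that type, a result of that type, lowered straight to LLVM `shl`/`ashr`/`lshr` with no masking of the shift amount, so a shift count of at least the bit width is undefined behaviour. A minimal sketch of declaring and calling such an intrinsic from nightly code (the wrapper is hypothetical, and this diff does not show whether libcore also re-exports the new intrinsics):

```rust
#![feature(intrinsics)]

extern "rust-intrinsic" {
    // Signature shape checked in librustc_typeck above:
    // one type parameter, arguments [param(0), param(0)], result param(0).
    fn unchecked_shl<T>(x: T, y: T) -> T;
}

/// Hypothetical wrapper: the caller must guarantee `shift < 32`.
unsafe fn shl_unmasked(x: u32, shift: u32) -> u32 {
    unchecked_shl(x, shift)
}

fn main() {
    assert_eq!(unsafe { shl_unmasked(1, 3) }, 8); // fine: 3 < 32
}
```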
- fn expected_types_for_fn_args(&self, - call_span: Span, - expected_ret: Expectation<'tcx>, - formal_ret: Ty<'tcx>, - formal_args: &[Ty<'tcx>]) - -> Vec> { + /// Unifies the output type with the expected type early, for more coercions + /// and forward type information on the input expressions. + fn expected_inputs_for_expected_output(&self, + call_span: Span, + expected_ret: Expectation<'tcx>, + formal_ret: Ty<'tcx>, + formal_args: &[Ty<'tcx>]) + -> Vec> { let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| { self.fudge_regions_if_ok(&RegionVariableOrigin::Coercion(call_span), || { // Attempt to apply a subtyping relationship between the formal @@ -2675,7 +2675,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { }).collect()) }).ok() }).unwrap_or(vec![]); - debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})", + debug!("expected_inputs_for_expected_output(formal={:?} -> {:?}, expected={:?} -> {:?})", formal_args, formal_ret, expected_args, expected_ret); expected_args @@ -3032,14 +3032,22 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { fn check_expr_struct_fields(&self, adt_ty: Ty<'tcx>, + expected: Expectation<'tcx>, expr_id: ast::NodeId, span: Span, variant: &'tcx ty::VariantDef, ast_fields: &'gcx [hir::Field], check_completeness: bool) { let tcx = self.tcx; - let (substs, adt_kind, kind_name) = match adt_ty.sty { - ty::TyAdt(adt, substs) => (substs, adt.adt_kind(), adt.variant_descr()), + + let adt_ty_hint = + self.expected_inputs_for_expected_output(span, expected, adt_ty, &[adt_ty]) + .get(0).cloned().unwrap_or(adt_ty); + + let (substs, hint_substs, adt_kind, kind_name) = match (&adt_ty.sty, &adt_ty_hint.sty) { + (&ty::TyAdt(adt, substs), &ty::TyAdt(_, hint_substs)) => { + (substs, hint_substs, adt.adt_kind(), adt.variant_descr()) + } _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields") }; @@ -3054,10 +3062,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Typecheck each field. for field in ast_fields { - let expected_field_type; + let final_field_type; + let field_type_hint; if let Some(v_field) = remaining_fields.remove(&field.name.node) { - expected_field_type = self.field_ty(field.span, v_field, substs); + final_field_type = self.field_ty(field.span, v_field, substs); + field_type_hint = self.field_ty(field.span, v_field, hint_substs); seen_fields.insert(field.name.node, field.span); @@ -3069,7 +3079,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } else { error_happened = true; - expected_field_type = tcx.types.err; + final_field_type = tcx.types.err; + field_type_hint = tcx.types.err; if let Some(_) = variant.find_field_named(field.name.node) { let mut err = struct_span_err!(self.tcx.sess, field.name.span, @@ -3091,7 +3102,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Make sure to give a type to the field even if there's // an error, so we can continue typechecking - self.check_expr_coercable_to_type(&field.expr, expected_field_type); + let ty = self.check_expr_with_hint(&field.expr, field_type_hint); + self.demand_coerce(&field.expr, ty, final_field_type); } // Make sure the programmer specified correct number of fields. 
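Beyond the rename to `expected_inputs_for_expected_output`, the substantive change in `check_expr_struct_fields` is that the expected type of the whole struct literal is now unified early and its substs used as per-field hints, while `demand_coerce` still enforces the real field type afterwards. As an illustration of the kind of code this can help (my own example of the pattern, not one taken from this patch), a field expression may need a coercion that only the annotated type reveals:

```rust
struct Wrapper<T> {
    value: T,
}

fn main() {
    // The annotation is the only source of the `Box<[u8]>` type; propagating
    // it into the literal lets `Box<[u8; 3]>` coerce to `Box<[u8]>` at the
    // field site instead of producing a type mismatch later.
    let w: Wrapper<Box<[u8]>> = Wrapper { value: Box::new([1u8, 2, 3]) };
    assert_eq!(w.value.len(), 3);
}
```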
@@ -3201,6 +3213,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { fn check_expr_struct(&self, expr: &hir::Expr, + expected: Expectation<'tcx>, qpath: &hir::QPath, fields: &'gcx [hir::Field], base_expr: &'gcx Option>) -> Ty<'tcx> @@ -3219,7 +3232,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { hir::QPath::TypeRelative(ref qself, _) => qself.span }; - self.check_expr_struct_fields(struct_ty, expr.id, path_span, variant, fields, + self.check_expr_struct_fields(struct_ty, expected, expr.id, path_span, variant, fields, base_expr.is_none()); if let &Some(ref base_expr) = base_expr { self.check_expr_has_type(base_expr, struct_ty); @@ -3764,7 +3777,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } hir::ExprStruct(ref qpath, ref fields, ref base_expr) => { - self.check_expr_struct(expr, qpath, fields, base_expr) + self.check_expr_struct(expr, expected, qpath, fields, base_expr) } hir::ExprField(ref base, ref field) => { self.check_field(expr, lvalue_pref, &base, field) diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 1294296840ebd..eff848be2b8b9 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -39,12 +39,11 @@ use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc::hir; +use std::{mem, slice, vec}; use std::path::PathBuf; use std::rc::Rc; -use std::slice; use std::sync::Arc; use std::u32; -use std::mem; use core::DocContext; use doctree; @@ -472,12 +471,12 @@ impl Clean for doctree::Module { pub struct ListAttributesIter<'a> { attrs: slice::Iter<'a, ast::Attribute>, - current_list: slice::Iter<'a, ast::NestedMetaItem>, + current_list: vec::IntoIter, name: &'a str } impl<'a> Iterator for ListAttributesIter<'a> { - type Item = &'a ast::NestedMetaItem; + type Item = ast::NestedMetaItem; fn next(&mut self) -> Option { if let Some(nested) = self.current_list.next() { @@ -485,9 +484,9 @@ impl<'a> Iterator for ListAttributesIter<'a> { } for attr in &mut self.attrs { - if let Some(ref list) = attr.meta_item_list() { + if let Some(list) = attr.meta_item_list() { if attr.check_name(self.name) { - self.current_list = list.iter(); + self.current_list = list.into_iter(); if let Some(nested) = self.current_list.next() { return Some(nested); } @@ -508,7 +507,7 @@ impl AttributesExt for [ast::Attribute] { fn lists<'a>(&'a self, name: &'a str) -> ListAttributesIter<'a> { ListAttributesIter { attrs: self.iter(), - current_list: [].iter(), + current_list: Vec::new().into_iter(), name: name } } @@ -519,7 +518,7 @@ pub trait NestedAttributesExt { fn has_word(self, &str) -> bool; } -impl<'a, I: IntoIterator> NestedAttributesExt for I { +impl> NestedAttributesExt for I { fn has_word(self, word: &str) -> bool { self.into_iter().any(|attr| attr.is_word() && attr.check_name(word)) } @@ -1388,7 +1387,7 @@ impl<'tcx> Clean for ty::AssociatedItem { decl: decl, abi: sig.abi(), - // trait methods canot (currently, at least) be const + // trait methods cannot (currently, at least) be const constness: hir::Constness::NotConst, }) } else { @@ -2596,9 +2595,9 @@ impl Clean> for doctree::Import { // #[doc(no_inline)] attribute is present. // Don't inline doc(hidden) imports so they can be stripped at a later stage. 
let denied = self.vis != hir::Public || self.attrs.iter().any(|a| { - a.name() == "doc" && match a.meta_item_list() { - Some(l) => attr::list_contains_name(l, "no_inline") || - attr::list_contains_name(l, "hidden"), + a.name().unwrap() == "doc" && match a.meta_item_list() { + Some(l) => attr::list_contains_name(&l, "no_inline") || + attr::list_contains_name(&l, "hidden"), None => false, } }); diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index c571bcb08e4b7..7b60d497932de 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -60,6 +60,7 @@ use rustc::middle::privacy::AccessLevels; use rustc::middle::stability; use rustc::hir; use rustc::util::nodemap::{FxHashMap, FxHashSet}; +use rustc::session::config::nightly_options::is_nightly_build; use rustc_data_structures::flock; use clean::{self, AttributesExt, GetDefId, SelfTy, Mutability}; @@ -2316,9 +2317,10 @@ fn render_assoc_item(w: &mut fmt::Formatter, } }; // FIXME(#24111): remove when `const_fn` is stabilized - let vis_constness = match UnstableFeatures::from_environment() { - UnstableFeatures::Allow => constness, - _ => hir::Constness::NotConst + let vis_constness = if is_nightly_build() { + constness + } else { + hir::Constness::NotConst }; let prefix = format!("{}{}{:#}fn {}{:#}", ConstnessSpace(vis_constness), @@ -2620,11 +2622,11 @@ fn render_attributes(w: &mut fmt::Formatter, it: &clean::Item) -> fmt::Result { let mut attrs = String::new(); for attr in &it.attrs.other_attrs { - let name = attr.name(); + let name = attr.name().unwrap(); if !ATTRIBUTE_WHITELIST.contains(&&name.as_str()[..]) { continue; } - if let Some(s) = render_attribute(attr.meta()) { + if let Some(s) = render_attribute(&attr.meta().unwrap()) { attrs.push_str(&format!("#[{}]\n", s)); } } diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index c1ecc241b7b63..f6b7a07bdae01 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -137,13 +137,13 @@ fn scrape_test_config(krate: &::rustc::hir::Crate) -> TestOptions { attrs: Vec::new(), }; - let attrs = krate.attrs.iter() - .filter(|a| a.check_name("doc")) - .filter_map(|a| a.meta_item_list()) - .flat_map(|l| l) - .filter(|a| a.check_name("test")) - .filter_map(|a| a.meta_item_list()) - .flat_map(|l| l); + let test_attrs: Vec<_> = krate.attrs.iter() + .filter(|a| a.check_name("doc")) + .flat_map(|a| a.meta_item_list().unwrap_or_else(Vec::new)) + .filter(|a| a.check_name("test")) + .collect(); + let attrs = test_attrs.iter().flat_map(|a| a.meta_item_list().unwrap_or(&[])); + for attr in attrs { if attr.check_name("no_crate_inject") { opts.no_crate_inject = true; diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index b80de3cc50546..4a909f8e2a972 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -376,7 +376,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { if item.vis == hir::Public && self.inside_public_path { let please_inline = item.attrs.iter().any(|item| { match item.meta_item_list() { - Some(list) if item.check_name("doc") => { + Some(ref list) if item.check_name("doc") => { list.iter().any(|i| i.check_name("inline")) } _ => false, diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs index 3ca8b41347a26..5733217008146 100644 --- a/src/libstd/collections/hash/map.rs +++ b/src/libstd/collections/hash/map.rs @@ -222,8 +222,8 @@ const DISPLACEMENT_THRESHOLD: usize = 128; /// resistance against HashDoS attacks. 
The algorithm is randomly seeded, and a /// reasonable best-effort is made to generate this seed from a high quality, /// secure source of randomness provided by the host without blocking the -/// program. Because of this, the randomness of the seed is dependant on the -/// quality of the system's random number generator at the time it is created. +/// program. Because of this, the randomness of the seed depends on the output +/// quality of the system's random number generator when the seed is created. /// In particular, seeds generated when the system's entropy pool is abnormally /// low such as during system boot may be of a lower quality. /// diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs index 2c8bb433e8aef..211605bef1ee0 100644 --- a/src/libstd/collections/hash/table.rs +++ b/src/libstd/collections/hash/table.rs @@ -1154,7 +1154,7 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> { fn next(&mut self) -> Option<(SafeHash, K, V)> { self.iter.next().map(|bucket| { unsafe { - (**self.table).size -= 1; + (*self.table.as_mut_ptr()).size -= 1; let (k, v) = ptr::read(bucket.pair); (SafeHash { hash: ptr::replace(bucket.hash, EMPTY_BUCKET) }, k, v) } diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index 2c83518d38880..206a37b8e5db8 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -245,7 +245,6 @@ #![feature(char_escape_debug)] #![feature(char_internals)] #![feature(collections)] -#![feature(collections_bound)] #![feature(collections_range)] #![feature(compiler_builtins_lib)] #![feature(const_fn)] diff --git a/src/libstd/net/ip.rs b/src/libstd/net/ip.rs index 5d6e8d319d7b2..24e0e6f3fa659 100644 --- a/src/libstd/net/ip.rs +++ b/src/libstd/net/ip.rs @@ -636,6 +636,7 @@ impl FromInner for Ipv4Addr { #[stable(feature = "ip_u32", since = "1.1.0")] impl From for u32 { + /// It performs the conversion in network order (big-endian). fn from(ip: Ipv4Addr) -> u32 { let ip = ip.octets(); ((ip[0] as u32) << 24) + ((ip[1] as u32) << 16) + ((ip[2] as u32) << 8) + (ip[3] as u32) @@ -644,6 +645,7 @@ impl From for u32 { #[stable(feature = "ip_u32", since = "1.1.0")] impl From for Ipv4Addr { + /// It performs the conversion in network order (big-endian). fn from(ip: u32) -> Ipv4Addr { Ipv4Addr::new((ip >> 24) as u8, (ip >> 16) as u8, (ip >> 8) as u8, ip as u8) } diff --git a/src/libstd/process.rs b/src/libstd/process.rs index f846ef3e69e09..97c48ee590341 100644 --- a/src/libstd/process.rs +++ b/src/libstd/process.rs @@ -1032,7 +1032,7 @@ pub fn exit(code: i32) -> ! { /// will be run. If a clean shutdown is needed it is recommended to only call /// this function at a known point where there are no more destructors left /// to run. -#[unstable(feature = "process_abort", issue = "37838")] +#[stable(feature = "process_abort", since = "1.17.0")] pub fn abort() -> ! { unsafe { ::sys::abort_internal() }; } diff --git a/src/libstd/sync/mutex.rs b/src/libstd/sync/mutex.rs index 48d8d34dbe090..d79be2944c9e3 100644 --- a/src/libstd/sync/mutex.rs +++ b/src/libstd/sync/mutex.rs @@ -132,7 +132,7 @@ unsafe impl Sync for Mutex { } /// An RAII implementation of a "scoped lock" of a mutex. When this structure is /// dropped (falls out of scope), the lock will be unlocked. /// -/// The data protected by the mutex can be access through this guard via its +/// The data protected by the mutex can be accessed through this guard via its /// [`Deref`] and [`DerefMut`] implementations. 
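The new doc lines on the `Ipv4Addr`/`u32` conversions above state that the conversion is in network order (big-endian); concretely, the first octet lands in the most significant byte of the integer. A quick check of what that means:

```rust
use std::net::Ipv4Addr;

fn main() {
    let ip = Ipv4Addr::new(192, 168, 0, 1);
    // 192 = 0xC0 occupies the most significant byte: network byte order.
    assert_eq!(u32::from(ip), 0xC0A8_0001);
    assert_eq!(Ipv4Addr::from(0xC0A8_0001u32), ip);
}
```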
/// /// This structure is created by the [`lock`] and [`try_lock`] methods on diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index a456fa512254c..84fb69a7f1094 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -116,6 +116,12 @@ pub struct Path { pub segments: Vec, } +impl<'a> PartialEq<&'a str> for Path { + fn eq(&self, string: &&'a str) -> bool { + self.segments.len() == 1 && self.segments[0].identifier.name == *string + } +} + impl fmt::Debug for Path { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "path({})", pprust::path_to_string(self)) @@ -1681,7 +1687,8 @@ pub struct AttrId(pub usize); pub struct Attribute { pub id: AttrId, pub style: AttrStyle, - pub value: MetaItem, + pub path: Path, + pub tokens: TokenStream, pub is_sugared_doc: bool, pub span: Span, } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 096657a6e7ac8..2f1efd6ad00ee 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -15,20 +15,24 @@ pub use self::ReprAttr::*; pub use self::IntType::*; use ast; -use ast::{AttrId, Attribute, Name}; +use ast::{AttrId, Attribute, Name, Ident}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; -use ast::{Lit, Expr, Item, Local, Stmt, StmtKind}; +use ast::{Lit, LitKind, Expr, ExprKind, Item, Local, Stmt, StmtKind}; use codemap::{Spanned, spanned, dummy_spanned, mk_sp}; use syntax_pos::{Span, BytePos, DUMMY_SP}; use errors::Handler; use feature_gate::{Features, GatedCfg}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use parse::ParseSess; +use parse::parser::Parser; +use parse::{self, ParseSess, PResult}; +use parse::token::{self, Token}; use ptr::P; use symbol::Symbol; +use tokenstream::{TokenStream, TokenTree, Delimited}; use util::ThinVec; use std::cell::{RefCell, Cell}; +use std::iter; thread_local! { static USED_ATTRS: RefCell> = RefCell::new(Vec::new()); @@ -185,26 +189,38 @@ impl NestedMetaItem { impl Attribute { pub fn check_name(&self, name: &str) -> bool { - let matches = self.name() == name; + let matches = self.path == name; if matches { mark_used(self); } matches } - pub fn name(&self) -> Name { self.meta().name() } + pub fn name(&self) -> Option { + match self.path.segments.len() { + 1 => Some(self.path.segments[0].identifier.name), + _ => None, + } + } pub fn value_str(&self) -> Option { - self.meta().value_str() + self.meta().and_then(|meta| meta.value_str()) } - pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { - self.meta().meta_item_list() + pub fn meta_item_list(&self) -> Option> { + match self.meta() { + Some(MetaItem { node: MetaItemKind::List(list), .. }) => Some(list), + _ => None + } } - pub fn is_word(&self) -> bool { self.meta().is_word() } + pub fn is_word(&self) -> bool { + self.path.segments.len() == 1 && self.tokens.is_empty() + } - pub fn span(&self) -> Span { self.meta().span } + pub fn span(&self) -> Span { + self.span + } pub fn is_meta_item_list(&self) -> bool { self.meta_item_list().is_some() @@ -225,7 +241,7 @@ impl MetaItem { match self.node { MetaItemKind::NameValue(ref v) => { match v.node { - ast::LitKind::Str(ref s, _) => Some((*s).clone()), + LitKind::Str(ref s, _) => Some((*s).clone()), _ => None, } }, @@ -264,8 +280,66 @@ impl MetaItem { impl Attribute { /// Extract the MetaItem from inside this Attribute. 
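Comparisons such as `attr.path == "doc"` and `attr.path == "derive"` throughout this patch rely on the `PartialEq<&'a str>` impl added to `Path` in `ast.rs` above: a path equals a string only when it consists of exactly one segment with that name. A self-contained toy model of that rule (the types here are simplified stand-ins, not the real `libsyntax` ones):

```rust
// Simplified stand-ins for `ast::Path` / `ast::PathSegment`.
struct PathSegment {
    name: String,
}

struct Path {
    segments: Vec<PathSegment>,
}

// Mirrors the `impl<'a> PartialEq<&'a str> for Path` added in src/libsyntax/ast.rs.
impl<'a> PartialEq<&'a str> for Path {
    fn eq(&self, string: &&'a str) -> bool {
        self.segments.len() == 1 && self.segments[0].name == *string
    }
}

fn main() {
    let doc = Path { segments: vec![PathSegment { name: "doc".to_string() }] };
    let qualified = Path {
        segments: vec![
            PathSegment { name: "rustfmt".to_string() },
            PathSegment { name: "skip".to_string() },
        ],
    };

    assert!(doc == "doc");          // single segment: compares by name
    assert!(!(qualified == "doc")); // multi-segment paths never equal a bare name
}
```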
- pub fn meta(&self) -> &MetaItem { - &self.value + pub fn meta(&self) -> Option { + let mut tokens = self.tokens.trees().peekable(); + Some(MetaItem { + name: match self.path.segments.len() { + 1 => self.path.segments[0].identifier.name, + _ => return None, + }, + node: if let Some(node) = MetaItemKind::from_tokens(&mut tokens) { + if tokens.peek().is_some() { + return None; + } + node + } else { + return None; + }, + span: self.span, + }) + } + + pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T> + where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, + { + let mut parser = Parser::new(sess, self.tokens.clone(), None, false); + let result = f(&mut parser)?; + if parser.token != token::Eof { + parser.unexpected()?; + } + Ok(result) + } + + pub fn parse_list<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, Vec> + where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>, + { + if self.tokens.is_empty() { + return Ok(Vec::new()); + } + self.parse(sess, |parser| { + parser.expect(&token::OpenDelim(token::Paren))?; + let mut list = Vec::new(); + while !parser.eat(&token::CloseDelim(token::Paren)) { + list.push(f(parser)?); + if !parser.eat(&token::Comma) { + parser.expect(&token::CloseDelim(token::Paren))?; + break + } + } + Ok(list) + }) + } + + pub fn parse_meta<'a>(&self, sess: &'a ParseSess) -> PResult<'a, MetaItem> { + if self.path.segments.len() > 1 { + sess.span_diagnostic.span_err(self.path.span, "expected ident, found path"); + } + + Ok(MetaItem { + name: self.path.segments.last().unwrap().identifier.name, + node: self.parse(sess, |parser| parser.parse_meta_item_kind())?, + span: self.span, + }) } /// Convert self to a normal #[doc="foo"] comment, if it is a @@ -293,7 +367,7 @@ impl Attribute { /* Constructors */ pub fn mk_name_value_item_str(name: Name, value: Symbol) -> MetaItem { - let value_lit = dummy_spanned(ast::LitKind::Str(value, ast::StrStyle::Cooked)); + let value_lit = dummy_spanned(LitKind::Str(value, ast::StrStyle::Cooked)); mk_spanned_name_value_item(DUMMY_SP, name, value_lit) } @@ -348,7 +422,8 @@ pub fn mk_spanned_attr_inner(sp: Span, id: AttrId, item: MetaItem) -> Attribute Attribute { id: id, style: ast::AttrStyle::Inner, - value: item, + path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)), + tokens: item.node.tokens(item.span), is_sugared_doc: false, span: sp, } @@ -365,7 +440,8 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute Attribute { id: id, style: ast::AttrStyle::Outer, - value: item, + path: ast::Path::from_ident(item.span, ast::Ident::with_empty_ctxt(item.name)), + tokens: item.node.tokens(item.span), is_sugared_doc: false, span: sp, } @@ -374,32 +450,25 @@ pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos) -> Attribute { let style = doc_comment_style(&text.as_str()); - let lit = spanned(lo, hi, ast::LitKind::Str(text, ast::StrStyle::Cooked)); + let lit = spanned(lo, hi, LitKind::Str(text, ast::StrStyle::Cooked)); Attribute { id: id, style: style, - value: MetaItem { - span: mk_sp(lo, hi), - name: Symbol::intern("doc"), - node: MetaItemKind::NameValue(lit), - }, + path: ast::Path::from_ident(mk_sp(lo, hi), ast::Ident::from_str("doc")), + tokens: MetaItemKind::NameValue(lit).tokens(mk_sp(lo, hi)), is_sugared_doc: true, span: mk_sp(lo, hi), } } pub fn list_contains_name(items: &[NestedMetaItem], name: &str) -> bool { - debug!("attr::list_contains_name 
(name={})", name); items.iter().any(|item| { - debug!(" testing: {:?}", item.name()); item.check_name(name) }) } pub fn contains_name(attrs: &[Attribute], name: &str) -> bool { - debug!("attr::contains_name (name={})", name); attrs.iter().any(|item| { - debug!(" testing: {}", item.name()); item.check_name(name) }) } @@ -452,8 +521,14 @@ pub enum InlineAttr { /// Determine what `#[inline]` attribute is present in `attrs`, if any. pub fn find_inline_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> InlineAttr { attrs.iter().fold(InlineAttr::None, |ia, attr| { - match attr.value.node { - _ if attr.value.name != "inline" => ia, + if attr.path != "inline" { + return ia; + } + let meta = match attr.meta() { + Some(meta) => meta.node, + None => return ia, + }; + match meta { MetaItemKind::Word => { mark_used(attr); InlineAttr::Hint @@ -574,14 +649,15 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, let mut rustc_depr: Option = None; 'outer: for attr in attrs_iter { - let tag = attr.name(); - if tag != "rustc_deprecated" && tag != "unstable" && tag != "stable" { + if attr.path != "rustc_deprecated" && attr.path != "unstable" && attr.path != "stable" { continue // not a stability level } mark_used(attr); - if let Some(metas) = attr.meta_item_list() { + let meta = attr.meta(); + if let Some(MetaItem { node: MetaItemKind::List(ref metas), .. }) = meta { + let meta = meta.as_ref().unwrap(); let get = |meta: &MetaItem, item: &mut Option| { if item.is_some() { handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name())); @@ -596,7 +672,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, } }; - match &*tag.as_str() { + match &*meta.name.as_str() { "rustc_deprecated" => { if rustc_depr.is_some() { span_err!(diagnostic, item_sp, E0540, @@ -772,7 +848,7 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler, let mut depr: Option = None; 'outer: for attr in attrs_iter { - if attr.name() != "deprecated" { + if attr.path != "deprecated" { continue } @@ -847,8 +923,8 @@ pub fn find_deprecation(diagnostic: &Handler, attrs: &[Attribute], /// structure layout, and `packed` to remove padding. pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec { let mut acc = Vec::new(); - match attr.value.node { - ast::MetaItemKind::List(ref items) if attr.value.name == "repr" => { + if attr.path == "repr" { + if let Some(items) = attr.meta_item_list() { mark_used(attr); for item in items { if !item.is_meta_item() { @@ -883,8 +959,6 @@ pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec } } } - // Not a "repr" hint: ignore. 
- _ => { } } acc } @@ -931,6 +1005,206 @@ impl IntType { } } +impl MetaItem { + fn tokens(&self) -> TokenStream { + let ident = TokenTree::Token(self.span, Token::Ident(Ident::with_empty_ctxt(self.name))); + TokenStream::concat(vec![ident.into(), self.node.tokens(self.span)]) + } + + fn from_tokens(tokens: &mut iter::Peekable) -> Option + where I: Iterator, + { + let (mut span, name) = match tokens.next() { + Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name), + Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt { + token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()), + _ => None, + }, + _ => return None, + }; + let node = match MetaItemKind::from_tokens(tokens) { + Some(node) => node, + _ => return None, + }; + if let Some(last_span) = node.last_span() { + span.hi = last_span.hi; + } + Some(MetaItem { name: name, span: span, node: node }) + } +} + +impl MetaItemKind { + fn last_span(&self) -> Option { + match *self { + MetaItemKind::Word => None, + MetaItemKind::List(ref list) => list.last().map(NestedMetaItem::span), + MetaItemKind::NameValue(ref lit) => Some(lit.span), + } + } + + pub fn tokens(&self, span: Span) -> TokenStream { + match *self { + MetaItemKind::Word => TokenStream::empty(), + MetaItemKind::NameValue(ref lit) => { + TokenStream::concat(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()]) + } + MetaItemKind::List(ref list) => { + let mut tokens = Vec::new(); + for (i, item) in list.iter().enumerate() { + if i > 0 { + tokens.push(TokenTree::Token(span, Token::Comma).into()); + } + tokens.push(item.node.tokens()); + } + TokenTree::Delimited(span, Delimited { + delim: token::Paren, + tts: TokenStream::concat(tokens).into(), + }).into() + } + } + } + + fn from_tokens(tokens: &mut iter::Peekable) -> Option + where I: Iterator, + { + let delimited = match tokens.peek().cloned() { + Some(TokenTree::Token(_, token::Eq)) => { + tokens.next(); + return if let Some(TokenTree::Token(span, token)) = tokens.next() { + LitKind::from_token(token) + .map(|lit| MetaItemKind::NameValue(Spanned { node: lit, span: span })) + } else { + None + }; + } + Some(TokenTree::Delimited(_, ref delimited)) if delimited.delim == token::Paren => { + tokens.next(); + delimited.stream() + } + _ => return Some(MetaItemKind::Word), + }; + + let mut tokens = delimited.into_trees().peekable(); + let mut result = Vec::new(); + while let Some(..) 
= tokens.peek() { + match NestedMetaItemKind::from_tokens(&mut tokens) { + Some(item) => result.push(Spanned { span: item.span(), node: item }), + None => return None, + } + match tokens.next() { + None | Some(TokenTree::Token(_, Token::Comma)) => {} + _ => return None, + } + } + Some(MetaItemKind::List(result)) + } +} + +impl NestedMetaItemKind { + fn span(&self) -> Span { + match *self { + NestedMetaItemKind::MetaItem(ref item) => item.span, + NestedMetaItemKind::Literal(ref lit) => lit.span, + } + } + + fn tokens(&self) -> TokenStream { + match *self { + NestedMetaItemKind::MetaItem(ref item) => item.tokens(), + NestedMetaItemKind::Literal(ref lit) => lit.tokens(), + } + } + + fn from_tokens(tokens: &mut iter::Peekable) -> Option + where I: Iterator, + { + if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() { + if let Some(node) = LitKind::from_token(token) { + tokens.next(); + return Some(NestedMetaItemKind::Literal(Spanned { node: node, span: span })); + } + } + + MetaItem::from_tokens(tokens).map(NestedMetaItemKind::MetaItem) + } +} + +impl Lit { + fn tokens(&self) -> TokenStream { + TokenTree::Token(self.span, self.node.token()).into() + } +} + +impl LitKind { + fn token(&self) -> Token { + use std::ascii; + + match *self { + LitKind::Str(string, ast::StrStyle::Cooked) => { + let mut escaped = String::new(); + for ch in string.as_str().chars() { + escaped.extend(ch.escape_unicode()); + } + Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None) + } + LitKind::Str(string, ast::StrStyle::Raw(n)) => { + Token::Literal(token::Lit::StrRaw(string, n), None) + } + LitKind::ByteStr(ref bytes) => { + let string = bytes.iter().cloned().flat_map(ascii::escape_default) + .map(Into::::into).collect::(); + Token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None) + } + LitKind::Byte(byte) => { + let string: String = ascii::escape_default(byte).map(Into::::into).collect(); + Token::Literal(token::Lit::Byte(Symbol::intern(&string)), None) + } + LitKind::Char(ch) => { + let string: String = ch.escape_default().map(Into::::into).collect(); + Token::Literal(token::Lit::Char(Symbol::intern(&string)), None) + } + LitKind::Int(n, ty) => { + let suffix = match ty { + ast::LitIntType::Unsigned(ty) => Some(Symbol::intern(ty.ty_to_string())), + ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())), + ast::LitIntType::Unsuffixed => None, + }; + Token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), suffix) + } + LitKind::Float(symbol, ty) => { + Token::Literal(token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string()))) + } + LitKind::FloatUnsuffixed(symbol) => Token::Literal(token::Lit::Float(symbol), None), + LitKind::Bool(value) => Token::Ident(Ident::with_empty_ctxt(Symbol::intern(match value { + true => "true", + false => "false", + }))), + } + } + + fn from_token(token: Token) -> Option { + match token { + Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)), + Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)), + Token::Interpolated(ref nt) => match **nt { + token::NtExpr(ref v) => match v.node { + ExprKind::Lit(ref lit) => Some(lit.node.clone()), + _ => None, + }, + _ => None, + }, + Token::Literal(lit, suf) => { + let (suffix_illegal, result) = parse::lit_token(lit, suf, None); + if suffix_illegal && suf.is_some() { + return None; + } + result + } + _ => None, + } + } +} + pub trait HasAttrs: Sized { fn attrs(&self) -> &[ast::Attribute]; fn map_attrs) -> Vec>(self, f: F) -> Self; diff 
--git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index ea12a31770fc0..ede8a33df6546 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -13,9 +13,10 @@ use feature_gate::{feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features use {fold, attr}; use ast; use codemap::Spanned; -use parse::ParseSess; -use ptr::P; +use parse::{token, ParseSess}; +use syntax_pos::Span; +use ptr::P; use util::small_vector::SmallVector; /// A folder that strips out items that do not belong in the current configuration. @@ -84,43 +85,33 @@ impl<'a> StripUnconfigured<'a> { return Some(attr); } - let attr_list = match attr.meta_item_list() { - Some(attr_list) => attr_list, - None => { - let msg = "expected `#[cfg_attr(, )]`"; - self.sess.span_diagnostic.span_err(attr.span, msg); - return None; - } - }; - - let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) { - (2, Some(cfg), Some(mi)) => (cfg, mi), - _ => { - let msg = "expected `#[cfg_attr(, )]`"; - self.sess.span_diagnostic.span_err(attr.span, msg); + let (cfg, path, tokens, span) = match attr.parse(self.sess, |parser| { + parser.expect(&token::OpenDelim(token::Paren))?; + let cfg = parser.parse_meta_item()?; + parser.expect(&token::Comma)?; + let lo = parser.span.lo; + let (path, tokens) = parser.parse_path_and_tokens()?; + parser.expect(&token::CloseDelim(token::Paren))?; + Ok((cfg, path, tokens, Span { lo: lo, ..parser.prev_span })) + }) { + Ok(result) => result, + Err(mut e) => { + e.emit(); return None; } }; - use attr::cfg_matches; - match (cfg.meta_item(), mi.meta_item()) { - (Some(cfg), Some(mi)) => - if cfg_matches(&cfg, self.sess, self.features) { - self.process_cfg_attr(ast::Attribute { - id: attr::mk_attr_id(), - style: attr.style, - value: mi.clone(), - is_sugared_doc: false, - span: mi.span, - }) - } else { - None - }, - _ => { - let msg = "unexpected literal(s) in `#[cfg_attr(, )]`"; - self.sess.span_diagnostic.span_err(attr.span, msg); - None - } + if attr::cfg_matches(&cfg, self.sess, self.features) { + self.process_cfg_attr(ast::Attribute { + id: attr::mk_attr_id(), + style: attr.style, + path: path, + tokens: tokens, + is_sugared_doc: false, + span: span, + }) + } else { + None } } @@ -132,9 +123,12 @@ impl<'a> StripUnconfigured<'a> { return false; } - let mis = match attr.value.node { - ast::MetaItemKind::List(ref mis) if is_cfg(&attr) => mis, - _ => return true + let mis = if !is_cfg(&attr) { + return true; + } else if let Some(mis) = attr.meta_item_list() { + mis + } else { + return true; }; if mis.len() != 1 { diff --git a/src/libsyntax/ext/derive.rs b/src/libsyntax/ext/derive.rs index 77cc7bab0315a..1569d9f540b8e 100644 --- a/src/libsyntax/ext/derive.rs +++ b/src/libsyntax/ext/derive.rs @@ -12,36 +12,31 @@ use attr::HasAttrs; use {ast, codemap}; use ext::base::ExtCtxt; use ext::build::AstBuilder; +use parse::parser::PathStyle; use symbol::Symbol; use syntax_pos::Span; -pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec<(Symbol, Span)> { +pub fn collect_derives(cx: &mut ExtCtxt, attrs: &mut Vec) -> Vec { let mut result = Vec::new(); attrs.retain(|attr| { - if attr.name() != "derive" { + if attr.path != "derive" { return true; } - if attr.value_str().is_some() { - cx.span_err(attr.span, "unexpected value in `derive`"); - return false; - } - - let traits = attr.meta_item_list().unwrap_or(&[]).to_owned(); - if traits.is_empty() { - cx.span_warn(attr.span, "empty trait list in `derive`"); - return false; - } - - for titem in traits { - if titem.word().is_none() { - 
cx.span_err(titem.span, "malformed `derive` entry"); - return false; + match attr.parse_list(cx.parse_sess, |parser| parser.parse_path(PathStyle::Mod)) { + Ok(ref traits) if traits.is_empty() => { + cx.span_warn(attr.span, "empty trait list in `derive`"); + false + } + Ok(traits) => { + result.extend(traits); + true + } + Err(mut e) => { + e.emit(); + false } - result.push((titem.name().unwrap(), titem.span)); } - - true }); result } @@ -60,21 +55,21 @@ fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span { } } -pub fn add_derived_markers(cx: &mut ExtCtxt, traits: &[(Symbol, Span)], item: T) -> T { +pub fn add_derived_markers(cx: &mut ExtCtxt, traits: &[ast::Path], item: T) -> T { let span = match traits.get(0) { - Some(&(_, span)) => span, + Some(path) => path.span, None => return item, }; item.map_attrs(|mut attrs| { - if traits.iter().any(|&(name, _)| name == "PartialEq") && - traits.iter().any(|&(name, _)| name == "Eq") { + if traits.iter().any(|path| *path == "PartialEq") && + traits.iter().any(|path| *path == "Eq") { let span = allow_unstable(cx, span, "derive(PartialEq, Eq)"); let meta = cx.meta_word(span, Symbol::intern("structural_match")); attrs.push(cx.attribute(span, meta)); } - if traits.iter().any(|&(name, _)| name == "Copy") && - traits.iter().any(|&(name, _)| name == "Clone") { + if traits.iter().any(|path| *path == "Copy") && + traits.iter().any(|path| *path == "Clone") { let span = allow_unstable(cx, span, "derive(Copy, Clone)"); let meta = cx.meta_word(span, Symbol::intern("rustc_copy_clone_marker")); attrs.push(cx.attribute(span, meta)); diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 10168f010a077..c1816582bc6ca 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast::{self, Block, Ident, PatKind}; -use ast::{Name, MacStmtStyle, StmtKind, ItemKind}; +use ast::{self, Block, Ident, PatKind, Path}; +use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; use config::{is_test_or_bench, StripUnconfigured}; @@ -27,7 +27,7 @@ use ptr::P; use std_inject; use symbol::Symbol; use symbol::keywords; -use syntax_pos::{self, Span, ExpnId}; +use syntax_pos::{Span, ExpnId, DUMMY_SP}; use tokenstream::TokenStream; use util::small_vector::SmallVector; use visit::Visitor; @@ -165,12 +165,11 @@ pub enum InvocationKind { }, Attr { attr: Option, - traits: Vec<(Symbol, Span)>, + traits: Vec, item: Annotatable, }, Derive { - name: Symbol, - span: Span, + path: Path, item: Annotatable, }, } @@ -180,8 +179,8 @@ impl Invocation { match self.kind { InvocationKind::Bang { span, .. } => span, InvocationKind::Attr { attr: Some(ref attr), .. } => attr.span, - InvocationKind::Attr { attr: None, .. } => syntax_pos::DUMMY_SP, - InvocationKind::Derive { span, .. } => span, + InvocationKind::Attr { attr: None, .. } => DUMMY_SP, + InvocationKind::Derive { ref path, .. 
} => path.span, } } } @@ -272,17 +271,16 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.collect_invocations(expansion, &[]) } else if let InvocationKind::Attr { attr: None, traits, item } = invoc.kind { let item = item - .map_attrs(|mut attrs| { attrs.retain(|a| a.name() != "derive"); attrs }); + .map_attrs(|mut attrs| { attrs.retain(|a| a.path != "derive"); attrs }); let item_with_markers = add_derived_markers(&mut self.cx, &traits, item.clone()); let derives = derives.entry(invoc.expansion_data.mark).or_insert_with(Vec::new); - for &(name, span) in &traits { + for path in &traits { let mark = Mark::fresh(); derives.push(mark); - let path = ast::Path::from_ident(span, Ident::with_empty_ctxt(name)); let item = match self.cx.resolver.resolve_macro( - Mark::root(), &path, MacroKind::Derive, false) { + Mark::root(), path, MacroKind::Derive, false) { Ok(ext) => match *ext { SyntaxExtension::BuiltinDerive(..) => item_with_markers.clone(), _ => item.clone(), @@ -290,7 +288,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { _ => item.clone(), }; invocations.push(Invocation { - kind: InvocationKind::Derive { name: name, span: span, item: item }, + kind: InvocationKind::Derive { path: path.clone(), item: item }, expansion_kind: invoc.expansion_kind, expansion_data: ExpansionData { mark: mark, @@ -380,11 +378,10 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; attr::mark_used(&attr); - let name = attr.name(); self.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - format: MacroAttribute(name), + format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), span: Some(attr.span), allow_internal_unstable: false, } @@ -392,25 +389,25 @@ impl<'a, 'b> MacroExpander<'a, 'b> { match *ext { MultiModifier(ref mac) => { - let item = mac.expand(self.cx, attr.span, &attr.value, item); + let meta = panictry!(attr.parse_meta(&self.cx.parse_sess)); + let item = mac.expand(self.cx, attr.span, &meta, item); kind.expect_from_annotatables(item) } MultiDecorator(ref mac) => { let mut items = Vec::new(); - mac.expand(self.cx, attr.span, &attr.value, &item, - &mut |item| items.push(item)); + let meta = panictry!(attr.parse_meta(&self.cx.parse_sess)); + mac.expand(self.cx, attr.span, &meta, &item, &mut |item| items.push(item)); items.push(item); kind.expect_from_annotatables(items) } SyntaxExtension::AttrProcMacro(ref mac) => { - let attr_toks = stream_for_attr_args(&attr, &self.cx.parse_sess); let item_toks = stream_for_item(&item, &self.cx.parse_sess); let span = Span { expn_id: self.cx.codemap().record_expansion(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - format: MacroAttribute(name), + format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))), span: None, allow_internal_unstable: false, }, @@ -418,15 +415,15 @@ impl<'a, 'b> MacroExpander<'a, 'b> { ..attr.span }; - let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks); - self.parse_expansion(tok_result, kind, name, span) + let tok_result = mac.expand(self.cx, attr.span, attr.tokens.clone(), item_toks); + self.parse_expansion(tok_result, kind, &attr.path, span) } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) 
=> { - self.cx.span_err(attr.span, &format!("`{}` is a derive mode", name)); + self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path)); kind.dummy(attr.span) } _ => { - let msg = &format!("macro `{}` may not be used in attributes", name); + let msg = &format!("macro `{}` may not be used in attributes", attr.path); self.cx.span_err(attr.span, &msg); kind.dummy(attr.span) } @@ -442,7 +439,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; let path = &mac.node.path; - let extname = path.segments.last().unwrap().identifier.name; let ident = ident.unwrap_or(keywords::Invalid.ident()); let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker { mark: mark, expn_id: None }); @@ -450,7 +446,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { NormalTT(ref expandfun, exp_span, allow_internal_unstable) => { if ident.name != keywords::Invalid.name() { let msg = - format!("macro {}! expects no ident argument, given '{}'", extname, ident); + format!("macro {}! expects no ident argument, given '{}'", path, ident); self.cx.span_err(path.span, &msg); return kind.dummy(span); } @@ -458,7 +454,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - format: MacroBang(extname), + format: MacroBang(Symbol::intern(&format!("{}", path))), span: exp_span, allow_internal_unstable: allow_internal_unstable, }, @@ -470,14 +466,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> { IdentTT(ref expander, tt_span, allow_internal_unstable) => { if ident.name == keywords::Invalid.name() { self.cx.span_err(path.span, - &format!("macro {}! expects an ident argument", extname)); + &format!("macro {}! expects an ident argument", path)); return kind.dummy(span); }; self.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - format: MacroBang(extname), + format: MacroBang(Symbol::intern(&format!("{}", path))), span: tt_span, allow_internal_unstable: allow_internal_unstable, } @@ -489,19 +485,19 @@ impl<'a, 'b> MacroExpander<'a, 'b> { MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => { self.cx.span_err(path.span, - &format!("`{}` can only be used in attributes", extname)); + &format!("`{}` can only be used in attributes", path)); return kind.dummy(span); } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => { - self.cx.span_err(path.span, &format!("`{}` is a derive mode", extname)); + self.cx.span_err(path.span, &format!("`{}` is a derive mode", path)); return kind.dummy(span); } SyntaxExtension::ProcMacro(ref expandfun) => { if ident.name != keywords::Invalid.name() { let msg = - format!("macro {}! expects no ident argument, given '{}'", extname, ident); + format!("macro {}! expects no ident argument, given '{}'", path, ident); self.cx.span_err(path.span, &msg); return kind.dummy(span); } @@ -509,7 +505,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { - format: MacroBang(extname), + format: MacroBang(Symbol::intern(&format!("{}", path))), // FIXME procedural macros do not have proper span info // yet, when they do, we should use it here. span: None, @@ -519,7 +515,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }); let tok_result = expandfun.expand(self.cx, span, marked_tts); - Some(self.parse_expansion(tok_result, kind, extname, span)) + Some(self.parse_expansion(tok_result, kind, path, span)) } }; @@ -541,19 +537,24 @@ impl<'a, 'b> MacroExpander<'a, 'b> { /// Expand a derive invocation. Returns the result of expansion. 
fn expand_derive_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { let Invocation { expansion_kind: kind, .. } = invoc; - let (name, span, item) = match invoc.kind { - InvocationKind::Derive { name, span, item } => (name, span, item), + let (path, item) = match invoc.kind { + InvocationKind::Derive { path, item } => (path, item), _ => unreachable!(), }; - let mitem = ast::MetaItem { name: name, span: span, node: ast::MetaItemKind::Word }; - let pretty_name = Symbol::intern(&format!("derive({})", name)); + let pretty_name = Symbol::intern(&format!("derive({})", path)); + let span = path.span; + let attr = ast::Attribute { + path: path, tokens: TokenStream::empty(), span: span, + // irrelevant: + id: ast::AttrId(0), style: ast::AttrStyle::Outer, is_sugared_doc: false, + }; self.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroAttribute(pretty_name), - span: Some(span), + span: None, allow_internal_unstable: false, } }); @@ -571,7 +572,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }), ..span }; - return kind.expect_from_annotatables(ext.expand(self.cx, span, &mitem, item)); + let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this + name: keywords::Invalid.name(), + span: DUMMY_SP, + node: ast::MetaItemKind::Word, + }; + return kind.expect_from_annotatables(ext.expand(self.cx, span, &dummy, item)); } SyntaxExtension::BuiltinDerive(func) => { let span = Span { @@ -586,20 +592,18 @@ impl<'a, 'b> MacroExpander<'a, 'b> { ..span }; let mut items = Vec::new(); - func(self.cx, span, &mitem, &item, &mut |a| { - items.push(a) - }); + func(self.cx, span, &attr.meta().unwrap(), &item, &mut |a| items.push(a)); return kind.expect_from_annotatables(items); } _ => { - let msg = &format!("macro `{}` may not be used for derive attributes", name); + let msg = &format!("macro `{}` may not be used for derive attributes", attr.path); self.cx.span_err(span, &msg); kind.dummy(span) } } } - fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span) + fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, path: &Path, span: Span) -> Expansion { let mut parser = self.cx.new_parser_from_tts(&toks.into_trees().collect::>()); let expansion = match parser.parse_expansion(kind, false) { @@ -609,7 +613,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { return kind.dummy(span); } }; - parser.ensure_complete_parse(name, kind.name(), span); + parser.ensure_complete_parse(path, kind.name(), span); // FIXME better span info expansion.fold_with(&mut ChangeSpan { span: span }) } @@ -658,14 +662,14 @@ impl<'a> Parser<'a> { }) } - pub fn ensure_complete_parse(&mut self, macro_name: ast::Name, kind_name: &str, span: Span) { + pub fn ensure_complete_parse(&mut self, macro_path: &Path, kind_name: &str, span: Span) { if self.token != token::Eof { let msg = format!("macro expansion ignores token `{}` and any following", self.this_token_to_string()); let mut err = self.diagnostic().struct_span_err(self.span, &msg); let msg = format!("caused by the macro expansion here; the usage \ of `{}!` is likely invalid in {} context", - macro_name, kind_name); + macro_path, kind_name); err.span_note(span, &msg).emit(); } } @@ -708,20 +712,20 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { fn collect_attr(&mut self, attr: Option, - traits: Vec<(Symbol, Span)>, + traits: Vec, item: Annotatable, kind: ExpansionKind) -> Expansion { if !traits.is_empty() && (kind == ExpansionKind::TraitItems || kind == ExpansionKind::ImplItems) { - self.cx.span_err(traits[0].1, "`derive` can be only 
be applied to items"); + self.cx.span_err(traits[0].span, "`derive` can be only be applied to items"); return kind.expect_from_annotatables(::std::iter::once(item)); } self.collect(kind, InvocationKind::Attr { attr: attr, traits: traits, item: item }) } // If `item` is an attr invocation, remove and return the macro attribute. - fn classify_item(&mut self, mut item: T) -> (Option, Vec<(Symbol, Span)>, T) + fn classify_item(&mut self, mut item: T) -> (Option, Vec, T) where T: HasAttrs, { let (mut attr, mut traits) = (None, Vec::new()); @@ -784,32 +788,6 @@ fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream { string_to_stream(text, parse_sess) } -fn stream_for_attr_args(attr: &ast::Attribute, parse_sess: &ParseSess) -> TokenStream { - use ast::MetaItemKind::*; - use print::pp::Breaks; - use print::pprust::PrintState; - - let token_string = match attr.value.node { - // For `#[foo]`, an empty token - Word => return TokenStream::empty(), - // For `#[foo(bar, baz)]`, returns `(bar, baz)` - List(ref items) => pprust::to_string(|s| { - s.popen()?; - s.commasep(Breaks::Consistent, - &items[..], - |s, i| s.print_meta_list_item(&i))?; - s.pclose() - }), - // For `#[foo = "bar"]`, returns `= "bar"` - NameValue(ref lit) => pprust::to_string(|s| { - s.word_space("=")?; - s.print_literal(lit) - }), - }; - - string_to_stream(token_string, parse_sess) -} - fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream { let filename = String::from(""); filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text)) @@ -926,7 +904,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { // Detect if this is an inline module (`mod m { ... }` as opposed to `mod m;`). // In the non-inline case, `inner` is never the dummy span (c.f. `parse_item_mod`). // Thus, if `inner` is the dummy span, we know the module is inline. 
- let inline_module = item.span.contains(inner) || inner == syntax_pos::DUMMY_SP; + let inline_module = item.span.contains(inner) || inner == DUMMY_SP; if inline_module { if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 69ff726e719a9..10b7249743b8c 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -220,16 +220,24 @@ pub mod rt { } impl ToTokens for ast::Attribute { - fn to_tokens(&self, cx: &ExtCtxt) -> Vec { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let mut r = vec![]; // FIXME: The spans could be better r.push(TokenTree::Token(self.span, token::Pound)); if self.style == ast::AttrStyle::Inner { r.push(TokenTree::Token(self.span, token::Not)); } + let mut inner = Vec::new(); + for (i, segment) in self.path.segments.iter().enumerate() { + if i > 0 { + inner.push(TokenTree::Token(self.span, token::Colon).into()); + } + inner.push(TokenTree::Token(self.span, token::Ident(segment.identifier)).into()); + } + inner.push(self.tokens.clone()); + r.push(TokenTree::Delimited(self.span, tokenstream::Delimited { - delim: token::Bracket, - tts: self.value.to_tokens(cx).into_iter().collect::().into(), + delim: token::Bracket, tts: TokenStream::concat(inner).into() })); r } diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index b9cb3d82d4f7c..6385d206a0cb9 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -488,7 +488,7 @@ pub fn parse(sess: &ParseSess, tts: TokenStream, ms: &[TokenTree], directory: Op fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { match name { "tt" => { - return token::NtTT(panictry!(p.parse_token_tree())); + return token::NtTT(p.parse_token_tree()); } _ => {} } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 7aa1230f9aeea..021c5398a4200 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -51,7 +51,8 @@ impl<'a> ParserAnyMacro<'a> { } // Make sure we don't have any tokens left to parse so we don't silently drop anything. - parser.ensure_complete_parse(macro_ident.name, kind.name(), site_span); + let path = ast::Path::from_ident(site_span, macro_ident); + parser.ensure_complete_parse(&path, kind.name(), site_span); expansion } } diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 15913d56d162f..d07069d030a1b 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -342,6 +342,9 @@ declare_features! ( // Allows the `catch {...}` expression (active, catch_expr, "1.17.0", Some(31436)), + + // See rust-lang/rfcs#1414. Allows code like `let x: &'static u32 = &42` to work. + (active, rvalue_static_promotion, "1.15.1", Some(38865)), ); declare_features! ( @@ -862,35 +865,34 @@ macro_rules! 
gate_feature { impl<'a> Context<'a> { fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { debug!("check_attribute(attr = {:?})", attr); - let name = &*attr.name().as_str(); + let name = unwrap_or!(attr.name(), return); + for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES { - if n == name { + if name == n { if let &Gated(_, ref name, ref desc, ref has_feature) = gateage { gate_feature_fn!(self, has_feature, attr.span, name, desc); } - debug!("check_attribute: {:?} is builtin, {:?}, {:?}", name, ty, gateage); + debug!("check_attribute: {:?} is builtin, {:?}, {:?}", attr.path, ty, gateage); return; } } for &(ref n, ref ty) in self.plugin_attributes { - if n == name { + if attr.path == &**n { // Plugins can't gate attributes, so we don't check for it // unlike the code above; we only use this loop to // short-circuit to avoid the checks below - debug!("check_attribute: {:?} is registered by a plugin, {:?}", name, ty); + debug!("check_attribute: {:?} is registered by a plugin, {:?}", attr.path, ty); return; } } - if name.starts_with("rustc_") { + if name.as_str().starts_with("rustc_") { gate_feature!(self, rustc_attrs, attr.span, "unless otherwise specified, attributes \ with the prefix `rustc_` \ are reserved for internal compiler diagnostics"); - } else if name.starts_with("derive_") { + } else if name.as_str().starts_with("derive_") { gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE); - } else if attr::is_known(attr) { - debug!("check_attribute: {:?} is known", name); - } else { + } else if !attr::is_known(attr) { // Only run the custom attribute lint during regular // feature gate checking. Macro gating runs // before the plugin attributes are registered @@ -901,7 +903,7 @@ impl<'a> Context<'a> { unknown to the compiler and \ may have meaning \ added to it in the future", - name)); + attr.path)); } } } @@ -1100,7 +1102,12 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { self.context.check_attribute(attr, false); } - if contains_novel_literal(&attr.value) { + if self.context.features.proc_macro && attr::is_known(attr) { + return + } + + let meta = panictry!(attr.parse_meta(&self.context.parse_sess)); + if contains_novel_literal(&meta) { gate_feature_post!(&self, attr_literals, attr.span, "non-string literals in attributes, or string \ literals in top-level positions, are experimental"); @@ -1163,8 +1170,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { `#[repr(simd)]` instead"); } for attr in &i.attrs { - if attr.name() == "repr" { - for item in attr.meta_item_list().unwrap_or(&[]) { + if attr.path == "repr" { + for item in attr.meta_item_list().unwrap_or_else(Vec::new) { if item.check_name("simd") { gate_feature_post!(&self, repr_simd, i.span, "SIMD types are experimental \ diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 9f11d0173d6d5..1a4e196ac5577 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -489,7 +489,8 @@ pub fn noop_fold_attribute(attr: Attribute, fld: &mut T) -> Option(nt: token::Nonterminal, fld: &mut T) token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)), token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)), token::NtIdent(id) => token::NtIdent(Spanned::{node: fld.fold_ident(id.node), ..id}), - token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), + token::NtMeta(meta) => token::NtMeta(fld.fold_meta_item(meta)), token::NtPath(path) => token::NtPath(fld.fold_path(path)), token::NtTT(tt) => token::NtTT(fld.fold_tt(tt)), token::NtArm(arm) => token::NtArm(fld.fold_arm(arm)), @@ 
-1371,7 +1372,7 @@ mod tests { matches_codepattern, "matches_codepattern", pprust::to_string(|s| fake_print_crate(s, &folded_crate)), - "#[a]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); + "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()); } // even inside macro defs.... diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 39a9aff48bf27..4c9a5d512af02 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -65,6 +65,16 @@ macro_rules! panictry { }) } +#[macro_export] +macro_rules! unwrap_or { + ($opt:expr, $default:expr) => { + match $opt { + Some(x) => x, + None => $default, + } + } +} + #[macro_use] pub mod diagnostics { #[macro_use] diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index ded676da3c676..53106214fa310 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -14,8 +14,9 @@ use syntax_pos::{mk_sp, Span}; use codemap::spanned; use parse::common::SeqSep; use parse::PResult; -use parse::token; -use parse::parser::{Parser, TokenType}; +use parse::token::{self, Nonterminal}; +use parse::parser::{Parser, TokenType, PathStyle}; +use tokenstream::TokenStream; #[derive(PartialEq, Eq, Debug)] enum InnerAttributeParsePolicy<'a> { @@ -91,7 +92,7 @@ impl<'a> Parser<'a> { debug!("parse_attribute_with_inner_parse_policy: inner_parse_policy={:?} self.token={:?}", inner_parse_policy, self.token); - let (span, value, mut style) = match self.token { + let (span, path, tokens, mut style) = match self.token { token::Pound => { let lo = self.span.lo; self.bump(); @@ -119,11 +120,11 @@ impl<'a> Parser<'a> { }; self.expect(&token::OpenDelim(token::Bracket))?; - let meta_item = self.parse_meta_item()?; + let (path, tokens) = self.parse_path_and_tokens()?; self.expect(&token::CloseDelim(token::Bracket))?; let hi = self.prev_span.hi; - (mk_sp(lo, hi), meta_item, style) + (mk_sp(lo, hi), path, tokens, style) } _ => { let token_str = self.this_token_to_string(); @@ -143,12 +144,30 @@ impl<'a> Parser<'a> { Ok(ast::Attribute { id: attr::mk_attr_id(), style: style, - value: value, + path: path, + tokens: tokens, is_sugared_doc: false, span: span, }) } + pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { + let meta = match self.token { + token::Interpolated(ref nt) => match **nt { + Nonterminal::NtMeta(ref meta) => Some(meta.clone()), + _ => None, + }, + _ => None, + }; + Ok(if let Some(meta) = meta { + self.bump(); + (ast::Path::from_ident(meta.span, ast::Ident::with_empty_ctxt(meta.name)), + meta.node.tokens(meta.span)) + } else { + (self.parse_path(PathStyle::Mod)?, self.parse_tokens()) + }) + } + /// Parse attributes that appear after the opening of an item. These should /// be preceded by an exclamation mark, but we accept and warn about one /// terminated by a semicolon. @@ -221,15 +240,20 @@ impl<'a> Parser<'a> { let lo = self.span.lo; let ident = self.parse_ident()?; - let node = if self.eat(&token::Eq) { + let node = self.parse_meta_item_kind()?; + let hi = self.prev_span.hi; + Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) }) + } + + pub fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> { + Ok(if self.eat(&token::Eq) { ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?) } else if self.token == token::OpenDelim(token::Paren) { ast::MetaItemKind::List(self.parse_meta_seq()?) 
} else { + self.eat(&token::OpenDelim(token::Paren)); ast::MetaItemKind::Word - }; - let hi = self.prev_span.hi; - Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) }) + }) } /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index de8a87e3a2b32..d48cf6911ed37 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -725,7 +725,7 @@ impl<'a> StringReader<'a> { base = 16; num_digits = self.scan_digits(16, 16); } - '0'...'9' | '_' | '.' => { + '0'...'9' | '_' | '.' | 'e' | 'E' => { num_digits = self.scan_digits(10, 10) + 1; } _ => { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 88535f91379f7..e188bcaf105f2 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -374,38 +374,80 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s[1..].chars().all(|c| '0' <= c && c <= '9') } -fn filtered_float_lit(data: Symbol, suffix: Option, sd: &Handler, sp: Span) - -> ast::LitKind { +macro_rules! err { + ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => { + match $opt_diag { + Some(($span, $diag)) => { $($body)* } + None => return None, + } + } +} + +pub fn lit_token(lit: token::Lit, suf: Option, diag: Option<(Span, &Handler)>) + -> (bool /* suffix illegal? */, Option) { + use ast::LitKind; + + match lit { + token::Byte(i) => (true, Some(LitKind::Byte(byte_lit(&i.as_str()).0))), + token::Char(i) => (true, Some(LitKind::Char(char_lit(&i.as_str()).0))), + + // There are some valid suffixes for integer and float literals, + // so all the handling is done internally. + token::Integer(s) => (false, integer_lit(&s.as_str(), suf, diag)), + token::Float(s) => (false, float_lit(&s.as_str(), suf, diag)), + + token::Str_(s) => { + let s = Symbol::intern(&str_lit(&s.as_str())); + (true, Some(LitKind::Str(s, ast::StrStyle::Cooked))) + } + token::StrRaw(s, n) => { + let s = Symbol::intern(&raw_str_lit(&s.as_str())); + (true, Some(LitKind::Str(s, ast::StrStyle::Raw(n)))) + } + token::ByteStr(i) => { + (true, Some(LitKind::ByteStr(byte_str_lit(&i.as_str())))) + } + token::ByteStrRaw(i, _) => { + (true, Some(LitKind::ByteStr(Rc::new(i.to_string().into_bytes())))) + } + } +} + +fn filtered_float_lit(data: Symbol, suffix: Option, diag: Option<(Span, &Handler)>) + -> Option { debug!("filtered_float_lit: {}, {:?}", data, suffix); let suffix = match suffix { Some(suffix) => suffix, - None => return ast::LitKind::FloatUnsuffixed(data), + None => return Some(ast::LitKind::FloatUnsuffixed(data)), }; - match &*suffix.as_str() { + Some(match &*suffix.as_str() { "f32" => ast::LitKind::Float(data, ast::FloatTy::F32), "f64" => ast::LitKind::Float(data, ast::FloatTy::F64), suf => { - if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { - // if it looks like a width, lets try to be helpful. - sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..])) - .help("valid widths are 32 and 64") - .emit(); - } else { - sd.struct_span_err(sp, &format!("invalid suffix `{}` for float literal", suf)) - .help("valid suffixes are `f32` and `f64`") - .emit(); - } + err!(diag, |span, diag| { + if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { + // if it looks like a width, lets try to be helpful. 
+ let msg = format!("invalid width `{}` for float literal", &suf[1..]); + diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit() + } else { + let msg = format!("invalid suffix `{}` for float literal", suf); + diag.struct_span_err(span, &msg) + .help("valid suffixes are `f32` and `f64`") + .emit(); + } + }); ast::LitKind::FloatUnsuffixed(data) } - } + }) } -pub fn float_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> ast::LitKind { +pub fn float_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) + -> Option { debug!("float_lit: {:?}, {:?}", s, suffix); // FIXME #2252: bounds checking float literals is deferred until trans let s = s.chars().filter(|&c| c != '_').collect::(); - filtered_float_lit(Symbol::intern(&s), suffix, sd, sp) + filtered_float_lit(Symbol::intern(&s), suffix, diag) } /// Parse a string representing a byte literal into its final form. Similar to `char_lit` @@ -500,7 +542,8 @@ pub fn byte_str_lit(lit: &str) -> Rc> { Rc::new(res) } -pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> ast::LitKind { +pub fn integer_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler)>) + -> Option { // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::(); @@ -524,13 +567,16 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a // 1f64 and 2f32 etc. are valid float literals. if let Some(suf) = suffix { if looks_like_width_suffix(&['f'], &suf.as_str()) { - match base { - 16 => sd.span_err(sp, "hexadecimal float literal is not supported"), - 8 => sd.span_err(sp, "octal float literal is not supported"), - 2 => sd.span_err(sp, "binary float literal is not supported"), - _ => () + let err = match base { + 16 => Some("hexadecimal float literal is not supported"), + 8 => Some("octal float literal is not supported"), + 2 => Some("binary float literal is not supported"), + _ => None, + }; + if let Some(err) = err { + err!(diag, |span, diag| diag.span_err(span, err)); } - return filtered_float_lit(Symbol::intern(&s), Some(suf), sd, sp) + return filtered_float_lit(Symbol::intern(&s), Some(suf), diag) } } @@ -539,7 +585,9 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a } if let Some(suf) = suffix { - if suf.as_str().is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")} + if suf.as_str().is_empty() { + err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some")); + } ty = match &*suf.as_str() { "isize" => ast::LitIntType::Signed(ast::IntTy::Is), "i8" => ast::LitIntType::Signed(ast::IntTy::I8), @@ -556,17 +604,20 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a suf => { // i and u look like widths, so lets // give an error message along those lines - if looks_like_width_suffix(&['i', 'u'], suf) { - sd.struct_span_err(sp, &format!("invalid width `{}` for integer literal", - &suf[1..])) - .help("valid widths are 8, 16, 32, 64 and 128") - .emit(); - } else { - sd.struct_span_err(sp, &format!("invalid suffix `{}` for numeric literal", suf)) - .help("the suffix must be one of the integral types \ - (`u32`, `isize`, etc)") - .emit(); - } + err!(diag, |span, diag| { + if looks_like_width_suffix(&['i', 'u'], suf) { + let msg = format!("invalid width `{}` for integer literal", &suf[1..]); + diag.struct_span_err(span, &msg) + .help("valid widths are 8, 16, 32, 64 and 128") + .emit(); + } else { + let msg = format!("invalid suffix `{}` for numeric literal", suf); + diag.struct_span_err(span, &msg) + 
.help("the suffix must be one of the integral types \ + (`u32`, `isize`, etc)") + .emit(); + } + }); ty } @@ -576,7 +627,7 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \ string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix); - match u128::from_str_radix(s, base) { + Some(match u128::from_str_radix(s, base) { Ok(r) => ast::LitKind::Int(r, ty), Err(_) => { // small bases are lexed as if they were base 10, e.g, the string @@ -588,11 +639,11 @@ pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> a s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base)); if !already_errored { - sd.span_err(sp, "int literal is too large"); + err!(diag, |span, diag| diag.span_err(span, "int literal is too large")); } ast::LitKind::Int(0, ty) } - } + }) } #[cfg(test)] @@ -961,7 +1012,7 @@ mod tests { let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, &sess) .unwrap().unwrap(); - let docs = item.attrs.iter().filter(|a| a.name() == "doc") + let docs = item.attrs.iter().filter(|a| a.path == "doc") .map(|a| a.value_str().unwrap().to_string()).collect::>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; assert_eq!(&docs[..], b); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 208db8391441c..0a97accead6a2 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -60,7 +60,6 @@ use util::ThinVec; use std::collections::HashSet; use std::{cmp, mem, slice}; use std::path::{Path, PathBuf}; -use std::rc::Rc; bitflags! { flags Restrictions: u8 { @@ -891,7 +890,7 @@ impl<'a> Parser<'a> { self.parse_seq_to_before_tokens(kets, SeqSep::none(), - |p| p.parse_token_tree(), + |p| Ok(p.parse_token_tree()), |mut e| handler.cancel(&mut e)); } @@ -1267,7 +1266,7 @@ impl<'a> Parser<'a> { break; } token::OpenDelim(token::Brace) => { - self.parse_token_tree()?; + self.parse_token_tree(); break; } _ => self.bump(), @@ -1643,44 +1642,15 @@ impl<'a> Parser<'a> { _ => { return self.unexpected_last(&self.token); } }, token::Literal(lit, suf) => { - let (suffix_illegal, out) = match lit { - token::Byte(i) => (true, LitKind::Byte(parse::byte_lit(&i.as_str()).0)), - token::Char(i) => (true, LitKind::Char(parse::char_lit(&i.as_str()).0)), - - // there are some valid suffixes for integer and - // float literals, so all the handling is done - // internally. 
- token::Integer(s) => { - let diag = &self.sess.span_diagnostic; - (false, parse::integer_lit(&s.as_str(), suf, diag, self.span)) - } - token::Float(s) => { - let diag = &self.sess.span_diagnostic; - (false, parse::float_lit(&s.as_str(), suf, diag, self.span)) - } - - token::Str_(s) => { - let s = Symbol::intern(&parse::str_lit(&s.as_str())); - (true, LitKind::Str(s, ast::StrStyle::Cooked)) - } - token::StrRaw(s, n) => { - let s = Symbol::intern(&parse::raw_str_lit(&s.as_str())); - (true, LitKind::Str(s, ast::StrStyle::Raw(n))) - } - token::ByteStr(i) => { - (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str()))) - } - token::ByteStrRaw(i, _) => { - (true, LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))) - } - }; + let diag = Some((self.span, &self.sess.span_diagnostic)); + let (suffix_illegal, result) = parse::lit_token(lit, suf, diag); if suffix_illegal { let sp = self.span; self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf) } - out + result.unwrap() } _ => { return self.unexpected_last(&self.token); } }; @@ -2108,10 +2078,10 @@ impl<'a> Parser<'a> { fn expect_delimited_token_tree(&mut self) -> PResult<'a, (token::DelimToken, ThinTokenStream)> { match self.token { - token::OpenDelim(delim) => self.parse_token_tree().map(|tree| match tree { - TokenTree::Delimited(_, delimited) => (delim, delimited.stream().into()), + token::OpenDelim(delim) => match self.parse_token_tree() { + TokenTree::Delimited(_, delimited) => Ok((delim, delimited.stream().into())), _ => unreachable!(), - }), + }, _ => Err(self.fatal("expected open delimiter")), } } @@ -2656,24 +2626,23 @@ impl<'a> Parser<'a> { } /// parse a single token tree from the input. - pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> { + pub fn parse_token_tree(&mut self) -> TokenTree { match self.token { token::OpenDelim(..) => { let frame = mem::replace(&mut self.token_cursor.frame, self.token_cursor.stack.pop().unwrap()); self.span = frame.span; self.bump(); - return Ok(TokenTree::Delimited(frame.span, Delimited { + TokenTree::Delimited(frame.span, Delimited { delim: frame.delim, tts: frame.tree_cursor.original_stream().into(), - })); + }) }, token::CloseDelim(_) | token::Eof => unreachable!(), _ => { let token = mem::replace(&mut self.token, token::Underscore); - let res = Ok(TokenTree::Token(self.span, token)); self.bump(); - res + TokenTree::Token(self.prev_span, token) } } } @@ -2683,11 +2652,22 @@ impl<'a> Parser<'a> { pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec> { let mut tts = Vec::new(); while self.token != token::Eof { - tts.push(self.parse_token_tree()?); + tts.push(self.parse_token_tree()); } Ok(tts) } + pub fn parse_tokens(&mut self) -> TokenStream { + let mut result = Vec::new(); + loop { + match self.token { + token::Eof | token::CloseDelim(..) 
=> break, + _ => result.push(self.parse_token_tree().into()), + } + } + TokenStream::concat(result) + } + /// Parse a prefix-unary-operator expr pub fn parse_prefix_expr(&mut self, already_parsed_attrs: Option>) @@ -5181,11 +5161,9 @@ impl<'a> Parser<'a> { let attr = ast::Attribute { id: attr::mk_attr_id(), style: ast::AttrStyle::Outer, - value: ast::MetaItem { - name: Symbol::intern("warn_directory_ownership"), - node: ast::MetaItemKind::Word, - span: syntax_pos::DUMMY_SP, - }, + path: ast::Path::from_ident(syntax_pos::DUMMY_SP, + Ident::from_str("warn_directory_ownership")), + tokens: TokenStream::empty(), is_sugared_doc: false, span: syntax_pos::DUMMY_SP, }; diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 25601f2420e8a..75852629ce131 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -17,7 +17,7 @@ pub use self::Token::*; use ast::{self}; use ptr::P; use symbol::keywords; -use tokenstream; +use tokenstream::TokenTree; use std::fmt; use std::rc::Rc; @@ -349,7 +349,7 @@ pub enum Nonterminal { /// Stuff inside brackets for attributes NtMeta(ast::MetaItem), NtPath(ast::Path), - NtTT(tokenstream::TokenTree), + NtTT(TokenTree), // These are not exposed to macros, but are used by quasiquote. NtArm(ast::Arm), NtImplItem(ast::ImplItem), diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 83753f398a376..f042a18d61036 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -28,7 +28,7 @@ use ptr::P; use std_inject; use symbol::{Symbol, keywords}; use syntax_pos::DUMMY_SP; -use tokenstream::{self, TokenTree}; +use tokenstream::{self, TokenStream, TokenTree}; use std::ascii; use std::io::{self, Write, Read}; @@ -329,6 +329,10 @@ pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String { to_string(|s| s.print_tts(tts.iter().cloned().collect())) } +pub fn tokens_to_string(tokens: TokenStream) -> String { + to_string(|s| s.print_tts(tokens)) +} + pub fn stmt_to_string(stmt: &ast::Stmt) -> String { to_string(|s| s.print_stmt(stmt)) } @@ -750,7 +754,21 @@ pub trait PrintState<'a> { ast::AttrStyle::Inner => word(self.writer(), "#![")?, ast::AttrStyle::Outer => word(self.writer(), "#[")?, } - self.print_meta_item(&attr.meta())?; + if let Some(mi) = attr.meta() { + self.print_meta_item(&mi)? + } else { + for (i, segment) in attr.path.segments.iter().enumerate() { + if i > 0 { + word(self.writer(), "::")? + } + if segment.identifier.name != keywords::CrateRoot.name() && + segment.identifier.name != "$crate" { + word(self.writer(), &segment.identifier.name.as_str())?; + } + } + space(self.writer())?; + self.print_tts(attr.tokens.clone())?; + } word(self.writer(), "]") } } @@ -789,6 +807,45 @@ pub trait PrintState<'a> { self.end() } + /// This doesn't deserve to be called "pretty" printing, but it should be + /// meaning-preserving. A quick hack that might help would be to look at the + /// spans embedded in the TTs to decide where to put spaces and newlines. + /// But it'd be better to parse these according to the grammar of the + /// appropriate macro, transcribe back into the grammar we just parsed from, + /// and then pretty-print the resulting AST nodes (so, e.g., we print + /// expression arguments as expressions). It can be done! I think. + fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> { + match tt { + TokenTree::Token(_, ref tk) => { + word(self.writer(), &token_to_string(tk))?; + match *tk { + parse::token::DocComment(..) 
=> { + hardbreak(self.writer()) + } + _ => Ok(()) + } + } + TokenTree::Delimited(_, ref delimed) => { + word(self.writer(), &token_to_string(&delimed.open_token()))?; + space(self.writer())?; + self.print_tts(delimed.stream())?; + space(self.writer())?; + word(self.writer(), &token_to_string(&delimed.close_token())) + }, + } + } + + fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> { + self.ibox(0)?; + for (i, tt) in tts.into_trees().enumerate() { + if i != 0 { + space(self.writer())?; + } + self.print_tt(tt)?; + } + self.end() + } + fn space_if_not_bol(&mut self) -> io::Result<()> { if !self.is_bol() { space(self.writer())?; } Ok(()) @@ -1458,45 +1515,6 @@ impl<'a> State<'a> { } } - /// This doesn't deserve to be called "pretty" printing, but it should be - /// meaning-preserving. A quick hack that might help would be to look at the - /// spans embedded in the TTs to decide where to put spaces and newlines. - /// But it'd be better to parse these according to the grammar of the - /// appropriate macro, transcribe back into the grammar we just parsed from, - /// and then pretty-print the resulting AST nodes (so, e.g., we print - /// expression arguments as expressions). It can be done! I think. - pub fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> { - match tt { - TokenTree::Token(_, ref tk) => { - word(&mut self.s, &token_to_string(tk))?; - match *tk { - parse::token::DocComment(..) => { - hardbreak(&mut self.s) - } - _ => Ok(()) - } - } - TokenTree::Delimited(_, ref delimed) => { - word(&mut self.s, &token_to_string(&delimed.open_token()))?; - space(&mut self.s)?; - self.print_tts(delimed.stream())?; - space(&mut self.s)?; - word(&mut self.s, &token_to_string(&delimed.close_token())) - }, - } - } - - pub fn print_tts(&mut self, tts: tokenstream::TokenStream) -> io::Result<()> { - self.ibox(0)?; - for (i, tt) in tts.into_trees().enumerate() { - if i != 0 { - space(&mut self.s)?; - } - self.print_tt(tt)?; - } - self.end() - } - pub fn print_variant(&mut self, v: &ast::Variant) -> io::Result<()> { self.head("")?; let generics = ast::Generics::default(); diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 2192d203cdc23..c541df9230a64 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -15,6 +15,7 @@ use syntax_pos::{DUMMY_SP, Span}; use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute}; use parse::ParseSess; use ptr::P; +use tokenstream::TokenStream; /// Craft a span that will be ignored by the stability lint's /// call to codemap's is_internal check. 
@@ -70,11 +71,8 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, krate.module.items.insert(0, P(ast::Item { attrs: vec![ast::Attribute { style: ast::AttrStyle::Outer, - value: ast::MetaItem { - name: Symbol::intern("prelude_import"), - node: ast::MetaItemKind::Word, - span: span, - }, + path: ast::Path::from_ident(span, ast::Ident::from_str("prelude_import")), + tokens: TokenStream::empty(), id: attr::mk_attr_id(), is_sugared_doc: false, span: span, diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 2da442a1a53da..b75b3efda36c8 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -162,6 +162,12 @@ impl From for TokenStream { } } +impl From for TokenStream { + fn from(token: Token) -> TokenStream { + TokenTree::Token(DUMMY_SP, token).into() + } +} + impl> iter::FromIterator for TokenStream { fn from_iter>(iter: I) -> Self { TokenStream::concat(iter.into_iter().map(Into::into).collect::>()) @@ -360,7 +366,7 @@ impl PartialEq for ThinTokenStream { impl fmt::Display for TokenStream { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(&pprust::tts_to_string(&self.trees().collect::>())) + f.write_str(&pprust::tokens_to_string(self.clone())) } } diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index a7e2d82bb978f..b01ef65e5fe5e 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -23,9 +23,11 @@ struct MarkAttrs<'a>(&'a [ast::Name]); impl<'a> Visitor<'a> for MarkAttrs<'a> { fn visit_attribute(&mut self, attr: &Attribute) { - if self.0.contains(&attr.name()) { - mark_used(attr); - mark_known(attr); + if let Some(name) = attr.name() { + if self.0.contains(&name) { + mark_used(attr); + mark_known(attr); + } } } diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index fe492bd7fc849..48e7ff0d24370 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -439,7 +439,7 @@ impl<'a> TraitDef<'a> { attrs.extend(item.attrs .iter() .filter(|a| { - match &*a.name().as_str() { + a.name().is_some() && match &*a.name().unwrap().as_str() { "allow" | "warn" | "deny" | "forbid" | "stable" | "unstable" => true, _ => false, } diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index 5adaf470f2374..2d815b3f1bb7d 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -248,7 +248,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { fn visit_item(&mut self, item: &'a ast::Item) { if let ast::ItemKind::MacroDef(..) = item.node { if self.is_proc_macro_crate && - item.attrs.iter().any(|attr| attr.name() == "macro_export") { + item.attrs.iter().any(|attr| attr.path == "macro_export") { let msg = "cannot export macro_rules! 
macros from a `proc-macro` crate type currently"; self.handler.span_err(item.span, msg); @@ -270,12 +270,12 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { for attr in &item.attrs { if is_proc_macro_attr(&attr) { if let Some(prev_attr) = found_attr { - let msg = if attr.name() == prev_attr.name() { + let msg = if attr.path == prev_attr.path { format!("Only one `#[{}]` attribute is allowed on any given function", - attr.name()) + attr.path) } else { format!("`#[{}]` and `#[{}]` attributes cannot both be applied \ - to the same function", attr.name(), prev_attr.name()) + to the same function", attr.path, prev_attr.path) }; self.handler.struct_span_err(attr.span(), &msg) @@ -299,7 +299,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { if !is_fn { let msg = format!("the `#[{}]` attribute may only be used on bare functions", - attr.name()); + attr.path); self.handler.span_err(attr.span(), &msg); return; @@ -311,7 +311,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> { if !self.is_proc_macro_crate { let msg = format!("the `#[{}]` attribute is only usable with crates of the \ - `proc-macro` crate type", attr.name()); + `proc-macro` crate type", attr.path); self.handler.span_err(attr.span(), &msg); return; diff --git a/src/rustllvm/RustWrapper.cpp b/src/rustllvm/RustWrapper.cpp index e89f48b4105d3..5ab786f40b933 100644 --- a/src/rustllvm/RustWrapper.cpp +++ b/src/rustllvm/RustWrapper.cpp @@ -9,6 +9,7 @@ // except according to those terms. #include "rustllvm.h" +#include "llvm/IR/DebugInfoMetadata.h" #include "llvm/IR/DiagnosticInfo.h" #include "llvm/IR/DiagnosticPrinter.h" #include "llvm/IR/Instructions.h" @@ -594,7 +595,7 @@ extern "C" LLVMRustMetadataRef LLVMRustDIBuilderCreateStaticVariable( const char *LinkageName, LLVMRustMetadataRef File, unsigned LineNo, LLVMRustMetadataRef Ty, bool IsLocalToUnit, LLVMValueRef V, LLVMRustMetadataRef Decl = nullptr, uint32_t AlignInBits = 0) { - Constant *InitVal = cast(unwrap(V)); + llvm::GlobalVariable *InitVal = cast(unwrap(V)); #if LLVM_VERSION_GE(4, 0) llvm::DIExpression *InitExpr = nullptr; @@ -606,26 +607,21 @@ extern "C" LLVMRustMetadataRef LLVMRustDIBuilderCreateStaticVariable( InitExpr = Builder->createConstantValueExpression( FPVal->getValueAPF().bitcastToAPInt().getZExtValue()); } -#endif -#if LLVM_VERSION_GE(4, 0) - return wrap(Builder->createGlobalVariableExpression( + llvm::DIGlobalVariableExpression *VarExpr = Builder->createGlobalVariableExpression( + unwrapDI(Context), Name, LinkageName, + unwrapDI(File), LineNo, unwrapDI(Ty), IsLocalToUnit, + InitExpr, unwrapDIPtr(Decl), AlignInBits); + + InitVal->setMetadata("dbg", VarExpr); + + return wrap(VarExpr); #else return wrap(Builder->createGlobalVariable( -#endif unwrapDI(Context), Name, LinkageName, unwrapDI(File), LineNo, unwrapDI(Ty), IsLocalToUnit, -#if LLVM_VERSION_GE(4, 0) - InitExpr, -#else - InitVal, -#endif - unwrapDIPtr(Decl) -#if LLVM_VERSION_GE(4, 0) - , - AlignInBits + InitVal, unwrapDIPtr(Decl))); #endif - )); } extern "C" LLVMRustMetadataRef LLVMRustDIBuilderCreateVariable( diff --git a/src/test/compile-fail-fulldeps/proc-macro/proc-macro-attributes.rs b/src/test/compile-fail-fulldeps/proc-macro/proc-macro-attributes.rs index 4ad1cf79d61c6..df881bedec1bb 100644 --- a/src/test/compile-fail-fulldeps/proc-macro/proc-macro-attributes.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/proc-macro-attributes.rs @@ -20,6 +20,7 @@ extern crate derive_b; #[C] //~ ERROR: The attribute `C` is currently unknown to the compiler #[B(D)] #[B(E = "foo")] +#[B arbitrary tokens] //~ 
expected one of `(` or `=`, found `arbitrary` struct B; fn main() {} diff --git a/src/test/compile-fail/duplicate-check-macro-exports.rs b/src/test/compile-fail/duplicate-check-macro-exports.rs new file mode 100644 index 0000000000000..53d7e54ee5b7b --- /dev/null +++ b/src/test/compile-fail/duplicate-check-macro-exports.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(use_extern_macros)] + +pub use std::panic; //~ NOTE previous macro export here + +#[macro_export] +macro_rules! panic { () => {} } //~ ERROR a macro named `panic` has already been exported +//~| NOTE `panic` already exported + +fn main() {} diff --git a/src/test/compile-fail/feature-gate-rvalue_static_promotion.rs b/src/test/compile-fail/feature-gate-rvalue_static_promotion.rs new file mode 100644 index 0000000000000..f33d0a71481d2 --- /dev/null +++ b/src/test/compile-fail/feature-gate-rvalue_static_promotion.rs @@ -0,0 +1,15 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[allow(unused_variables)] +fn main() { + let x: &'static u32 = &42; //~ error: does not live long enough + let y: &'static Option = &None; //~ error: does not live long enough +} diff --git a/src/test/parse-fail/macro-attribute.rs b/src/test/compile-fail/macro-attribute.rs similarity index 94% rename from src/test/parse-fail/macro-attribute.rs rename to src/test/compile-fail/macro-attribute.rs index 18add7d011cba..52f867fe913b8 100644 --- a/src/test/parse-fail/macro-attribute.rs +++ b/src/test/compile-fail/macro-attribute.rs @@ -8,7 +8,5 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z parse-only - #[doc = $not_there] //~ error: unexpected token: `$` fn main() { } diff --git a/src/test/compile-fail/macro-with-seps-err-msg.rs b/src/test/compile-fail/macro-with-seps-err-msg.rs index 6cc682bde997f..c28e22d58f9db 100644 --- a/src/test/compile-fail/macro-with-seps-err-msg.rs +++ b/src/test/compile-fail/macro-with-seps-err-msg.rs @@ -14,4 +14,5 @@ fn main() { globnar::brotz!(); //~ ERROR non-ident macro paths are experimental ::foo!(); //~ ERROR non-ident macro paths are experimental foo::!(); //~ ERROR type parameters are not allowed on macros + #[derive(foo::Bar)] struct T; //~ ERROR non-ident macro paths are experimental } diff --git a/src/test/compile-fail/malformed-derive-entry.rs b/src/test/compile-fail/malformed-derive-entry.rs index 62dbc21495a54..ac000628f2b04 100644 --- a/src/test/compile-fail/malformed-derive-entry.rs +++ b/src/test/compile-fail/malformed-derive-entry.rs @@ -9,11 +9,11 @@ // except according to those terms. 
#[derive(Copy(Bad))] -//~^ ERROR malformed `derive` entry +//~^ ERROR expected one of `)`, `,`, or `::`, found `(` struct Test1; #[derive(Copy="bad")] -//~^ ERROR malformed `derive` entry +//~^ ERROR expected one of `)`, `,`, or `::`, found `=` struct Test2; #[derive()] diff --git a/src/test/compile-fail/suffixed-literal-meta.rs b/src/test/compile-fail/suffixed-literal-meta.rs new file mode 100644 index 0000000000000..bf55b7bdcb1de --- /dev/null +++ b/src/test/compile-fail/suffixed-literal-meta.rs @@ -0,0 +1,25 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(attr_literals)] + +#[path = 1usize] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1u8] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1u16] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1u32] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1u64] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1isize] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1i8] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1i16] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1i32] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1i64] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes +#[path = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes +fn main() { } diff --git a/src/test/parse-fail/attr-bad-meta.rs b/src/test/parse-fail/attr-bad-meta.rs index 092adbf29e340..d57a813311b5a 100644 --- a/src/test/parse-fail/attr-bad-meta.rs +++ b/src/test/parse-fail/attr-bad-meta.rs @@ -8,10 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z parse-only - -// error-pattern:expected one of `=` or `]` - // asterisk is bogus -#[attr*] +#[path*] //~ ERROR expected one of `(` or `=` mod m {} diff --git a/src/test/parse-fail/suffixed-literal-meta.rs b/src/test/parse-fail/suffixed-literal-meta.rs deleted file mode 100644 index 0e2840c69d364..0000000000000 --- a/src/test/parse-fail/suffixed-literal-meta.rs +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -// compile-flags: -Z parse-only - -#[foo = 1usize] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1u8] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1u16] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1u32] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1u64] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1isize] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1i8] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1i16] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1i32] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1i64] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1.0f32] //~ ERROR: suffixed literals are not allowed in attributes -#[foo = 1.0f64] //~ ERROR: suffixed literals are not allowed in attributes -fn main() { } diff --git a/src/test/run-make/save-analysis-fail/foo.rs b/src/test/run-make/save-analysis-fail/foo.rs index e331f65abb7b3..a996aa4fad5a7 100644 --- a/src/test/run-make/save-analysis-fail/foo.rs +++ b/src/test/run-make/save-analysis-fail/foo.rs @@ -448,3 +448,8 @@ fn test_format_args() { print!("{0} + {} = {}", x, y); print!("x is {}, y is {1}, name is {n}", x, y, n = name); } + +extern { + static EXTERN_FOO: u8; + fn extern_foo(a: u8, b: i32) -> String; +} diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs index 2f94a440e72da..0433b95865ef8 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs @@ -49,9 +49,10 @@ fn cond(input: TokenStream) -> TokenStream { _ => false, }; conds.push(if is_else || input.peek().is_none() { - qquote!({ unquote rhs }) + quote!({ $rhs }) } else { - qquote!(if unquote(test.unwrap()) { unquote rhs } else) + let test = test.unwrap(); + quote!(if $test { $rhs } else) }); } diff --git a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs index 91075276a3020..9522592a5e9e6 100644 --- a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs +++ b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs @@ -29,6 +29,11 @@ pub fn plugin_registrar(reg: &mut Registry) { // This macro is not very interesting, but it does contain delimited tokens with // no content - `()` and `{}` - which has caused problems in the past. +// Also, it tests that we can escape `$` via `$$`. fn hello(_: TokenStream) -> TokenStream { - qquote!({ fn hello() {} hello(); }) + quote!({ + fn hello() {} + macro_rules! 
m { ($$($$t:tt)*) => { $$($$t)* } } + m!(hello()); + }) } diff --git a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs index 612c199e8281a..0e37a7a5dcce2 100644 --- a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs +++ b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs @@ -34,21 +34,21 @@ pub fn plugin_registrar(reg: &mut Registry) { } fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream { - qquote!(fn f1() -> bool { true }) + quote!(fn f1() -> bool { true }) } fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream { - qquote!(unquote item) + quote!($item) } fn tru(_ts: TokenStream) -> TokenStream { - qquote!(true) + quote!(true) } fn ret_tru(_ts: TokenStream) -> TokenStream { - qquote!(return true;) + quote!(return true;) } fn identity(ts: TokenStream) -> TokenStream { - qquote!(unquote ts) + quote!($ts) } diff --git a/src/test/run-pass-fulldeps/macro-quote-1.rs b/src/test/run-pass-fulldeps/macro-quote-1.rs index 57b6c3f0adb89..01b0ed802354c 100644 --- a/src/test/run-pass-fulldeps/macro-quote-1.rs +++ b/src/test/run-pass-fulldeps/macro-quote-1.rs @@ -22,6 +22,6 @@ use syntax::parse::token; use syntax::tokenstream::TokenTree; fn main() { - let true_tok = TokenTree::Token(syntax_pos::DUMMY_SP, token::Ident(Ident::from_str("true"))); - assert!(qquote!(true).eq_unspanned(&true_tok.into())); + let true_tok = token::Ident(Ident::from_str("true")); + assert!(quote!(true).eq_unspanned(&true_tok.into())); } diff --git a/src/test/run-pass-fulldeps/macro-quote-empty-delims.rs b/src/test/run-pass-fulldeps/macro-quote-test.rs similarity index 100% rename from src/test/run-pass-fulldeps/macro-quote-empty-delims.rs rename to src/test/run-pass-fulldeps/macro-quote-test.rs diff --git a/src/test/run-pass-fulldeps/proc-macro/attr-args.rs b/src/test/run-pass-fulldeps/proc-macro/attr-args.rs index d28d75d81a2fb..8a9fdd7536770 100644 --- a/src/test/run-pass-fulldeps/proc-macro/attr-args.rs +++ b/src/test/run-pass-fulldeps/proc-macro/attr-args.rs @@ -19,6 +19,6 @@ use attr_args::attr_with_args; #[attr_with_args(text = "Hello, world!")] fn foo() {} -fn main() { - assert_eq!(foo(), "Hello, world!"); -} +#[::attr_args::identity + fn main() { assert_eq!(foo(), "Hello, world!"); }] +struct Dummy; diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs index 6e1eb395a0a19..989c77f1089cf 100644 --- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs @@ -30,3 +30,8 @@ pub fn attr_with_args(args: TokenStream, input: TokenStream) -> TokenStream { fn foo() -> &'static str { "Hello, world!" 
} "#.parse().unwrap() } + +#[proc_macro_attribute] +pub fn identity(attr_args: TokenStream, _: TokenStream) -> TokenStream { + attr_args +} diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-b.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-b.rs index bf793534d50c9..7b521f2b9138a 100644 --- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-b.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-b.rs @@ -19,7 +19,7 @@ use proc_macro::TokenStream; #[proc_macro_derive(B, attributes(B, C))] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); - assert!(input.contains("#[B]")); + assert!(input.contains("#[B arbitrary tokens]")); assert!(input.contains("struct B {")); assert!(input.contains("#[C]")); "".parse().unwrap() diff --git a/src/test/run-pass-fulldeps/proc-macro/derive-b.rs b/src/test/run-pass-fulldeps/proc-macro/derive-b.rs index f1e1626ddf8ca..995dc65729a50 100644 --- a/src/test/run-pass-fulldeps/proc-macro/derive-b.rs +++ b/src/test/run-pass-fulldeps/proc-macro/derive-b.rs @@ -11,11 +11,12 @@ // aux-build:derive-b.rs // ignore-stage1 -#[macro_use] +#![feature(proc_macro)] + extern crate derive_b; -#[derive(Debug, PartialEq, B, Eq, Copy, Clone)] -#[B] +#[derive(Debug, PartialEq, derive_b::B, Eq, Copy, Clone)] +#[cfg_attr(all(), B arbitrary tokens)] struct B { #[C] a: u64 diff --git a/src/test/run-pass/auxiliary/issue_40469.rs b/src/test/run-pass/auxiliary/issue_40469.rs new file mode 100644 index 0000000000000..4970bba431a84 --- /dev/null +++ b/src/test/run-pass/auxiliary/issue_40469.rs @@ -0,0 +1,11 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules! m { () => { $crate::main(); } } diff --git a/src/test/run-pass/ifmt.rs b/src/test/run-pass/ifmt.rs index 2a7a593d26800..cef2f879f9cd7 100644 --- a/src/test/run-pass/ifmt.rs +++ b/src/test/run-pass/ifmt.rs @@ -160,6 +160,34 @@ pub fn main() { t!(format!("{:?}", -0.0), "-0"); t!(format!("{:?}", 0.0), "0"); + // sign aware zero padding + t!(format!("{:<3}", 1), "1 "); + t!(format!("{:>3}", 1), " 1"); + t!(format!("{:^3}", 1), " 1 "); + t!(format!("{:03}", 1), "001"); + t!(format!("{:<03}", 1), "001"); + t!(format!("{:>03}", 1), "001"); + t!(format!("{:^03}", 1), "001"); + t!(format!("{:+03}", 1), "+01"); + t!(format!("{:<+03}", 1), "+01"); + t!(format!("{:>+03}", 1), "+01"); + t!(format!("{:^+03}", 1), "+01"); + t!(format!("{:#05x}", 1), "0x001"); + t!(format!("{:<#05x}", 1), "0x001"); + t!(format!("{:>#05x}", 1), "0x001"); + t!(format!("{:^#05x}", 1), "0x001"); + t!(format!("{:05}", 1.2), "001.2"); + t!(format!("{:<05}", 1.2), "001.2"); + t!(format!("{:>05}", 1.2), "001.2"); + t!(format!("{:^05}", 1.2), "001.2"); + t!(format!("{:05}", -1.2), "-01.2"); + t!(format!("{:<05}", -1.2), "-01.2"); + t!(format!("{:>05}", -1.2), "-01.2"); + t!(format!("{:^05}", -1.2), "-01.2"); + t!(format!("{:+05}", 1.2), "+01.2"); + t!(format!("{:<+05}", 1.2), "+01.2"); + t!(format!("{:>+05}", 1.2), "+01.2"); + t!(format!("{:^+05}", 1.2), "+01.2"); // Ergonomic format_args! 
t!(format!("{0:x} {0:X}", 15), "f F"); diff --git a/src/test/run-pass/issue-31260.rs b/src/test/run-pass/issue-31260.rs new file mode 100644 index 0000000000000..e771fc7464d00 --- /dev/null +++ b/src/test/run-pass/issue-31260.rs @@ -0,0 +1,20 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub struct Struct { + pub field: K, +} + +// Partial fix for #31260, doesn't work without {...}. +static STRUCT: Struct<&'static [u8]> = Struct { + field: {&[1]} +}; + +fn main() {} diff --git a/src/test/run-pass/issue-34571.rs b/src/test/run-pass/issue-34571.rs new file mode 100644 index 0000000000000..7d80415657655 --- /dev/null +++ b/src/test/run-pass/issue-34571.rs @@ -0,0 +1,20 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[repr(u8)] +enum Foo { + Foo(u8), +} + +fn main() { + match Foo::Foo(1) { + _ => () + } +} diff --git a/src/test/run-pass/issue-40408.rs b/src/test/run-pass/issue-40408.rs new file mode 100644 index 0000000000000..a73dc1966b4be --- /dev/null +++ b/src/test/run-pass/issue-40408.rs @@ -0,0 +1,16 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + println!("{}", 0E+10); + println!("{}", 0e+10); + println!("{}", 00e+10); + println!("{}", 00E+10); +} diff --git a/src/test/run-pass/issue-40469.rs b/src/test/run-pass/issue-40469.rs new file mode 100644 index 0000000000000..30055e532cd45 --- /dev/null +++ b/src/test/run-pass/issue-40469.rs @@ -0,0 +1,18 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-pretty issue #37195 + +#![allow(dead_code)] + +include!("auxiliary/issue_40469.rs"); +fn f() { m!(); } + +fn main() {} diff --git a/src/test/run-pass/macro-nested_definition_issue-31946.rs b/src/test/run-pass/macro-nested_definition_issue-31946.rs new file mode 100644 index 0000000000000..84ff2dc4d0d61 --- /dev/null +++ b/src/test/run-pass/macro-nested_definition_issue-31946.rs @@ -0,0 +1,18 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + println!("{}", { + macro_rules! 
foo { + ($name:expr) => { concat!("hello ", $name) } + } + foo!("rust") + }); +} diff --git a/src/test/run-pass/optimization-fuel-0.rs b/src/test/run-pass/optimization-fuel-0.rs new file mode 100644 index 0000000000000..3832c040108f8 --- /dev/null +++ b/src/test/run-pass/optimization-fuel-0.rs @@ -0,0 +1,24 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_name="foo"] + +use std::mem::size_of; + +// compile-flags: -Z fuel=foo=0 + +struct S1(u8, u16, u8); +struct S2(u8, u16, u8); + +fn main() { + assert_eq!(size_of::(), 6); + assert_eq!(size_of::(), 6); +} + diff --git a/src/test/run-pass/optimization-fuel-1.rs b/src/test/run-pass/optimization-fuel-1.rs new file mode 100644 index 0000000000000..5f294e26aa53e --- /dev/null +++ b/src/test/run-pass/optimization-fuel-1.rs @@ -0,0 +1,26 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_name="foo"] + +use std::mem::size_of; + +// compile-flags: -Z fuel=foo=1 + +struct S1(u8, u16, u8); +struct S2(u8, u16, u8); + +fn main() { + let optimized = (size_of::() == 4) as usize + +(size_of::() == 4) as usize; + assert_eq!(optimized, 1); +} + + diff --git a/src/test/run-pass/rvalue-static-promotion.rs b/src/test/run-pass/rvalue-static-promotion.rs new file mode 100644 index 0000000000000..30643cfc3eb7d --- /dev/null +++ b/src/test/run-pass/rvalue-static-promotion.rs @@ -0,0 +1,17 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(rvalue_static_promotion)] + +#[allow(unused_variables)] +fn main() { + let x: &'static u32 = &42; + let y: &'static Option = &None; +} diff --git a/src/test/run-pass/type-sizes.rs b/src/test/run-pass/type-sizes.rs index bbb01eaaf46b9..9d1a3500a582a 100644 --- a/src/test/run-pass/type-sizes.rs +++ b/src/test/run-pass/type-sizes.rs @@ -31,6 +31,17 @@ enum e3 { a([u16; 0], u8), b } +struct ReorderedStruct { + a: u8, + b: u64, + c: u8 +} + +enum ReorderedEnum { + A(u8, u64, u8), + B(u8, u64, u8), +} + pub fn main() { assert_eq!(size_of::(), 1 as usize); assert_eq!(size_of::(), 4 as usize); @@ -54,4 +65,6 @@ pub fn main() { assert_eq!(size_of::(), 8 as usize); assert_eq!(size_of::(), 8 as usize); assert_eq!(size_of::(), 4 as usize); + assert_eq!(size_of::(), 16); + assert_eq!(size_of::(), 16); } diff --git a/src/test/rustdoc/const.rs b/src/test/rustdoc/const.rs new file mode 100644 index 0000000000000..380feb941d6fe --- /dev/null +++ b/src/test/rustdoc/const.rs @@ -0,0 +1,22 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type="lib"] + +#![feature(const_fn)] + +pub struct Foo; + +impl Foo { + // @has const/struct.Foo.html '//*[@id="new.v"]//code' 'const unsafe fn new' + pub const unsafe fn new() -> Foo { + Foo + } +} diff --git a/src/test/ui/did_you_mean/issue-39544.stderr b/src/test/ui/did_you_mean/issue-39544.stderr index c0088f39ad3e1..7f124e6d34d35 100644 --- a/src/test/ui/did_you_mean/issue-39544.stderr +++ b/src/test/ui/did_you_mean/issue-39544.stderr @@ -1,8 +1,10 @@ error: cannot borrow immutable field `z.x` as mutable --> $DIR/issue-39544.rs:21:18 | +20 | let z = Z { x: X::Y }; + | - consider changing this to `mut z` 21 | let _ = &mut z.x; - | ^^^ + | ^^^ cannot mutably borrow immutable field error: aborting due to previous error diff --git a/src/test/ui/print-fuel/print-fuel.rs b/src/test/ui/print-fuel/print-fuel.rs new file mode 100644 index 0000000000000..0d9e243763f78 --- /dev/null +++ b/src/test/ui/print-fuel/print-fuel.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_name="foo"] +#![allow(dead_code)] + +// compile-flags: -Z print-fuel=foo + +struct S1(u8, u16, u8); +struct S2(u8, u16, u8); +struct S3(u8, u16, u8); + +fn main() { +} diff --git a/src/test/ui/print-fuel/print-fuel.stdout b/src/test/ui/print-fuel/print-fuel.stdout new file mode 100644 index 0000000000000..cc88cc077bb21 --- /dev/null +++ b/src/test/ui/print-fuel/print-fuel.stdout @@ -0,0 +1 @@ +Fuel used by foo: 3 diff --git a/src/test/ui/print_type_sizes/nullable.stdout b/src/test/ui/print_type_sizes/nullable.stdout index dd999c4a5e4c7..830678f174f88 100644 --- a/src/test/ui/print_type_sizes/nullable.stdout +++ b/src/test/ui/print_type_sizes/nullable.stdout @@ -1,25 +1,22 @@ -print-type-size type: `IndirectNonZero`: 20 bytes, alignment: 4 bytes -print-type-size field `.pre`: 1 bytes -print-type-size padding: 3 bytes -print-type-size field `.nested`: 12 bytes, alignment: 4 bytes +print-type-size type: `IndirectNonZero`: 12 bytes, alignment: 4 bytes +print-type-size field `.nested`: 8 bytes print-type-size field `.post`: 2 bytes -print-type-size end padding: 2 bytes -print-type-size type: `MyOption>`: 20 bytes, alignment: 4 bytes -print-type-size variant `Some`: 20 bytes -print-type-size field `.0`: 20 bytes -print-type-size type: `EmbeddedDiscr`: 12 bytes, alignment: 4 bytes -print-type-size variant `Record`: 10 bytes -print-type-size field `.pre`: 1 bytes -print-type-size padding: 3 bytes -print-type-size field `.val`: 4 bytes, alignment: 4 bytes -print-type-size field `.post`: 2 bytes -print-type-size end padding: 2 bytes -print-type-size type: `NestedNonZero`: 12 bytes, alignment: 4 bytes print-type-size field `.pre`: 1 bytes -print-type-size padding: 3 bytes -print-type-size field `.val`: 4 bytes, alignment: 4 bytes +print-type-size end padding: 1 bytes +print-type-size type: `MyOption>`: 12 bytes, alignment: 4 bytes +print-type-size variant `Some`: 12 bytes +print-type-size field `.0`: 12 bytes +print-type-size type: `EmbeddedDiscr`: 8 bytes, alignment: 4 bytes +print-type-size variant 
`Record`: 7 bytes +print-type-size field `.val`: 4 bytes +print-type-size field `.post`: 2 bytes +print-type-size field `.pre`: 1 bytes +print-type-size end padding: 1 bytes +print-type-size type: `NestedNonZero`: 8 bytes, alignment: 4 bytes +print-type-size field `.val`: 4 bytes print-type-size field `.post`: 2 bytes -print-type-size end padding: 2 bytes +print-type-size field `.pre`: 1 bytes +print-type-size end padding: 1 bytes print-type-size type: `MyOption>`: 4 bytes, alignment: 4 bytes print-type-size variant `Some`: 4 bytes print-type-size field `.0`: 4 bytes diff --git a/src/test/ui/print_type_sizes/packed.stdout b/src/test/ui/print_type_sizes/packed.stdout index 1278a7d7c92c6..83fd333c9c7fc 100644 --- a/src/test/ui/print_type_sizes/packed.stdout +++ b/src/test/ui/print_type_sizes/packed.stdout @@ -1,13 +1,11 @@ -print-type-size type: `Padded`: 16 bytes, alignment: 4 bytes +print-type-size type: `Padded`: 12 bytes, alignment: 4 bytes +print-type-size field `.g`: 4 bytes +print-type-size field `.h`: 2 bytes print-type-size field `.a`: 1 bytes print-type-size field `.b`: 1 bytes -print-type-size padding: 2 bytes -print-type-size field `.g`: 4 bytes, alignment: 4 bytes print-type-size field `.c`: 1 bytes -print-type-size padding: 1 bytes -print-type-size field `.h`: 2 bytes, alignment: 2 bytes print-type-size field `.d`: 1 bytes -print-type-size end padding: 3 bytes +print-type-size end padding: 2 bytes print-type-size type: `Packed`: 10 bytes, alignment: 1 bytes print-type-size field `.a`: 1 bytes print-type-size field `.b`: 1 bytes diff --git a/src/test/ui/print_type_sizes/padding.stdout b/src/test/ui/print_type_sizes/padding.stdout index bb95f790bd9e4..0eaff7118b35c 100644 --- a/src/test/ui/print_type_sizes/padding.stdout +++ b/src/test/ui/print_type_sizes/padding.stdout @@ -1,10 +1,12 @@ print-type-size type: `E1`: 12 bytes, alignment: 4 bytes -print-type-size discriminant: 4 bytes -print-type-size variant `A`: 5 bytes -print-type-size field `.0`: 4 bytes +print-type-size discriminant: 1 bytes +print-type-size variant `A`: 7 bytes print-type-size field `.1`: 1 bytes -print-type-size variant `B`: 8 bytes -print-type-size field `.0`: 8 bytes +print-type-size padding: 2 bytes +print-type-size field `.0`: 4 bytes, alignment: 4 bytes +print-type-size variant `B`: 11 bytes +print-type-size padding: 3 bytes +print-type-size field `.0`: 8 bytes, alignment: 4 bytes print-type-size type: `E2`: 12 bytes, alignment: 4 bytes print-type-size discriminant: 1 bytes print-type-size variant `A`: 7 bytes @@ -15,7 +17,7 @@ print-type-size variant `B`: 11 bytes print-type-size padding: 3 bytes print-type-size field `.0`: 8 bytes, alignment: 4 bytes print-type-size type: `S`: 8 bytes, alignment: 4 bytes +print-type-size field `.g`: 4 bytes print-type-size field `.a`: 1 bytes print-type-size field `.b`: 1 bytes -print-type-size padding: 2 bytes -print-type-size field `.g`: 4 bytes, alignment: 4 bytes +print-type-size end padding: 2 bytes diff --git a/src/test/ui/span/E0536.stderr b/src/test/ui/span/E0536.stderr index c33b89953e274..b2da0c6a296d8 100644 --- a/src/test/ui/span/E0536.stderr +++ b/src/test/ui/span/E0536.stderr @@ -2,7 +2,7 @@ error[E0536]: expected 1 cfg-pattern --> $DIR/E0536.rs:11:7 | 11 | #[cfg(not())] //~ ERROR E0536 - | ^^^^^ + | ^^^ error: aborting due to previous error diff --git a/src/test/ui/span/E0537.stderr b/src/test/ui/span/E0537.stderr index 9d66ddbaae317..29873943f444d 100644 --- a/src/test/ui/span/E0537.stderr +++ b/src/test/ui/span/E0537.stderr @@ -2,7 +2,7 @@ 
error[E0537]: invalid predicate `unknown` --> $DIR/E0537.rs:11:7 | 11 | #[cfg(unknown())] //~ ERROR E0537 - | ^^^^^^^^^ + | ^^^^^^^ error: aborting due to previous error
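The new `ifmt.rs` assertions earlier in this patch exercise sign-aware zero padding: with the `0` flag, padding zeros are inserted after the sign or `0x` prefix rather than before it, and the flag takes precedence over an explicit alignment. A minimal standalone sketch of that behaviour (illustrative only, not part of the patch itself; it simply restates what the added tests assert) might look like:

```rust
fn main() {
    // Zero padding counts the sign and radix prefix toward the width,
    // and places the zeros after them.
    assert_eq!(format!("{:03}", 1), "001");
    assert_eq!(format!("{:+03}", 1), "+01");
    assert_eq!(format!("{:#05x}", 1), "0x001");
    assert_eq!(format!("{:05}", -1.2), "-01.2");
    // Per the added tests, the `0` flag overrides an explicit alignment flag.
    assert_eq!(format!("{:<+05}", 1.2), "+01.2");
}
```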