
Remove unnecessary explicit conversions from *mut T to *const T #21375

Merged · 1 commit · Jan 21, 2015
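
The casts removed in this PR are redundant because Rust coerces `*mut T` to `*const T` implicitly wherever a `*const T` is expected. A minimal sketch of that coercion (hypothetical example, not taken from the diff below):

```rust
// `*mut T` coerces to `*const T` implicitly, so the explicit
// `as *const T` cast on the first call is unnecessary.
fn read_through(p: *const i32) -> i32 {
    unsafe { *p }
}

fn main() {
    let mut x = 5;
    let p: *mut i32 = &mut x;
    let a = read_through(p as *const i32); // explicit cast: allowed but redundant
    let b = read_through(p);               // implicit coercion does the same thing
    assert_eq!(a, b);
}
```

The reverse direction (`*const T` to `*mut T`) still requires an explicit `as` cast, which is why casts such as `as *mut T` remain in the diff (for example in `src/libcore/ptr.rs`).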
2 changes: 1 addition & 1 deletion src/doc/trpl/unsafe.md
@@ -254,7 +254,7 @@ impl<T: Send> Drop for Unique<T> {
// Copy the object out from the pointer onto the stack,
// where it is covered by normal Rust destructor semantics
// and cleans itself up, if necessary
-ptr::read(self.ptr as *const T);
+ptr::read(self.ptr);
// clean-up our allocation
free(self.ptr as *mut c_void)
2 changes: 1 addition & 1 deletion src/liballoc/heap.rs
@@ -298,7 +298,7 @@ mod imp {
libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
} else {
let new_ptr = allocate(size, align);
-ptr::copy_memory(new_ptr, ptr as *const u8, cmp::min(size, old_size));
+ptr::copy_memory(new_ptr, ptr, cmp::min(size, old_size));
deallocate(ptr, old_size, align);
new_ptr
}
6 changes: 3 additions & 3 deletions src/libcollections/btree/node.rs
@@ -326,11 +326,11 @@ impl<K, V> Node<K, V> {
pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
unsafe {(
mem::transmute(raw::Slice {
-data: self.keys.0 as *const K,
+data: self.keys.0,
len: self.len()
}),
mem::transmute(raw::Slice {
-data: self.vals.0 as *const V,
+data: self.vals.0,
len: self.len()
})
)}
@@ -349,7 +349,7 @@ impl<K, V> Node<K, V> {
} else {
unsafe {
mem::transmute(raw::Slice {
-data: self.edges.0 as *const Node<K, V>,
+data: self.edges.0,
len: self.len() + 1
})
}
6 changes: 3 additions & 3 deletions src/libcollections/ring_buf.rs
@@ -88,19 +88,19 @@ impl<T> RingBuf<T> {
/// Turn ptr into a slice
#[inline]
unsafe fn buffer_as_slice(&self) -> &[T] {
-mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap })
+mem::transmute(RawSlice { data: self.ptr, len: self.cap })
}

/// Turn ptr into a mut slice
#[inline]
unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
-mem::transmute(RawSlice { data: self.ptr as *const T, len: self.cap })
+mem::transmute(RawSlice { data: self.ptr, len: self.cap })
}

/// Moves an element out of the buffer
#[inline]
unsafe fn buffer_read(&mut self, off: uint) -> T {
-ptr::read(self.ptr.offset(off as int) as *const T)
+ptr::read(self.ptr.offset(off as int))
}

/// Writes an element into the buffer, moving it.
2 changes: 1 addition & 1 deletion src/libcollections/slice.rs
@@ -1222,7 +1222,7 @@ fn insertion_sort<T, F>(v: &mut [T], mut compare: F) where F: FnMut(&T, &T) -> O
&*buf_v.offset(j),
(i - j) as uint);
ptr::copy_nonoverlapping_memory(buf_v.offset(j),
-&tmp as *const T,
+&tmp,
1);
mem::forget(tmp);
}
12 changes: 6 additions & 6 deletions src/libcollections/vec.rs
@@ -426,7 +426,7 @@ impl<T> Vec<T> {
pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
unsafe {
mem::transmute(RawSlice {
-data: *self.ptr as *const T,
+data: *self.ptr,
len: self.len,
})
}
@@ -574,7 +574,7 @@ impl<T> Vec<T> {
let ptr = self.as_mut_ptr().offset(index as int);
// copy it out, unsafely having a copy of the value on
// the stack and in the vector at the same time.
-ret = ptr::read(ptr as *const T);
+ret = ptr::read(ptr);

// Shift everything down to fill in that spot.
ptr::copy_memory(ptr, &*ptr.offset(1), len - index - 1);
@@ -842,7 +842,7 @@ impl<T> Vec<T> {
// | |
// end_u end_t

-let t = ptr::read(pv.start_t as *const T);
+let t = ptr::read(pv.start_t);
// start_u start_t
// | |
// +-+-+-+-+-+-+-+-+-+
@@ -1414,7 +1414,7 @@ impl<T> AsSlice<T> for Vec<T> {
fn as_slice<'a>(&'a self) -> &'a [T] {
unsafe {
mem::transmute(RawSlice {
-data: *self.ptr as *const T,
+data: *self.ptr,
len: self.len
})
}
@@ -1777,11 +1777,11 @@ impl<T,U> Drop for PartialVecNonZeroSized<T,U> {

// We have instances of `U`s and `T`s in `vec`. Destruct them.
while self.start_u != self.end_u {
-let _ = ptr::read(self.start_u as *const U); // Run a `U` destructor.
+let _ = ptr::read(self.start_u); // Run a `U` destructor.
self.start_u = self.start_u.offset(1);
}
while self.start_t != self.end_t {
-let _ = ptr::read(self.start_t as *const T); // Run a `T` destructor.
+let _ = ptr::read(self.start_t); // Run a `T` destructor.
self.start_t = self.start_t.offset(1);
}
// After this destructor ran, the destructor of `vec` will run,
12 changes: 6 additions & 6 deletions src/libcore/atomic.rs
@@ -199,7 +199,7 @@ impl AtomicBool {
#[inline]
#[stable]
pub fn load(&self, order: Ordering) -> bool {
-unsafe { atomic_load(self.v.get() as *const usize, order) > 0 }
+unsafe { atomic_load(self.v.get(), order) > 0 }
}

/// Stores a value into the bool.
@@ -438,7 +438,7 @@ impl AtomicIsize {
/// ```
#[inline]
pub fn load(&self, order: Ordering) -> isize {
-unsafe { atomic_load(self.v.get() as *const isize, order) }
+unsafe { atomic_load(self.v.get(), order) }
}

/// Stores a value into the isize.
@@ -615,7 +615,7 @@ impl AtomicUsize {
/// ```
#[inline]
pub fn load(&self, order: Ordering) -> usize {
-unsafe { atomic_load(self.v.get() as *const usize, order) }
+unsafe { atomic_load(self.v.get(), order) }
}

/// Stores a value into the usize.
@@ -796,7 +796,7 @@ impl<T> AtomicPtr<T> {
#[stable]
pub fn load(&self, order: Ordering) -> *mut T {
unsafe {
-atomic_load(self.p.get() as *const *mut T, order) as *mut T
+atomic_load(self.p.get(), order) as *mut T
}
}

@@ -1070,7 +1070,7 @@ impl AtomicInt {

#[inline]
pub fn load(&self, order: Ordering) -> int {
-unsafe { atomic_load(self.v.get() as *const int, order) }
+unsafe { atomic_load(self.v.get(), order) }
}

#[inline]
@@ -1123,7 +1123,7 @@ impl AtomicUint {

#[inline]
pub fn load(&self, order: Ordering) -> uint {
-unsafe { atomic_load(self.v.get() as *const uint, order) }
+unsafe { atomic_load(self.v.get(), order) }
}

#[inline]
2 changes: 1 addition & 1 deletion src/libcore/ptr.rs
@@ -329,7 +329,7 @@ impl<T> PtrExt for *mut T {
#[inline]
#[stable]
unsafe fn offset(self, count: int) -> *mut T {
-intrinsics::offset(self as *const T, count) as *mut T
+intrinsics::offset(self, count) as *mut T
}

#[inline]
4 changes: 2 additions & 2 deletions src/libcore/slice.rs
@@ -741,7 +741,7 @@ macro_rules! make_slice {
diff / mem::size_of::<$t>()
};
unsafe {
-transmute::<_, $result>(RawSlice { data: $start as *const T, len: len })
+transmute::<_, $result>(RawSlice { data: $start, len: len })
}
}}
}
@@ -1409,7 +1409,7 @@ pub unsafe fn from_raw_buf<'a, T>(p: &'a *const T, len: uint) -> &'a [T] {
#[inline]
#[unstable = "should be renamed to from_raw_parts_mut"]
pub unsafe fn from_raw_mut_buf<'a, T>(p: &'a *mut T, len: uint) -> &'a mut [T] {
-transmute(RawSlice { data: *p as *const T, len: len })
+transmute(RawSlice { data: *p, len: len })
}

//
2 changes: 1 addition & 1 deletion src/librustc_trans/trans/builder.rs
@@ -33,7 +33,7 @@ pub struct Builder<'a, 'tcx: 'a> {
// lot more efficient) than doing str::as_c_str("", ...) every time.
pub fn noname() -> *const c_char {
static CNULL: c_char = 0;
-&CNULL as *const c_char
+&CNULL
}

impl<'a, 'tcx> Builder<'a, 'tcx> {
4 changes: 2 additions & 2 deletions src/librustdoc/flock.rs
@@ -126,7 +126,7 @@ mod imp {
l_sysid: 0,
};
let ret = unsafe {
-libc::fcntl(fd, os::F_SETLKW, &flock as *const os::flock)
+libc::fcntl(fd, os::F_SETLKW, &flock)
};
if ret == -1 {
unsafe { libc::close(fd); }
@@ -147,7 +147,7 @@
l_sysid: 0,
};
unsafe {
-libc::fcntl(self.fd, os::F_SETLK, &flock as *const os::flock);
+libc::fcntl(self.fd, os::F_SETLK, &flock);
libc::close(self.fd);
}
}
23 changes: 10 additions & 13 deletions src/libstd/collections/hash/table.rs
@@ -395,9 +395,6 @@ impl<K, V, M: Deref<Target=RawTable<K, V>> + DerefMut> FullBucket<K, V, M> {
/// This works similarly to `put`, building an `EmptyBucket` out of the
/// taken bucket.
pub fn take(mut self) -> (EmptyBucket<K, V, M>, K, V) {
-let key = self.raw.key as *const K;
-let val = self.raw.val as *const V;
-
self.table.size -= 1;

unsafe {
@@ -408,8 +405,8 @@ impl<K, V, M: Deref<Target=RawTable<K, V>> + DerefMut> FullBucket<K, V, M> {
idx: self.idx,
table: self.table
},
-ptr::read(key),
-ptr::read(val)
+ptr::read(self.raw.key),
+ptr::read(self.raw.val)
)
}
}
@@ -477,8 +474,8 @@ impl<K, V, M: Deref<Target=RawTable<K, V>>> GapThenFull<K, V, M> {
pub fn shift(mut self) -> Option<GapThenFull<K, V, M>> {
unsafe {
*self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET);
-copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key as *const K, 1);
-copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val as *const V, 1);
+copy_nonoverlapping_memory(self.gap.raw.key, self.full.raw.key, 1);
+copy_nonoverlapping_memory(self.gap.raw.val, self.full.raw.val, 1);
}

let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full;
@@ -781,8 +778,8 @@ impl<'a, K, V> Iterator for RevMoveBuckets<'a, K, V> {
if *self.raw.hash != EMPTY_BUCKET {
self.elems_left -= 1;
return Some((
-ptr::read(self.raw.key as *const K),
-ptr::read(self.raw.val as *const V)
+ptr::read(self.raw.key),
+ptr::read(self.raw.val)
));
}
}
@@ -878,8 +875,8 @@ impl<K, V> Iterator for IntoIter<K, V> {
SafeHash {
hash: *bucket.hash,
},
-ptr::read(bucket.key as *const K),
-ptr::read(bucket.val as *const V)
+ptr::read(bucket.key),
+ptr::read(bucket.val)
)
}
})
@@ -906,8 +903,8 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> {
SafeHash {
hash: ptr::replace(bucket.hash, EMPTY_BUCKET),
},
-ptr::read(bucket.key as *const K),
-ptr::read(bucket.val as *const V)
+ptr::read(bucket.key),
+ptr::read(bucket.val)
)
}
})
2 changes: 1 addition & 1 deletion src/libstd/sys/unix/backtrace.rs
@@ -229,7 +229,7 @@ fn print(w: &mut Writer, idx: int, addr: *mut libc::c_void) -> IoResult<()> {
}

let mut info: Dl_info = unsafe { intrinsics::init() };
-if unsafe { dladdr(addr as *const libc::c_void, &mut info) == 0 } {
+if unsafe { dladdr(addr, &mut info) == 0 } {
output(w, idx,addr, None)
} else {
output(w, idx, addr, Some(unsafe {
2 changes: 1 addition & 1 deletion src/libstd/thread_local/mod.rs
@@ -449,7 +449,7 @@ mod imp {
// destructor as running for this thread so calls to `get` will return
// `None`.
*(*ptr).dtor_running.get() = true;
-ptr::read((*ptr).inner.get() as *const T);
+ptr::read((*ptr).inner.get());
}
}
