From a457d67b6e0e9643d8140cb24555669ad778b1ed Mon Sep 17 00:00:00 2001
From: Matt Brubeck
Date: Thu, 9 Mar 2017 17:53:01 -0800
Subject: [PATCH] Specialize Vec::from_elem to use calloc or memset

Fixes #38723.
---
 src/doc/unstable-book/src/allocator.md |  5 ++++
 src/liballoc/heap.rs                   | 32 +++++++++++++++++++++++
 src/liballoc/raw_vec.rs                | 16 +++++++++++-
 src/liballoc_jemalloc/lib.rs           | 21 +++++++++++++++
 src/liballoc_system/lib.rs             | 36 +++++++++++++++++++++++---
 src/libcollections/vec.rs              | 35 ++++++++++++++++++++++---
 6 files changed, 137 insertions(+), 8 deletions(-)

diff --git a/src/doc/unstable-book/src/allocator.md b/src/doc/unstable-book/src/allocator.md
index 7261641698f48..cfcf8e22d7088 100644
--- a/src/doc/unstable-book/src/allocator.md
+++ b/src/doc/unstable-book/src/allocator.md
@@ -51,6 +51,11 @@ pub extern fn __rust_allocate(size: usize, _align: usize) -> *mut u8 {
     unsafe { libc::malloc(size as libc::size_t) as *mut u8 }
 }
 
+#[no_mangle]
+pub extern fn __rust_allocate_zeroed(size: usize, _align: usize) -> *mut u8 {
+    unsafe { libc::calloc(size as libc::size_t, 1) as *mut u8 }
+}
+
 #[no_mangle]
 pub extern fn __rust_deallocate(ptr: *mut u8, _old_size: usize, _align: usize) {
     unsafe { libc::free(ptr as *mut libc::c_void) }
diff --git a/src/liballoc/heap.rs b/src/liballoc/heap.rs
index 51e6f2f8bd7a6..9ec3535881341 100644
--- a/src/liballoc/heap.rs
+++ b/src/liballoc/heap.rs
@@ -23,6 +23,7 @@ use core::intrinsics::{min_align_of_val, size_of_val};
 extern "C" {
     #[allocator]
     fn __rust_allocate(size: usize, align: usize) -> *mut u8;
+    fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8;
     fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize);
     fn __rust_reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8;
     fn __rust_reallocate_inplace(ptr: *mut u8,
@@ -59,6 +60,20 @@ pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
     __rust_allocate(size, align)
 }
 
+/// Return a pointer to `size` bytes of memory aligned to `align` and
+/// initialized to zeroes.
+///
+/// On failure, return a null pointer.
+///
+/// Behavior is undefined if the requested size is 0 or the alignment is not a
+/// power of 2. The alignment must be no larger than the largest supported page
+/// size on the platform.
+#[inline]
+pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 {
+    check_size_and_alignment(size, align);
+    __rust_allocate_zeroed(size, align)
+}
+
 /// Resize the allocation referenced by `ptr` to `size` bytes.
 ///
 /// On failure, return a null pointer and leave the original allocation intact.
@@ -162,6 +177,23 @@ mod tests {
     use boxed::Box;
     use heap;
 
+    #[test]
+    fn allocate_zeroed() {
+        unsafe {
+            let size = 1024;
+            let ptr = heap::allocate_zeroed(size, 1);
+            if ptr.is_null() {
+                ::oom()
+            }
+            let mut i = ptr;
+            while i < ptr.offset(size as isize) {
+                assert_eq!(*i, 0);
+                i = i.offset(1);
+            }
+            heap::deallocate(ptr, size, 1);
+        }
+    }
+
     #[test]
     fn basic_reallocate_inplace_noop() {
         unsafe {
diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs
index 357a2724e0020..54da527976715 100644
--- a/src/liballoc/raw_vec.rs
+++ b/src/liballoc/raw_vec.rs
@@ -82,6 +82,16 @@ impl<T> RawVec<T> {
     ///
     /// Aborts on OOM
     pub fn with_capacity(cap: usize) -> Self {
+        RawVec::allocate(cap, false)
+    }
+
+    /// Like `with_capacity` but guarantees the buffer is zeroed.
+    pub fn with_capacity_zeroed(cap: usize) -> Self {
+        RawVec::allocate(cap, true)
+    }
+
+    #[inline]
+    fn allocate(cap: usize, zeroed: bool) -> Self {
         unsafe {
             let elem_size = mem::size_of::<T>();
 
@@ -93,7 +103,11 @@ impl<T> RawVec<T> {
                 heap::EMPTY as *mut u8
             } else {
                 let align = mem::align_of::<T>();
-                let ptr = heap::allocate(alloc_size, align);
+                let ptr = if zeroed {
+                    heap::allocate_zeroed(alloc_size, align)
+                } else {
+                    heap::allocate(alloc_size, align)
+                };
                 if ptr.is_null() {
                     oom()
                 }
diff --git a/src/liballoc_jemalloc/lib.rs b/src/liballoc_jemalloc/lib.rs
index a496ab870c63b..1a0a83c7f1eae 100644
--- a/src/liballoc_jemalloc/lib.rs
+++ b/src/liballoc_jemalloc/lib.rs
@@ -38,6 +38,10 @@ mod imp {
                        target_os = "dragonfly", target_os = "windows"),
                    link_name = "je_mallocx")]
         fn mallocx(size: size_t, flags: c_int) -> *mut c_void;
+        #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
+                       target_os = "dragonfly", target_os = "windows"),
+                   link_name = "je_calloc")]
+        fn calloc(nmemb: size_t, size: size_t) -> *mut c_void;
         #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
                        target_os = "dragonfly", target_os = "windows"),
                    link_name = "je_rallocx")]
@@ -56,6 +60,8 @@ mod imp {
         fn nallocx(size: size_t, flags: c_int) -> size_t;
     }
 
+    const MALLOCX_ZERO: c_int = 0x40;
+
     // The minimum alignment guaranteed by the architecture. This value is used to
     // add fast paths for low alignment values. In practice, the alignment is a
     // constant at the call site and the branch will be optimized out.
@@ -91,6 +97,16 @@ mod imp {
         unsafe { mallocx(size as size_t, flags) as *mut u8 }
     }
 
+    #[no_mangle]
+    pub extern "C" fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8 {
+        if align <= MIN_ALIGN {
+            unsafe { calloc(size as size_t, 1) as *mut u8 }
+        } else {
+            let flags = align_to_flags(align) | MALLOCX_ZERO;
+            unsafe { mallocx(size as size_t, flags) as *mut u8 }
+        }
+    }
+
     #[no_mangle]
     pub extern "C" fn __rust_reallocate(ptr: *mut u8,
                                         _old_size: usize,
@@ -135,6 +151,11 @@ mod imp {
         bogus()
     }
 
+    #[no_mangle]
+    pub extern "C" fn __rust_allocate_zeroed(_size: usize, _align: usize) -> *mut u8 {
+        bogus()
+    }
+
     #[no_mangle]
     pub extern "C" fn __rust_reallocate(_ptr: *mut u8,
                                         _old_size: usize,
diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs
index de2b75f62b68a..fbe1fb8832ee6 100644
--- a/src/liballoc_system/lib.rs
+++ b/src/liballoc_system/lib.rs
@@ -1,4 +1,4 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
 // file at the top-level directory of this distribution and at
 // http://rust-lang.org/COPYRIGHT.
 //
@@ -44,6 +44,11 @@ pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 {
     unsafe { imp::allocate(size, align) }
 }
 
+#[no_mangle]
+pub extern "C" fn __rust_allocate_zeroed(size: usize, align: usize) -> *mut u8 {
+    unsafe { imp::allocate_zeroed(size, align) }
+}
+
 #[no_mangle]
 pub extern "C" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) {
     unsafe { imp::deallocate(ptr, old_size, align) }
@@ -121,6 +126,18 @@ mod imp {
         }
     }
 
+    pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 {
+        if align <= MIN_ALIGN {
+            libc::calloc(size as libc::size_t, 1) as *mut u8
+        } else {
+            let ptr = aligned_malloc(size, align);
+            if !ptr.is_null() {
+                ptr::write_bytes(ptr, 0, size);
+            }
+            ptr
+        }
+    }
+
     pub unsafe fn reallocate(ptr: *mut u8, old_size: usize, size: usize, align: usize) -> *mut u8 {
         if align <= MIN_ALIGN {
             libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
@@ -173,6 +190,8 @@ mod imp {
 
     #[repr(C)]
     struct Header(*mut u8);
+
+    const HEAP_ZERO_MEMORY: DWORD = 0x00000008;
     const HEAP_REALLOC_IN_PLACE_ONLY: DWORD = 0x00000010;
 
     unsafe fn get_header<'a>(ptr: *mut u8) -> &'a mut Header {
@@ -185,11 +204,12 @@ mod imp {
         aligned
     }
 
-    pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
+    #[inline]
+    unsafe fn allocate_with_flags(size: usize, align: usize, flags: DWORD) -> *mut u8 {
         if align <= MIN_ALIGN {
-            HeapAlloc(GetProcessHeap(), 0, size as SIZE_T) as *mut u8
+            HeapAlloc(GetProcessHeap(), flags, size as SIZE_T) as *mut u8
         } else {
-            let ptr = HeapAlloc(GetProcessHeap(), 0, (size + align) as SIZE_T) as *mut u8;
+            let ptr = HeapAlloc(GetProcessHeap(), flags, (size + align) as SIZE_T) as *mut u8;
             if ptr.is_null() {
                 return ptr;
             }
@@ -197,6 +217,14 @@ mod imp {
         }
     }
 
+    pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 {
+        allocate_with_flags(size, align, 0)
+    }
+
+    pub unsafe fn allocate_zeroed(size: usize, align: usize) -> *mut u8 {
+        allocate_with_flags(size, align, HEAP_ZERO_MEMORY)
+    }
+
     pub unsafe fn reallocate(ptr: *mut u8, _old_size: usize, size: usize, align: usize) -> *mut u8 {
         if align <= MIN_ALIGN {
             HeapReAlloc(GetProcessHeap(), 0, ptr as LPVOID, size as SIZE_T) as *mut u8
diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs
index d38c9f6e1cf80..714b5e463c3ad 100644
--- a/src/libcollections/vec.rs
+++ b/src/libcollections/vec.rs
@@ -1365,9 +1365,38 @@
 #[doc(hidden)]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
-    let mut v = Vec::with_capacity(n);
-    v.extend_with_element(n, elem);
-    v
+    <T as SpecFromElem>::from_elem(elem, n)
+}
+
+// Specialization trait used for Vec::from_elem
+trait SpecFromElem: Sized {
+    fn from_elem(elem: Self, n: usize) -> Vec<Self>;
+}
+
+impl<T: Clone> SpecFromElem for T {
+    default fn from_elem(elem: Self, n: usize) -> Vec<Self> {
+        let mut v = Vec::with_capacity(n);
+        v.extend_with_element(n, elem);
+        v
+    }
+}
+
+impl SpecFromElem for u8 {
+    #[inline]
+    fn from_elem(elem: u8, n: usize) -> Vec<u8> {
+        if elem == 0 {
+            return Vec {
+                buf: RawVec::with_capacity_zeroed(n),
+                len: n,
+            }
+        }
+        unsafe {
+            let mut v = Vec::with_capacity(n);
+            ptr::write_bytes(v.as_mut_ptr(), elem, n);
+            v.set_len(n);
+            v
+        }
+    }
 }
 
 ////////////////////////////////////////////////////////////////////////////////
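
A usage sketch (illustrative only, not part of the patch): `vec![elem; n]` expands to the hidden `from_elem` function modified above, so with this change a zero-filled `Vec<u8>` comes back from the allocator already zeroed (calloc, `mallocx` with `MALLOCX_ZERO`, or `HeapAlloc` with `HEAP_ZERO_MEMORY`), while any other byte value is written with a single `ptr::write_bytes` (memset) call instead of being pushed element by element.

    // Sketch: both vectors below are built through the from_elem path.
    fn main() {
        let n = 1 << 20;

        // Zero pattern: takes the new RawVec::with_capacity_zeroed path,
        // so the buffer arrives pre-zeroed from the allocator.
        let zeros = vec![0u8; n];

        // Non-zero pattern: the u8 specialization fills the buffer with
        // one ptr::write_bytes (memset) call.
        let ones = vec![1u8; n];

        assert!(zeros.iter().all(|&b| b == 0));
        assert!(ones.iter().all(|&b| b == 1));
    }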