From 21dd9963223ed5a493d517529788cb9a27e0b18e Mon Sep 17 00:00:00 2001 From: HyeonuPark Date: Sat, 20 Aug 2022 18:51:25 +0900 Subject: [PATCH 1/5] change to_vec to into_vec and takes &mut AtomicPtr<()> instead of &AtomicPtr<()> --- src/bytes.rs | 45 +++++++++++++++++++++++++-------------------- src/bytes_mut.rs | 6 +++--- 2 files changed, 28 insertions(+), 23 deletions(-) diff --git a/src/bytes.rs b/src/bytes.rs index f8d3ce319..3afff8926 100644 --- a/src/bytes.rs +++ b/src/bytes.rs @@ -110,8 +110,8 @@ pub(crate) struct Vtable { pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes, /// fn(data, ptr, len) /// - /// takes `Bytes` to value - pub to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec, + /// Consumes `Bytes` to return `Vec` + pub into_vec: unsafe fn(&mut AtomicPtr<()>, *const u8, usize) -> Vec, /// fn(data, ptr, len) pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize), } @@ -851,8 +851,8 @@ impl From for Bytes { impl From for Vec { fn from(bytes: Bytes) -> Vec { - let bytes = mem::ManuallyDrop::new(bytes); - unsafe { (bytes.vtable.to_vec)(&bytes.data, bytes.ptr, bytes.len) } + let bytes = &mut *mem::ManuallyDrop::new(bytes); + unsafe { (bytes.vtable.into_vec)(&mut bytes.data, bytes.ptr, bytes.len) } } } @@ -862,6 +862,7 @@ impl fmt::Debug for Vtable { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Vtable") .field("clone", &(self.clone as *const ())) + .field("into_vec", &(self.into_vec as *const ())) .field("drop", &(self.drop as *const ())) .finish() } @@ -871,7 +872,7 @@ impl fmt::Debug for Vtable { const STATIC_VTABLE: Vtable = Vtable { clone: static_clone, - to_vec: static_to_vec, + into_vec: static_into_vec, drop: static_drop, }; @@ -880,7 +881,7 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { Bytes::from_static(slice) } -unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { +unsafe fn static_into_vec(_: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { let slice = slice::from_raw_parts(ptr, len); slice.to_vec() } @@ -893,13 +894,13 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable { clone: promotable_even_clone, - to_vec: promotable_even_to_vec, + into_vec: promotable_even_into_vec, drop: promotable_even_drop, }; static PROMOTABLE_ODD_VTABLE: Vtable = Vtable { clone: promotable_odd_clone, - to_vec: promotable_odd_to_vec, + into_vec: promotable_odd_into_vec, drop: promotable_odd_drop, }; @@ -916,17 +917,17 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize } } -unsafe fn promotable_to_vec( - data: &AtomicPtr<()>, +unsafe fn promotable_into_vec( + data: &mut AtomicPtr<()>, ptr: *const u8, len: usize, f: fn(*mut ()) -> *mut u8, ) -> Vec { - let shared = data.load(Ordering::Acquire); + let shared = data.with_mut(|p| *p); let kind = shared as usize & KIND_MASK; if kind == KIND_ARC { - shared_to_vec_impl(shared.cast(), ptr, len) + shared_into_vec_impl(shared.cast(), ptr, len) } else { // If Bytes holds a Vec, then the offset must be 0. 
debug_assert_eq!(kind, KIND_VEC); @@ -942,8 +943,12 @@ unsafe fn promotable_to_vec( } } -unsafe fn promotable_even_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { - promotable_to_vec(data, ptr, len, |shared| { +unsafe fn promotable_even_into_vec( + data: &mut AtomicPtr<()>, + ptr: *const u8, + len: usize, +) -> Vec { + promotable_into_vec(data, ptr, len, |shared| { ptr_map(shared.cast(), |addr| addr & !KIND_MASK) }) } @@ -975,8 +980,8 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) } } -unsafe fn promotable_odd_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { - promotable_to_vec(data, ptr, len, |shared| shared.cast()) +unsafe fn promotable_odd_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { + promotable_into_vec(data, ptr, len, |shared| shared.cast()) } unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { @@ -1022,7 +1027,7 @@ const _: [(); 0 - mem::align_of::() % 2] = []; // Assert that the alignm static SHARED_VTABLE: Vtable = Vtable { clone: shared_clone, - to_vec: shared_to_vec, + into_vec: shared_into_vec, drop: shared_drop, }; @@ -1035,7 +1040,7 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte shallow_clone_arc(shared as _, ptr, len) } -unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec { +unsafe fn shared_into_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec { // Check that the ref_cnt is 1 (unique). // // If it is unique, then it is set to 0 with AcqRel fence for the same @@ -1064,8 +1069,8 @@ unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> } } -unsafe fn shared_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { - shared_to_vec_impl(data.load(Ordering::Relaxed).cast(), ptr, len) +unsafe fn shared_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { + shared_into_vec_impl((data.with_mut(|p| *p)).cast(), ptr, len) } unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) { diff --git a/src/bytes_mut.rs b/src/bytes_mut.rs index a292ca7bd..ed4559abd 100644 --- a/src/bytes_mut.rs +++ b/src/bytes_mut.rs @@ -1675,7 +1675,7 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize) static SHARED_VTABLE: Vtable = Vtable { clone: shared_v_clone, - to_vec: shared_v_to_vec, + into_vec: shared_v_into_vec, drop: shared_v_drop, }; @@ -1687,8 +1687,8 @@ unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> By Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE) } -unsafe fn shared_v_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { - let shared: *mut Shared = data.load(Ordering::Relaxed).cast(); +unsafe fn shared_v_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { + let shared: *mut Shared = (data.with_mut(|p| *p)).cast(); if (*shared).is_unique() { let shared = &mut *shared; From 40208415abc92e124c825568a04d3504c3c1c8ba Mon Sep 17 00:00:00 2001 From: HyeonuPark Date: Sat, 20 Aug 2022 19:22:10 +0900 Subject: [PATCH 2/5] add will_truncate to Bytes vtable to prevent special casing promotable ones --- src/bytes.rs | 44 +++++++++++++++++++++++++++++++++++--------- src/bytes_mut.rs | 5 +++++ 2 files changed, 40 insertions(+), 9 deletions(-) diff --git a/src/bytes.rs b/src/bytes.rs index 3afff8926..d45ea43ff 100644 --- a/src/bytes.rs +++ b/src/bytes.rs @@ -110,6 +110,11 @@ pub(crate) struct Vtable { pub clone: unsafe 
fn(&AtomicPtr<()>, *const u8, usize) -> Bytes, /// fn(data, ptr, len) /// + /// Called before the `Bytes::truncate` is processed. + /// Useful if the implementation needs some preparation step for it. + pub will_truncate: unsafe fn(&mut AtomicPtr<()>, *const u8, usize), + /// fn(data, ptr, len) + /// /// Consumes `Bytes` to return `Vec` pub into_vec: unsafe fn(&mut AtomicPtr<()>, *const u8, usize) -> Vec, /// fn(data, ptr, len) @@ -455,16 +460,10 @@ impl Bytes { #[inline] pub fn truncate(&mut self, len: usize) { if len < self.len { - // The Vec "promotable" vtables do not store the capacity, - // so we cannot truncate while using this repr. We *have* to - // promote using `split_off` so the capacity can be stored. - if self.vtable as *const Vtable == &PROMOTABLE_EVEN_VTABLE - || self.vtable as *const Vtable == &PROMOTABLE_ODD_VTABLE - { - drop(self.split_off(len)); - } else { - self.len = len; + unsafe { + (self.vtable.will_truncate)(&mut self.data, self.ptr, self.len); } + self.len = len; } } @@ -862,6 +861,7 @@ impl fmt::Debug for Vtable { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Vtable") .field("clone", &(self.clone as *const ())) + .field("will_truncate", &(self.will_truncate as *const ())) .field("into_vec", &(self.into_vec as *const ())) .field("drop", &(self.drop as *const ())) .finish() @@ -872,6 +872,7 @@ impl fmt::Debug for Vtable { const STATIC_VTABLE: Vtable = Vtable { clone: static_clone, + will_truncate: static_will_truncate, into_vec: static_into_vec, drop: static_drop, }; @@ -881,6 +882,10 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { Bytes::from_static(slice) } +unsafe fn static_will_truncate(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { + // nothing to do before truncate for &'static [u8] +} + unsafe fn static_into_vec(_: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { let slice = slice::from_raw_parts(ptr, len); slice.to_vec() @@ -894,12 +899,14 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable { clone: promotable_even_clone, + will_truncate: promotable_even_will_truncate, into_vec: promotable_even_into_vec, drop: promotable_even_drop, }; static PROMOTABLE_ODD_VTABLE: Vtable = Vtable { clone: promotable_odd_clone, + will_truncate: promotable_odd_will_truncate, into_vec: promotable_odd_into_vec, drop: promotable_odd_drop, }; @@ -917,6 +924,13 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize } } +unsafe fn promotable_even_will_truncate(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { + // The Vec "promotable" vtables do not store the capacity, + // so we cannot truncate while using this repr. We *have* to + // promote using `clone` so the capacity can be stored. + drop(promotable_even_clone(&*data, ptr, len)); +} + unsafe fn promotable_into_vec( data: &mut AtomicPtr<()>, ptr: *const u8, @@ -980,6 +994,13 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) } } +unsafe fn promotable_odd_will_truncate(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { + // The Vec "promotable" vtables do not store the capacity, + // so we cannot truncate while using this repr. We *have* to + // promote using `clone` so the capacity can be stored. 
+ drop(promotable_odd_clone(&*data, ptr, len)); +} + unsafe fn promotable_odd_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { promotable_into_vec(data, ptr, len, |shared| shared.cast()) } @@ -1027,6 +1048,7 @@ const _: [(); 0 - mem::align_of::() % 2] = []; // Assert that the alignm static SHARED_VTABLE: Vtable = Vtable { clone: shared_clone, + will_truncate: shared_will_truncate, into_vec: shared_into_vec, drop: shared_drop, }; @@ -1040,6 +1062,10 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Byte shallow_clone_arc(shared as _, ptr, len) } +unsafe fn shared_will_truncate(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { + // nothing to do before truncate for Shared +} + unsafe fn shared_into_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec { // Check that the ref_cnt is 1 (unique). // diff --git a/src/bytes_mut.rs b/src/bytes_mut.rs index ed4559abd..8db653844 100644 --- a/src/bytes_mut.rs +++ b/src/bytes_mut.rs @@ -1675,6 +1675,7 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize) static SHARED_VTABLE: Vtable = Vtable { clone: shared_v_clone, + will_truncate: shared_v_will_truncate, into_vec: shared_v_into_vec, drop: shared_v_drop, }; @@ -1687,6 +1688,10 @@ unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> By Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE) } +unsafe fn shared_v_will_truncate(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { + // nothing to do before truncate for Shared +} + unsafe fn shared_v_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { let shared: *mut Shared = (data.with_mut(|p| *p)).cast(); From 461c39bc3869ca286d22af69fb6ed7d071d79c07 Mon Sep 17 00:00:00 2001 From: HyeonuPark Date: Sat, 20 Aug 2022 20:27:01 +0900 Subject: [PATCH 3/5] add `Bytes::with_impl` and related trait --- src/bytes.rs | 48 +++++++++++++++++++++++++++++++++++++++++++++++- src/lib.rs | 2 +- 2 files changed, 48 insertions(+), 2 deletions(-) diff --git a/src/bytes.rs b/src/bytes.rs index d45ea43ff..9f7933369 100644 --- a/src/bytes.rs +++ b/src/bytes.rs @@ -104,6 +104,32 @@ pub struct Bytes { data: AtomicPtr<()>, vtable: &'static Vtable, } +/// A trait for underlying implementations for `Bytes` type. +/// +/// All implementations must fulfill the following requirements: +/// - They are cheaply cloneable and thereby shareable between an unlimited amount +/// of components, for example by modifying a reference count. +/// - Instances can be sliced to refer to a subset of the the original buffer. +pub unsafe trait BytesImpl: 'static { + /// Decompose `Self` into parts used by `Bytes`. + fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize); + + /// Returns new `Bytes` based on the current parts. + unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes; + + /// Called before the `Bytes::truncate` is processed. + /// Useful if the implementation needs some preparation step for it. + unsafe fn will_truncate(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { + // do nothing by default + let _ = (data, ptr, len); + } + + /// Consumes underlying resources and return `Vec` + unsafe fn into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec; + + /// Release underlying resources. 
+ unsafe fn drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize); +} pub(crate) struct Vtable { /// fn(data, ptr, len) @@ -115,7 +141,7 @@ pub(crate) struct Vtable { pub will_truncate: unsafe fn(&mut AtomicPtr<()>, *const u8, usize), /// fn(data, ptr, len) /// - /// Consumes `Bytes` to return `Vec` + /// Consumes `Bytes` and return `Vec` pub into_vec: unsafe fn(&mut AtomicPtr<()>, *const u8, usize) -> Vec, /// fn(data, ptr, len) pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize), @@ -183,6 +209,26 @@ impl Bytes { } } + /// Creates a new `Bytes` from `BytesImpl` implementation. + /// + /// Useful if you want to construct `Bytes` from your own buffer implementation. + #[inline] + pub fn with_impl(bytes_impl: T) -> Bytes { + let (data, ptr, len) = BytesImpl::into_bytes_parts(bytes_impl); + + Bytes { + ptr, + len, + data, + vtable: &Vtable { + clone: T::clone, + will_truncate: T::will_truncate, + into_vec: T::into_vec, + drop: T::drop, + }, + } + } + /// Returns the number of bytes contained in this `Bytes`. /// /// # Examples diff --git a/src/lib.rs b/src/lib.rs index 706735e3d..a8cc07af2 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -87,7 +87,7 @@ mod bytes; mod bytes_mut; mod fmt; mod loom; -pub use crate::bytes::Bytes; +pub use crate::bytes::{Bytes, BytesImpl}; pub use crate::bytes_mut::BytesMut; // Optional Serde support From 4231f693abcdb893716b0134aeca2292218843dd Mon Sep 17 00:00:00 2001 From: HyeonuPark Date: Sat, 20 Aug 2022 21:31:03 +0900 Subject: [PATCH 4/5] migrate existing vtable code to BytesImpl trait --- src/bytes.rs | 332 ++++++++++++++++++++++++----------------------- src/bytes_mut.rs | 75 ++++++----- 2 files changed, 206 insertions(+), 201 deletions(-) diff --git a/src/bytes.rs b/src/bytes.rs index 9f7933369..21b1dd51d 100644 --- a/src/bytes.rs +++ b/src/bytes.rs @@ -131,20 +131,20 @@ pub unsafe trait BytesImpl: 'static { unsafe fn drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize); } -pub(crate) struct Vtable { +struct Vtable { /// fn(data, ptr, len) - pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes, + clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes, /// fn(data, ptr, len) /// /// Called before the `Bytes::truncate` is processed. /// Useful if the implementation needs some preparation step for it. 
- pub will_truncate: unsafe fn(&mut AtomicPtr<()>, *const u8, usize), + will_truncate: unsafe fn(&mut AtomicPtr<()>, *const u8, usize), /// fn(data, ptr, len) /// /// Consumes `Bytes` and return `Vec` - pub into_vec: unsafe fn(&mut AtomicPtr<()>, *const u8, usize) -> Vec, + into_vec: unsafe fn(&mut AtomicPtr<()>, *const u8, usize) -> Vec, /// fn(data, ptr, len) - pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize), + drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize), } impl Bytes { @@ -191,6 +191,13 @@ impl Bytes { #[inline] #[cfg(not(all(loom, test)))] pub const fn from_static(bytes: &'static [u8]) -> Bytes { + const STATIC_VTABLE: Vtable = Vtable { + clone: ::clone, + will_truncate: ::will_truncate, + into_vec: ::into_vec, + drop: ::drop, + }; + Bytes { ptr: bytes.as_ptr(), len: bytes.len(), @@ -201,6 +208,13 @@ impl Bytes { #[cfg(all(loom, test))] pub fn from_static(bytes: &'static [u8]) -> Bytes { + const STATIC_VTABLE: Vtable = Vtable { + clone: ::clone, + will_truncate: ::will_truncate, + into_vec: ::into_vec, + drop: ::drop, + }; + Bytes { ptr: bytes.as_ptr(), len: bytes.len(), @@ -529,21 +543,6 @@ impl Bytes { self.truncate(0); } - #[inline] - pub(crate) unsafe fn with_vtable( - ptr: *const u8, - len: usize, - data: AtomicPtr<()>, - vtable: &'static Vtable, - ) -> Bytes { - Bytes { - ptr, - len, - data, - vtable, - } - } - // private #[inline] @@ -866,24 +865,10 @@ impl From> for Bytes { return Bytes::new(); } - let len = slice.len(); - let ptr = Box::into_raw(slice) as *mut u8; - - if ptr as usize & 0x1 == 0 { - let data = ptr_map(ptr, |addr| addr | KIND_VEC); - Bytes { - ptr, - len, - data: AtomicPtr::new(data.cast()), - vtable: &PROMOTABLE_EVEN_VTABLE, - } + if slice.as_ptr() as usize & 0x1 == 0 { + Bytes::with_impl(PromotableEvenImpl(Promotable::Owned(slice))) } else { - Bytes { - ptr, - len, - data: AtomicPtr::new(ptr.cast()), - vtable: &PROMOTABLE_ODD_VTABLE, - } + Bytes::with_impl(PromotableOddImpl(Promotable::Owned(slice))) } } } @@ -916,65 +901,97 @@ impl fmt::Debug for Vtable { // ===== impl StaticVtable ===== -const STATIC_VTABLE: Vtable = Vtable { - clone: static_clone, - will_truncate: static_will_truncate, - into_vec: static_into_vec, - drop: static_drop, -}; +struct StaticImpl(&'static [u8]); -unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { - let slice = slice::from_raw_parts(ptr, len); - Bytes::from_static(slice) -} +unsafe impl BytesImpl for StaticImpl { + fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize) { + let mut bytes = mem::ManuallyDrop::new(Bytes::from_static(this.0)); + (mem::take(&mut bytes.data), bytes.ptr, bytes.len) + } -unsafe fn static_will_truncate(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { - // nothing to do before truncate for &'static [u8] -} + unsafe fn clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { + let slice = slice::from_raw_parts(ptr, len); + Bytes::from_static(slice) + } -unsafe fn static_into_vec(_: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { - let slice = slice::from_raw_parts(ptr, len); - slice.to_vec() -} + unsafe fn into_vec(_: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { + let slice = slice::from_raw_parts(ptr, len); + slice.to_vec() + } -unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { - // nothing to drop for &'static [u8] + unsafe fn drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { + // nothing to drop for &'static [u8] + } } // ===== impl PromotableVtable ===== -static PROMOTABLE_EVEN_VTABLE: 
Vtable = Vtable { - clone: promotable_even_clone, - will_truncate: promotable_even_will_truncate, - into_vec: promotable_even_into_vec, - drop: promotable_even_drop, -}; +struct PromotableEvenImpl(Promotable); -static PROMOTABLE_ODD_VTABLE: Vtable = Vtable { - clone: promotable_odd_clone, - will_truncate: promotable_odd_will_truncate, - into_vec: promotable_odd_into_vec, - drop: promotable_odd_drop, -}; +struct PromotableOddImpl(Promotable); -unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { - let shared = data.load(Ordering::Acquire); - let kind = shared as usize & KIND_MASK; +enum Promotable { + Owned(Box<[u8]>), + #[allow(dead_code)] + Shared(SharedImpl), +} - if kind == KIND_ARC { - shallow_clone_arc(shared.cast(), ptr, len) - } else { - debug_assert_eq!(kind, KIND_VEC); - let buf = ptr_map(shared.cast(), |addr| addr & !KIND_MASK); - shallow_clone_vec(data, shared, buf, ptr, len) +unsafe impl BytesImpl for PromotableEvenImpl { + fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize) { + let slice = match this.0 { + Promotable::Owned(slice) => slice, + Promotable::Shared(shared) => return SharedImpl::into_bytes_parts(shared), + }; + + let len = slice.len(); + let ptr = Box::into_raw(slice) as *mut u8; + assert!(ptr as usize & 0x1 == 0); + + let data = ptr_map(ptr, |addr| addr | KIND_VEC); + + (AtomicPtr::new(data.cast()), ptr, len) + } + + unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { + let shared = data.load(Ordering::Acquire); + let kind = shared as usize & KIND_MASK; + + if kind == KIND_ARC { + shallow_clone_arc(shared.cast(), ptr, len) + } else { + debug_assert_eq!(kind, KIND_VEC); + let buf = ptr_map(shared.cast(), |addr| addr & !KIND_MASK); + shallow_clone_vec(data, shared, buf, ptr, len) + } + } + + unsafe fn will_truncate(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { + // The Vec "promotable" vtables do not store the capacity, + // so we cannot truncate while using this repr. We *have* to + // promote using `clone` so the capacity can be stored. + drop(PromotableEvenImpl::clone(&*data, ptr, len)); } -} -unsafe fn promotable_even_will_truncate(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { - // The Vec "promotable" vtables do not store the capacity, - // so we cannot truncate while using this repr. We *have* to - // promote using `clone` so the capacity can be stored. 
- drop(promotable_even_clone(&*data, ptr, len)); + unsafe fn into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { + promotable_into_vec(data, ptr, len, |shared| { + ptr_map(shared.cast(), |addr| addr & !KIND_MASK) + }) + } + + unsafe fn drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { + data.with_mut(|shared| { + let shared = *shared; + let kind = shared as usize & KIND_MASK; + + if kind == KIND_ARC { + release_shared(shared.cast()); + } else { + debug_assert_eq!(kind, KIND_VEC); + let buf = ptr_map(shared.cast(), |addr| addr & !KIND_MASK); + free_boxed_slice(buf, ptr, len); + } + }); + } } unsafe fn promotable_into_vec( @@ -1003,67 +1020,57 @@ unsafe fn promotable_into_vec( } } -unsafe fn promotable_even_into_vec( - data: &mut AtomicPtr<()>, - ptr: *const u8, - len: usize, -) -> Vec { - promotable_into_vec(data, ptr, len, |shared| { - ptr_map(shared.cast(), |addr| addr & !KIND_MASK) - }) -} +unsafe impl BytesImpl for PromotableOddImpl { + fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize) { + let slice = match this.0 { + Promotable::Owned(slice) => slice, + Promotable::Shared(shared) => return SharedImpl::into_bytes_parts(shared), + }; -unsafe fn promotable_even_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { - data.with_mut(|shared| { - let shared = *shared; + let len = slice.len(); + let ptr = Box::into_raw(slice) as *mut u8; + assert!(ptr as usize & 0x1 == 1); + + (AtomicPtr::new(ptr.cast()), ptr, len) + } + + unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { + let shared = data.load(Ordering::Acquire); let kind = shared as usize & KIND_MASK; if kind == KIND_ARC { - release_shared(shared.cast()); + shallow_clone_arc(shared as _, ptr, len) } else { debug_assert_eq!(kind, KIND_VEC); - let buf = ptr_map(shared.cast(), |addr| addr & !KIND_MASK); - free_boxed_slice(buf, ptr, len); + shallow_clone_vec(data, shared, shared.cast(), ptr, len) } - }); -} - -unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { - let shared = data.load(Ordering::Acquire); - let kind = shared as usize & KIND_MASK; - - if kind == KIND_ARC { - shallow_clone_arc(shared as _, ptr, len) - } else { - debug_assert_eq!(kind, KIND_VEC); - shallow_clone_vec(data, shared, shared.cast(), ptr, len) } -} -unsafe fn promotable_odd_will_truncate(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { - // The Vec "promotable" vtables do not store the capacity, - // so we cannot truncate while using this repr. We *have* to - // promote using `clone` so the capacity can be stored. - drop(promotable_odd_clone(&*data, ptr, len)); -} + unsafe fn will_truncate(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { + // The Vec "promotable" vtables do not store the capacity, + // so we cannot truncate while using this repr. We *have* to + // promote using `clone` so the capacity can be stored. 
+ drop(PromotableOddImpl::clone(&*data, ptr, len)); + } -unsafe fn promotable_odd_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { - promotable_into_vec(data, ptr, len, |shared| shared.cast()) -} + unsafe fn into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { + promotable_into_vec(data, ptr, len, |shared| shared.cast()) + } -unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { - data.with_mut(|shared| { - let shared = *shared; - let kind = shared as usize & KIND_MASK; + unsafe fn drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) { + data.with_mut(|shared| { + let shared = *shared; + let kind = shared as usize & KIND_MASK; - if kind == KIND_ARC { - release_shared(shared.cast()); - } else { - debug_assert_eq!(kind, KIND_VEC); + if kind == KIND_ARC { + release_shared(shared.cast()); + } else { + debug_assert_eq!(kind, KIND_VEC); - free_boxed_slice(shared.cast(), ptr, len); - } - }); + free_boxed_slice(shared.cast(), ptr, len); + } + }); + } } unsafe fn free_boxed_slice(buf: *mut u8, offset: *const u8, len: usize) { @@ -1092,24 +1099,35 @@ impl Drop for Shared { // This flag is set when the LSB is 0. const _: [(); 0 - mem::align_of::() % 2] = []; // Assert that the alignment of `Shared` is divisible by 2. -static SHARED_VTABLE: Vtable = Vtable { - clone: shared_clone, - will_truncate: shared_will_truncate, - into_vec: shared_into_vec, - drop: shared_drop, -}; +struct SharedImpl { + shared: *mut Shared, + offset: *const u8, + len: usize, +} const KIND_ARC: usize = 0b0; const KIND_VEC: usize = 0b1; const KIND_MASK: usize = 0b1; -unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { - let shared = data.load(Ordering::Relaxed); - shallow_clone_arc(shared as _, ptr, len) -} +unsafe impl BytesImpl for SharedImpl { + fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize) { + (AtomicPtr::new(this.shared.cast()), this.offset, this.len) + } + + unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { + let shared = data.load(Ordering::Relaxed); + shallow_clone_arc(shared as _, ptr, len) + } + + unsafe fn into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { + shared_into_vec_impl((data.with_mut(|p| *p)).cast(), ptr, len) + } -unsafe fn shared_will_truncate(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { - // nothing to do before truncate for Shared + unsafe fn drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) { + data.with_mut(|shared| { + release_shared(shared.cast()); + }); + } } unsafe fn shared_into_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec { @@ -1141,16 +1159,6 @@ unsafe fn shared_into_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) } } -unsafe fn shared_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { - shared_into_vec_impl((data.with_mut(|p| *p)).cast(), ptr, len) -} - -unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) { - data.with_mut(|shared| { - release_shared(shared.cast()); - }); -} - unsafe fn shallow_clone_arc(shared: *mut Shared, ptr: *const u8, len: usize) -> Bytes { let old_size = (*shared).ref_cnt.fetch_add(1, Ordering::Relaxed); @@ -1158,12 +1166,11 @@ unsafe fn shallow_clone_arc(shared: *mut Shared, ptr: *const u8, len: usize) -> crate::abort(); } - Bytes { - ptr, + Bytes::with_impl(SharedImpl { + shared, + offset: ptr, len, - data: AtomicPtr::new(shared as _), - vtable: &SHARED_VTABLE, - } + }) } #[cold] @@ -1217,12 
+1224,11 @@ unsafe fn shallow_clone_vec( debug_assert!(actual as usize == ptr as usize); // The upgrade was successful, the new handle can be // returned. - Bytes { - ptr: offset, + Bytes::with_impl(SharedImpl { + shared, + offset, len, - data: AtomicPtr::new(shared as _), - vtable: &SHARED_VTABLE, - } + }) } Err(actual) => { // The upgrade failed, a concurrent clone happened. Release diff --git a/src/bytes_mut.rs b/src/bytes_mut.rs index 8db653844..2920b4339 100644 --- a/src/bytes_mut.rs +++ b/src/bytes_mut.rs @@ -13,7 +13,6 @@ use alloc::{ }; use crate::buf::{IntoIter, UninitSlice}; -use crate::bytes::Vtable; #[allow(unused)] use crate::loom::sync::atomic::AtomicMut; use crate::loom::sync::atomic::{AtomicPtr, AtomicUsize, Ordering}; @@ -253,9 +252,9 @@ impl BytesMut { let ptr = self.ptr.as_ptr(); let len = self.len; - let data = AtomicPtr::new(self.data.cast()); + let shared = self.data; mem::forget(self); - unsafe { Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE) } + Bytes::with_impl(SharedImpl { shared, ptr, len }) } } @@ -1673,51 +1672,51 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize) // ===== impl SharedVtable ===== -static SHARED_VTABLE: Vtable = Vtable { - clone: shared_v_clone, - will_truncate: shared_v_will_truncate, - into_vec: shared_v_into_vec, - drop: shared_v_drop, -}; +struct SharedImpl { + shared: *mut Shared, + ptr: *const u8, + len: usize, +} -unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { - let shared = data.load(Ordering::Relaxed) as *mut Shared; - increment_shared(shared); +unsafe impl crate::BytesImpl for SharedImpl { + fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize) { + (AtomicPtr::new(this.shared.cast()), this.ptr, this.len) + } - let data = AtomicPtr::new(shared as *mut ()); - Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE) -} + unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { + let shared = data.load(Ordering::Relaxed) as *mut Shared; + increment_shared(shared); -unsafe fn shared_v_will_truncate(_: &mut AtomicPtr<()>, _: *const u8, _: usize) { - // nothing to do before truncate for Shared -} + Bytes::with_impl(SharedImpl { shared, ptr, len }) + } -unsafe fn shared_v_into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { - let shared: *mut Shared = (data.with_mut(|p| *p)).cast(); + unsafe fn into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec { + let shared: *mut Shared = (data.with_mut(|p| *p)).cast(); - if (*shared).is_unique() { - let shared = &mut *shared; + if (*shared).is_unique() { + let shared = &mut *shared; - // Drop shared - let mut vec = mem::replace(&mut shared.vec, Vec::new()); - release_shared(shared); + // Drop shared + let mut vec = mem::replace(&mut shared.vec, Vec::new()); + release_shared(shared); - // Copy back buffer - ptr::copy(ptr, vec.as_mut_ptr(), len); - vec.set_len(len); + // Copy back buffer + ptr::copy(ptr, vec.as_mut_ptr(), len); + vec.set_len(len); - vec - } else { - let v = slice::from_raw_parts(ptr, len).to_vec(); - release_shared(shared); - v + vec + } else { + let v = slice::from_raw_parts(ptr, len).to_vec(); + release_shared(shared); + v + } } -} -unsafe fn shared_v_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) { - data.with_mut(|shared| { - release_shared(*shared as *mut Shared); - }); + unsafe fn drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) { + data.with_mut(|shared| { + release_shared(*shared as *mut Shared); + }); + } } // 
compile-fails From 169cd444fe00acbc3105079613ba03327b78b275 Mon Sep 17 00:00:00 2001 From: HyeonuPark Date: Sun, 21 Aug 2022 00:05:58 +0900 Subject: [PATCH 5/5] add `Bytes::downcast_impl` to extract underlying implementation --- src/bytes.rs | 77 ++++++++++++++++++++++++++++++++++++++++++++++-- src/bytes_mut.rs | 8 +++++ 2 files changed, 83 insertions(+), 2 deletions(-) diff --git a/src/bytes.rs b/src/bytes.rs index 21b1dd51d..4883d10aa 100644 --- a/src/bytes.rs +++ b/src/bytes.rs @@ -1,3 +1,4 @@ +use core::any::TypeId; use core::iter::FromIterator; use core::ops::{Deref, RangeBounds}; use core::{cmp, fmt, hash, mem, ptr, slice, usize}; @@ -114,6 +115,9 @@ pub unsafe trait BytesImpl: 'static { /// Decompose `Self` into parts used by `Bytes`. fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize); + /// Creates itself directly from the raw bytes parts decomposed with `into_bytes_parts`. + unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self; + /// Returns new `Bytes` based on the current parts. unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes; @@ -132,6 +136,7 @@ pub unsafe trait BytesImpl: 'static { } struct Vtable { + type_id: fn() -> TypeId, /// fn(data, ptr, len) clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes, /// fn(data, ptr, len) @@ -192,6 +197,7 @@ impl Bytes { #[cfg(not(all(loom, test)))] pub const fn from_static(bytes: &'static [u8]) -> Bytes { const STATIC_VTABLE: Vtable = Vtable { + type_id: TypeId::of::, clone: ::clone, will_truncate: ::will_truncate, into_vec: ::into_vec, @@ -209,6 +215,7 @@ impl Bytes { #[cfg(all(loom, test))] pub fn from_static(bytes: &'static [u8]) -> Bytes { const STATIC_VTABLE: Vtable = Vtable { + type_id: TypeId::of::, clone: ::clone, will_truncate: ::will_truncate, into_vec: ::into_vec, @@ -235,6 +242,7 @@ impl Bytes { len, data, vtable: &Vtable { + type_id: TypeId::of::, clone: T::clone, will_truncate: T::will_truncate, into_vec: T::into_vec, @@ -543,6 +551,19 @@ impl Bytes { self.truncate(0); } + /// Downcast this `Bytes` into its underlying implementation. 
+ #[inline] + pub fn downcast_impl(self) -> Result { + if TypeId::of::() == (self.vtable.type_id)() { + Ok(unsafe { + let this = &mut *mem::ManuallyDrop::new(self); + T::from_bytes_parts(&mut this.data, this.ptr, this.len) + }) + } else { + Err(self) + } + } + // private #[inline] @@ -891,6 +912,7 @@ impl From for Vec { impl fmt::Debug for Vtable { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Vtable") + .field("type_id", &self.type_id) .field("clone", &(self.clone as *const ())) .field("will_truncate", &(self.will_truncate as *const ())) .field("into_vec", &(self.into_vec as *const ())) @@ -906,7 +928,15 @@ struct StaticImpl(&'static [u8]); unsafe impl BytesImpl for StaticImpl { fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize) { let mut bytes = mem::ManuallyDrop::new(Bytes::from_static(this.0)); - (mem::take(&mut bytes.data), bytes.ptr, bytes.len) + ( + mem::replace(&mut bytes.data, AtomicPtr::default()), + bytes.ptr, + bytes.len, + ) + } + + unsafe fn from_bytes_parts(_data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self { + StaticImpl(slice::from_raw_parts(ptr, len)) } unsafe fn clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { @@ -932,7 +962,6 @@ struct PromotableOddImpl(Promotable); enum Promotable { Owned(Box<[u8]>), - #[allow(dead_code)] Shared(SharedImpl), } @@ -952,6 +981,12 @@ unsafe impl BytesImpl for PromotableEvenImpl { (AtomicPtr::new(data.cast()), ptr, len) } + unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self { + PromotableEvenImpl(promotable_from_bytes_parts(data, ptr, len, |shared| { + ptr_map(shared.cast(), |addr| addr & !KIND_MASK) + })) + } + unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { let shared = data.load(Ordering::Acquire); let kind = shared as usize & KIND_MASK; @@ -994,6 +1029,30 @@ unsafe impl BytesImpl for PromotableEvenImpl { } } +unsafe fn promotable_from_bytes_parts( + data: &mut AtomicPtr<()>, + ptr: *const u8, + len: usize, + f: fn(*mut ()) -> *mut u8, +) -> Promotable { + let shared = data.with_mut(|p| *p); + let kind = shared as usize & KIND_MASK; + + if kind == KIND_ARC { + Promotable::Shared(SharedImpl::from_bytes_parts(data, ptr, len)) + } else { + debug_assert_eq!(kind, KIND_VEC); + + let buf = f(shared); + + let cap = (ptr as usize - buf as usize) + len; + + let vec = Vec::from_raw_parts(buf, cap, cap); + + Promotable::Owned(vec.into_boxed_slice()) + } +} + unsafe fn promotable_into_vec( data: &mut AtomicPtr<()>, ptr: *const u8, @@ -1034,6 +1093,12 @@ unsafe impl BytesImpl for PromotableOddImpl { (AtomicPtr::new(ptr.cast()), ptr, len) } + unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self { + PromotableOddImpl(promotable_from_bytes_parts(data, ptr, len, |shared| { + shared.cast() + })) + } + unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { let shared = data.load(Ordering::Acquire); let kind = shared as usize & KIND_MASK; @@ -1114,6 +1179,14 @@ unsafe impl BytesImpl for SharedImpl { (AtomicPtr::new(this.shared.cast()), this.offset, this.len) } + unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self { + SharedImpl { + shared: (data.with_mut(|p| *p)).cast(), + offset: ptr, + len, + } + } + unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { let shared = data.load(Ordering::Relaxed); shallow_clone_arc(shared as _, ptr, len) diff --git a/src/bytes_mut.rs b/src/bytes_mut.rs index 
2920b4339..519f0a5fb 100644 --- a/src/bytes_mut.rs +++ b/src/bytes_mut.rs @@ -1683,6 +1683,14 @@ unsafe impl crate::BytesImpl for SharedImpl { (AtomicPtr::new(this.shared.cast()), this.ptr, this.len) } + unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self { + SharedImpl { + shared: (data.with_mut(|p| *p)).cast(), + ptr, + len, + } + } + unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes { let shared = data.load(Ordering::Relaxed) as *mut Shared; increment_shared(shared);
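
To make the shape of the new API concrete, here is what a third-party backend for the `BytesImpl` trait introduced in this series could look like. This is an illustrative sketch only, not part of the patches: `ArcVecImpl` is a hypothetical type, and because the quoted diffs have lost their angle-bracketed generics (`Vec` where `Vec<u8>` is meant, `Result` where a two-parameter `Result` is meant), the sketch assumes `into_vec` returns `Vec<u8>`, `Bytes::with_impl` is generic over `T: BytesImpl`, and `Bytes::downcast_impl::<T>` returns `Result<T, Bytes>`. It also uses `AtomicPtr::get_mut` from std in place of the crate-internal loom `with_mut` wrapper seen in the patches.

use std::slice;
use std::sync::atomic::{AtomicPtr, Ordering};
use std::sync::Arc;

use bytes::{Bytes, BytesImpl};

// A `Bytes` backend that keeps the buffer alive through an `Arc<Vec<u8>>`.
// The (ptr, len) pair handed to `Bytes` describes the visible window, so the
// trait's default no-op `will_truncate` is sufficient here.
struct ArcVecImpl {
    buf: Arc<Vec<u8>>,
    ptr: *const u8,
    len: usize,
}

impl ArcVecImpl {
    fn new(buf: Arc<Vec<u8>>) -> Self {
        let (ptr, len) = (buf.as_ptr(), buf.len());
        ArcVecImpl { buf, ptr, len }
    }
}

unsafe impl BytesImpl for ArcVecImpl {
    fn into_bytes_parts(this: Self) -> (AtomicPtr<()>, *const u8, usize) {
        // Hand the refcounted allocation to `Bytes` as a type-erased pointer.
        let data = Arc::into_raw(this.buf) as *mut ();
        (AtomicPtr::new(data), this.ptr, this.len)
    }

    unsafe fn from_bytes_parts(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Self {
        // Take back ownership of the strong count stored in `data`.
        ArcVecImpl {
            buf: Arc::from_raw(*data.get_mut() as *const Vec<u8>),
            ptr,
            len,
        }
    }

    unsafe fn clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
        let shared = data.load(Ordering::Relaxed) as *const Vec<u8>;
        // One extra strong count for the new handle; the existing handle keeps its own.
        Arc::increment_strong_count(shared);
        Bytes::with_impl(ArcVecImpl {
            buf: Arc::from_raw(shared),
            ptr,
            len,
        })
    }

    unsafe fn into_vec(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
        // Copy out the visible window, then release this handle's reference.
        let vec = slice::from_raw_parts(ptr, len).to_vec();
        drop(Arc::from_raw(*data.get_mut() as *const Vec<u8>));
        vec
    }

    unsafe fn drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
        drop(Arc::from_raw(*data.get_mut() as *const Vec<u8>));
    }
}

A caller would build a handle with `Bytes::with_impl(ArcVecImpl::new(buf))` and, under the assumed `downcast_impl` signature, recover the backend later with `bytes.downcast_impl::<ArcVecImpl>()`, which yields `Err` carrying the original `Bytes` when the vtable belongs to a different implementation.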