Merged

Commits (27)
90e00ee
Impl `From<Bytes>` for `Vec<u8>`
NobodyXu Apr 28, 2022
418f280
Fix into-vec impl for odd/even/shared vtable in `bytes.rs`
NobodyXu May 1, 2022
fcd5dc6
Fix invalid exclusive reborrowing in `bytes.rs`
NobodyXu May 1, 2022
87d7275
Impl `test_bytes_into_vec` in `test_bytes_vec_alloc.rs`
NobodyXu May 1, 2022
afc8674
Impl `test_bytes_into_vec` in `test_bytes_odd_alloc.rs`
NobodyXu May 1, 2022
8437da0
Fix leaking in `into_vec` impl
NobodyXu May 1, 2022
977b5ff
Impl `test_bytes_into_vec` & `test_bytes_into_vec_promotable_even`
NobodyXu May 1, 2022
01aaf19
Add tests for cases where `offset != 0` for vtable in `bytes.rs`
NobodyXu May 1, 2022
b43705f
Add test for `Bytes` created from `BytesMut` where `offset != 0`
NobodyXu May 1, 2022
6ab1c25
Refactor: Extract fn `shared_to_vec_impl`
NobodyXu Jul 10, 2022
697b299
Fix `shared_to_vec_impl`: Add `Acquire` fence
NobodyXu Jul 10, 2022
4c74191
Refactor: Inline `dealloc_shared` that is used only once
NobodyXu Jul 10, 2022
b322d2d
Fix `shared_to_vec_impl`: Take `*mut Shared`
NobodyXu Jul 10, 2022
015496b
Add convenient script `run_tests.sh`
NobodyXu Jul 10, 2022
40f481b
Fix code style in `shared_to_vec_impl`
NobodyXu Jul 10, 2022
819968c
Refactor `shared_to_vec`: Avoid unnecessary deref
NobodyXu Jul 10, 2022
b1e7414
Refactor: Extract fn `promoteable_to_vec_impl`
NobodyXu Jul 10, 2022
730eaaf
Fix `promoteable_to_vec_impl`: Copy back buffer
NobodyXu Jul 10, 2022
bf4ac42
Refactor: Extract `promoteable_to_vec`
NobodyXu Jul 10, 2022
b5e67e3
Refactor: Inline `promotable_to_vec_impl`
NobodyXu Jul 10, 2022
21fe23a
Rm run_tests.sh
NobodyXu Jul 10, 2022
ff47582
Use `slice::to_vec` explicitly in `static_to_vec`
NobodyXu Jul 11, 2022
9cdd8c5
Explicitly `drop` the shared
NobodyXu Jul 11, 2022
f092e6d
Use `slice::to_vec` in `shared_to_vec_impl`
NobodyXu Jul 11, 2022
7eb6ca8
Use `slice::to_vec` in `shared_v_to_vec`
NobodyXu Jul 11, 2022
5ed21db
Fix `bytes_mut::shared_v_to_vec`: Avoid mut ref
NobodyXu Jul 13, 2022
0cd3f9e
Fix `shared_to_vec_impl`: Use `compare_exchange`
NobodyXu Jul 13, 2022
80 changes: 80 additions & 0 deletions src/bytes.rs
@@ -109,6 +109,10 @@ pub(crate) struct Vtable {
/// fn(data, ptr, len)
pub clone: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Bytes,
/// fn(data, ptr, len)
///
/// Consumes the `Bytes` by value, returning the underlying `Vec<u8>`
pub to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
/// fn(data, ptr, len)
pub drop: unsafe fn(&mut AtomicPtr<()>, *const u8, usize),
}
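The new `to_vec` slot follows the crate's manual-vtable pattern: each backing representation supplies its own function pointer, and the `From<Bytes> for Vec<u8>` impl below dispatches through it. A minimal standalone sketch of that dispatch style, using simplified stand-in types rather than bytes' actual definitions:

```rust
use std::sync::atomic::AtomicPtr;

// Simplified stand-in for the manual vtable above (not bytes' real types).
struct Vtable {
    to_vec: unsafe fn(&AtomicPtr<()>, *const u8, usize) -> Vec<u8>,
}

// A trivial strategy: always copy the visible bytes into a fresh Vec.
unsafe fn copy_to_vec(_data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
    std::slice::from_raw_parts(ptr, len).to_vec()
}

static COPY_VTABLE: Vtable = Vtable { to_vec: copy_to_vec };

fn main() {
    let data = AtomicPtr::new(std::ptr::null_mut());
    let buf = b"abc";
    // Dispatch through the function pointer, as the From impl does.
    let v = unsafe { (COPY_VTABLE.to_vec)(&data, buf.as_ptr(), buf.len()) };
    assert_eq!(v, b"abc");
}
```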

@@ -845,6 +849,13 @@ impl From<String> for Bytes {
}
}

impl From<Bytes> for Vec<u8> {
fn from(bytes: Bytes) -> Vec<u8> {
let bytes = mem::ManuallyDrop::new(bytes);
unsafe { (bytes.vtable.to_vec)(&bytes.data, bytes.ptr, bytes.len) }
}
}

// ===== impl Vtable =====

impl fmt::Debug for Vtable {
@@ -860,6 +871,7 @@ impl fmt::Debug for Vtable {

const STATIC_VTABLE: Vtable = Vtable {
clone: static_clone,
to_vec: static_to_vec,
drop: static_drop,
};

@@ -868,6 +880,11 @@ unsafe fn static_clone(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
Bytes::from_static(slice)
}

unsafe fn static_to_vec(_: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
let slice = slice::from_raw_parts(ptr, len);
slice.to_vec()
}

unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {
// nothing to drop for &'static [u8]
}
@@ -876,11 +893,13 @@ unsafe fn static_drop(_: &mut AtomicPtr<()>, _: *const u8, _: usize) {

static PROMOTABLE_EVEN_VTABLE: Vtable = Vtable {
clone: promotable_even_clone,
to_vec: promotable_even_to_vec,
drop: promotable_even_drop,
};

static PROMOTABLE_ODD_VTABLE: Vtable = Vtable {
clone: promotable_odd_clone,
to_vec: promotable_odd_to_vec,
drop: promotable_odd_drop,
};

@@ -897,6 +916,38 @@ unsafe fn promotable_even_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
}
}

unsafe fn promotable_to_vec(
data: &AtomicPtr<()>,
ptr: *const u8,
len: usize,
f: fn(*mut ()) -> *mut u8,
) -> Vec<u8> {
let shared = data.load(Ordering::Acquire);
let kind = shared as usize & KIND_MASK;

if kind == KIND_ARC {
shared_to_vec_impl(shared.cast(), ptr, len)
} else {
// If Bytes holds a Vec, then the offset must be 0.
debug_assert_eq!(kind, KIND_VEC);

let buf = f(shared);

let cap = (ptr as usize - buf as usize) + len;

// Copy back buffer
ptr::copy(ptr, buf, len);

Vec::from_raw_parts(buf, len, cap)
}
}

unsafe fn promotable_even_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
promotable_to_vec(data, ptr, len, |shared| {
ptr_map(shared.cast(), |addr| addr & !KIND_MASK)
})
}

unsafe fn promotable_even_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
data.with_mut(|shared| {
let shared = *shared;
@@ -924,6 +975,10 @@ unsafe fn promotable_odd_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
}
}

unsafe fn promotable_odd_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
promotable_to_vec(data, ptr, len, |shared| shared.cast())
}

unsafe fn promotable_odd_drop(data: &mut AtomicPtr<()>, ptr: *const u8, len: usize) {
data.with_mut(|shared| {
let shared = *shared;
@@ -967,6 +1022,7 @@ const _: [(); 0 - mem::align_of::<Shared>() % 2] = []; // Assert that the alignment of `Shared` is divisible by 2.

static SHARED_VTABLE: Vtable = Vtable {
clone: shared_clone,
to_vec: shared_to_vec,
drop: shared_drop,
};

@@ -979,6 +1035,30 @@ unsafe fn shared_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
shallow_clone_arc(shared as _, ptr, len)
}

unsafe fn shared_to_vec_impl(shared: *mut Shared, ptr: *const u8, len: usize) -> Vec<u8> {
// Check that the ref_cnt is 1 (unique). If so, swap it to 0 with AcqRel
// ordering, which is needed for the same reason as the fence in release_shared.
if (*shared).ref_cnt.compare_exchange(1, 0, Ordering::AcqRel, Ordering::Relaxed).is_ok() {
let buf = (*shared).buf;
let cap = (*shared).cap;

// Deallocate Shared
drop(Box::from_raw(shared as *mut mem::ManuallyDrop<Shared>));

// Copy back buffer
ptr::copy(ptr, buf, len);

Vec::from_raw_parts(buf, len, cap)
} else {
let v = slice::from_raw_parts(ptr, len).to_vec();
release_shared(shared);
v
}
}

unsafe fn shared_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
shared_to_vec_impl(data.load(Ordering::Relaxed).cast(), ptr, len)
}

unsafe fn shared_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
data.with_mut(|shared| {
release_shared(shared.cast());
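End of the `src/bytes.rs` changes. For orientation, a minimal usage sketch of the conversion added here (public API only; whether the allocation is reclaimed or the bytes are copied is decided by the unique/shared branches above, and the example assumes a crate depending on this branch of `bytes`):

```rust
use bytes::Bytes;

fn main() {
    // Unique handle created from a Vec (KIND_VEC): the original
    // allocation is handed back without copying into a new buffer.
    let b = Bytes::from(vec![1u8, 2, 3]);
    let v: Vec<u8> = b.into();
    assert_eq!(v, [1, 2, 3]);

    // While a clone exists the conversion must copy, because the
    // other handle still reads the same buffer.
    let b1 = Bytes::from(vec![4u8, 5, 6]);
    let b2 = b1.clone();
    assert_eq!(Vec::from(b1), [4, 5, 6]); // copies; b2 is still alive
    assert_eq!(Vec::from(b2), [4, 5, 6]); // now unique: reclaims the buffer
}
```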
21 changes: 21 additions & 0 deletions src/bytes_mut.rs
@@ -1610,6 +1610,7 @@ unsafe fn rebuild_vec(ptr: *mut u8, mut len: usize, mut cap: usize, off: usize) -> Vec<u8> {

static SHARED_VTABLE: Vtable = Vtable {
clone: shared_v_clone,
to_vec: shared_v_to_vec,
drop: shared_v_drop,
};

@@ -1621,6 +1622,26 @@ unsafe fn shared_v_clone(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Bytes {
Bytes::with_vtable(ptr, len, data, &SHARED_VTABLE)
}

unsafe fn shared_v_to_vec(data: &AtomicPtr<()>, ptr: *const u8, len: usize) -> Vec<u8> {
let shared: *mut Shared = data.load(Ordering::Relaxed).cast();

if (*shared).is_unique() {
let shared = &mut *shared;

// Drop shared
let mut vec = mem::replace(&mut shared.vec, Vec::new());
release_shared(shared);

// Copy back buffer
ptr::copy(ptr, vec.as_mut_ptr(), len);
vec.set_len(len);

vec
} else {
let v = slice::from_raw_parts(ptr, len).to_vec();
release_shared(shared);
v
}
}

unsafe fn shared_v_drop(data: &mut AtomicPtr<()>, _ptr: *const u8, _len: usize) {
data.with_mut(|shared| {
release_shared(*shared as *mut Shared);
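End of the `src/bytes_mut.rs` changes. A companion sketch for this file's vtable, again assuming the `From<Bytes> for Vec<u8>` impl from this PR: `split_off` forces the shared representation, so the frozen handle dispatches to `shared_v_to_vec` above.

```rust
use bytes::BytesMut;

fn main() {
    let mut bm = BytesMut::from(&b"hello world"[..]);
    let tail = bm.split_off(5); // forces the shared (KIND_ARC) representation
    let head = bm.freeze();     // Bytes backed by bytes_mut's SHARED_VTABLE

    // `tail` still shares the allocation, so this takes the copy branch.
    let v: Vec<u8> = head.into();
    assert_eq!(v, b"hello");

    // Had `tail` been dropped first, the unique branch would instead
    // steal the inner Vec via mem::replace and skip the copy.
    drop(tail);
}
```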
70 changes: 70 additions & 0 deletions tests/test_bytes.rs
@@ -1065,3 +1065,73 @@ fn bytes_into_vec() {
let vec: Vec<u8> = bytes.into();
assert_eq!(&vec, prefix);
}

#[test]
fn test_bytes_into_vec() {
// Test STATIC_VTABLE.to_vec
let bs = b"1b23exfcz3r";
let vec: Vec<u8> = Bytes::from_static(bs).into();
assert_eq!(&*vec, bs);

// Test bytes_mut.SHARED_VTABLE.to_vec impl
let mut bytes_mut: BytesMut = bs[..].into();

// Set kind to KIND_ARC so that after freeze, Bytes will use bytes_mut.SHARED_VTABLE
drop(bytes_mut.split_off(bs.len()));

let b1 = bytes_mut.freeze();
let b2 = b1.clone();

// shared.is_unique() = false, so the contents are copied out
assert_eq!(&*Vec::from(b2), bs);

// shared.is_unique() = true, so the inner Vec is reclaimed
assert_eq!(&*Vec::from(b1), bs);

// Test bytes_mut.SHARED_VTABLE.to_vec impl where offset != 0
let mut bytes_mut1: BytesMut = bs[..].into();
let bytes_mut2 = bytes_mut1.split_off(9);

let b1 = bytes_mut1.freeze();
let b2 = bytes_mut2.freeze();

assert_eq!(Vec::from(b2), bs[9..]);
assert_eq!(Vec::from(b1), bs[..9]);
}

#[test]
fn test_bytes_into_vec_promotable_even() {
let vec = vec![33u8; 1024];

// Test cases where kind == KIND_VEC
let b1 = Bytes::from(vec.clone());
assert_eq!(Vec::from(b1), vec);

// Test cases where kind == KIND_ARC, ref_cnt == 1
let b1 = Bytes::from(vec.clone());
drop(b1.clone());
assert_eq!(Vec::from(b1), vec);

// Test cases where kind == KIND_ARC, ref_cnt == 2
let b1 = Bytes::from(vec.clone());
let b2 = b1.clone();
assert_eq!(Vec::from(b1), vec);

// Test cases where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
assert_eq!(Vec::from(b2), vec);

// Test cases where offset != 0
let mut b1 = Bytes::from(vec.clone());
let b2 = b1.split_off(20);

assert_eq!(Vec::from(b2), vec[20..]);
assert_eq!(Vec::from(b1), vec[..20]);
}
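The offset cases above also pin down a useful property of the unique-handle path: the visible bytes are shifted down with `ptr::copy` and the Vec is rebuilt over the whole original allocation. A sketch of that behavior under the implementation in this diff (the `capacity` assertion assumes the KIND_ARC unique branch of `shared_to_vec_impl` runs, as it does after the other handle is dropped):

```rust
use bytes::Bytes;

fn main() {
    let v = vec![33u8; 1024];
    let cap = v.capacity(); // remember the real allocation size

    let mut b1 = Bytes::from(v);
    let b2 = b1.split_off(20); // cloning promotes both handles to KIND_ARC
    drop(b1);                  // b2 becomes the unique handle, 20 bytes in

    let out: Vec<u8> = b2.into();
    assert_eq!(out.len(), 1004);
    // Unique branch: the 1004 visible bytes are memmoved down to the
    // allocation start and the original capacity is preserved.
    assert_eq!(out.capacity(), cap);
}
```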
29 changes: 29 additions & 0 deletions tests/test_bytes_odd_alloc.rs
@@ -66,3 +66,32 @@ fn test_bytes_clone_drop() {
let b1 = Bytes::from(vec);
let _b2 = b1.clone();
}

#[test]
fn test_bytes_into_vec() {
let vec = vec![33u8; 1024];

// Test cases where kind == KIND_VEC
let b1 = Bytes::from(vec.clone());
assert_eq!(Vec::from(b1), vec);

// Test cases where kind == KIND_ARC, ref_cnt == 1
let b1 = Bytes::from(vec.clone());
drop(b1.clone());
assert_eq!(Vec::from(b1), vec);

// Test cases where kind == KIND_ARC, ref_cnt == 2
let b1 = Bytes::from(vec.clone());
let b2 = b1.clone();
assert_eq!(Vec::from(b1), vec);

// Test cases where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
assert_eq!(Vec::from(b2), vec);

// Test cases where offset != 0
let mut b1 = Bytes::from(vec.clone());
let b2 = b1.split_off(20);

assert_eq!(Vec::from(b2), vec[20..]);
assert_eq!(Vec::from(b1), vec[..20]);
}
29 changes: 29 additions & 0 deletions tests/test_bytes_vec_alloc.rs
@@ -112,3 +112,32 @@ fn invalid_ptr<T>(addr: usize) -> *mut T {
debug_assert_eq!(ptr as usize, addr);
ptr.cast::<T>()
}

#[test]
fn test_bytes_into_vec() {
let vec = vec![33u8; 1024];

// Test cases where kind == KIND_VEC
let b1 = Bytes::from(vec.clone());
assert_eq!(Vec::from(b1), vec);

// Test cases where kind == KIND_ARC, ref_cnt == 1
let b1 = Bytes::from(vec.clone());
drop(b1.clone());
assert_eq!(Vec::from(b1), vec);

// Test cases where kind == KIND_ARC, ref_cnt == 2
let b1 = Bytes::from(vec.clone());
let b2 = b1.clone();
assert_eq!(Vec::from(b1), vec);

// Test cases where vtable = SHARED_VTABLE, kind == KIND_ARC, ref_cnt == 1
assert_eq!(Vec::from(b2), vec);

// Test cases where offset != 0
let mut b1 = Bytes::from(vec.clone());
let b2 = b1.split_off(20);

assert_eq!(Vec::from(b2), vec[20..]);
assert_eq!(Vec::from(b1), vec[..20]);
}
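These allocator-specific test files exist because the promotable vtables tag the low bit of the buffer pointer: even addresses get the KIND_VEC bit OR'd in (and masked off again by the closure in `promotable_even_to_vec`), while odd addresses carry that bit naturally, which is what `promotable_odd_to_vec`'s identity mapping relies on. A self-contained sketch of the tagging arithmetic; the constant values are restated here as assumptions mirroring the internals of `src/bytes.rs`:

```rust
// Assumed values, mirroring the internal constants in src/bytes.rs.
const KIND_VEC: usize = 0b1;
const KIND_MASK: usize = 0b1;

fn main() {
    // A u16 allocation is at least 2-aligned, so its low bit is free.
    let boxed: Box<[u16; 4]> = Box::new([0; 4]);
    let addr = Box::into_raw(boxed) as usize;
    assert_eq!(addr & KIND_MASK, 0);

    // Tag the pointer as KIND_VEC, as the even vtable does.
    let tagged = addr | KIND_VEC;
    assert_eq!(tagged & KIND_MASK, KIND_VEC);

    // Untag with `addr & !KIND_MASK` to recover the allocation start,
    // exactly the closure promotable_even_to_vec passes in.
    let untagged = tagged & !KIND_MASK;
    assert_eq!(untagged, addr);

    // Rebuild the Box so the sketch does not leak.
    drop(unsafe { Box::from_raw(untagged as *mut [u16; 4]) });
}
```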