From 2158b9d5a175197d7e19151f821daa2c14c2255b Mon Sep 17 00:00:00 2001 From: Joshua Liebow-Feeser Date: Wed, 13 Dec 2023 16:37:37 -0800 Subject: [PATCH] Fix soundness hole in Ref::into_ref and into_mut (#721) (#724) This commit implements the fix for #716 which will be released as a new version in version trains 0.2, 0.3, 0.4, 0.5, 0.6, and 0.7. See #716 for a description of the soundness hole and an explanation of why this fix is chosen. Unfortunately, due to dtolnay/trybuild#241, there is no way for us to write a UI test that will detect a failure post-monomorphization, which is when the code implemented in this change is designed to fail. I have manually verified that unsound uses of these APIs now fail to compile. Release 0.5.2. --- Cargo.toml.crates-io => Cargo.toml | 2 +- src/byteorder.rs | 78 +++++- src/lib.rs | 257 +++++++++++++----- ...ost_monomorphization_compile_fail_tests.rs | 118 ++++++++ .../{Cargo.toml.crates-io => Cargo.toml} | 0 5 files changed, 373 insertions(+), 82 deletions(-) rename Cargo.toml.crates-io => Cargo.toml (97%) create mode 100644 src/post_monomorphization_compile_fail_tests.rs rename zerocopy-derive/{Cargo.toml.crates-io => Cargo.toml} (100%) diff --git a/Cargo.toml.crates-io b/Cargo.toml similarity index 97% rename from Cargo.toml.crates-io rename to Cargo.toml index 097e3b1e60..f1a6e9c921 100644 --- a/Cargo.toml.crates-io +++ b/Cargo.toml @@ -7,7 +7,7 @@ [package] edition = "2018" name = "zerocopy" -version = "0.5.1" +version = "0.5.2" authors = ["Joshua Liebow-Feeser "] description = "Utilities for zero-copy parsing and serialization" license = "BSD-3-Clause" diff --git a/src/byteorder.rs b/src/byteorder.rs index 8aee6a14d0..9f27f068a9 100644 --- a/src/byteorder.rs +++ b/src/byteorder.rs @@ -311,9 +311,42 @@ define_type!( [u32, u64, u128, usize], [U32, U64, U128] ); -define_type!(A, U32, u32, 32, 4, read_u32, write_u32, unsigned, [u64, u128], [U64, U128]); -define_type!(A, U64, u64, 64, 8, read_u64, write_u64, unsigned, [u128], [U128]); -define_type!(A, U128, u128, 128, 16, read_u128, write_u128, unsigned, [], []); +define_type!( + A, + U32, + u32, + 32, + 4, + read_u32, + write_u32, + unsigned, + [u64, u128], + [U64, U128] +); +define_type!( + A, + U64, + u64, + 64, + 8, + read_u64, + write_u64, + unsigned, + [u128], + [U128] +); +define_type!( + A, + U128, + u128, + 128, + 16, + read_u128, + write_u128, + unsigned, + [], + [] +); define_type!( An, I16, @@ -326,9 +359,42 @@ define_type!( [i32, i64, i128, isize], [I32, I64, I128] ); -define_type!(An, I32, i32, 32, 4, read_i32, write_i32, signed, [i64, i128], [I64, I128]); -define_type!(An, I64, i64, 64, 8, read_i64, write_i64, signed, [i128], [I128]); -define_type!(An, I128, i128, 128, 16, read_i128, write_i128, signed, [], []); +define_type!( + An, + I32, + i32, + 32, + 4, + read_i32, + write_i32, + signed, + [i64, i128], + [I64, I128] +); +define_type!( + An, + I64, + i64, + 64, + 8, + read_i64, + write_i64, + signed, + [i128], + [I128] +); +define_type!( + An, + I128, + i128, + 128, + 16, + read_i128, + write_i128, + signed, + [], + [] +); #[cfg(test)] mod tests { diff --git a/src/lib.rs b/src/lib.rs index c3521789b0..5666c024a8 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -46,6 +46,7 @@ #![recursion_limit = "2048"] pub mod byteorder; +mod post_monomorphization_compile_fail_tests; pub use crate::byteorder::*; pub use zerocopy_derive::*; @@ -1216,12 +1217,12 @@ where /// `into_ref` consumes the `LayoutVerified`, and returns a reference to /// `T`. 
pub fn into_ref(self) -> &'a T { - // NOTE: This is safe because `B` is guaranteed to live for the lifetime - // `'a`, meaning that a) the returned reference cannot outlive the `B` - // from which `self` was constructed and, b) no mutable methods on that - // `B` can be called during the lifetime of the returned reference. See - // the documentation on `deref_helper` for what invariants we are - // required to uphold. + assert!(B::INTO_REF_INTO_MUT_ARE_SOUND); + + // SAFETY: According to the safety preconditions on + // `ByteSlice::INTO_REF_INTO_MUT_ARE_SOUND`, the preceding assert + // ensures that, given `B: 'a`, it is sound to drop `self` and still + // access the underlying memory using reads for `'a`. unsafe { self.deref_helper() } } } @@ -1236,12 +1237,13 @@ where /// `into_mut` consumes the `LayoutVerified`, and returns a mutable /// reference to `T`. pub fn into_mut(mut self) -> &'a mut T { - // NOTE: This is safe because `B` is guaranteed to live for the lifetime - // `'a`, meaning that a) the returned reference cannot outlive the `B` - // from which `self` was constructed and, b) no other methods - mutable - // or immutable - on that `B` can be called during the lifetime of the - // returned reference. See the documentation on `deref_mut_helper` for - // what invariants we are required to uphold. + assert!(B::INTO_REF_INTO_MUT_ARE_SOUND); + + // SAFETY: According to the safety preconditions on + // `ByteSlice::INTO_REF_INTO_MUT_ARE_SOUND`, the preceding assert + // ensures that, given `B: 'a + ByteSliceMut`, it is sound to drop + // `self` and still access the underlying memory using both reads and + // writes for `'a`. unsafe { self.deref_mut_helper() } } } @@ -1256,12 +1258,12 @@ where /// `into_slice` consumes the `LayoutVerified`, and returns a reference to /// `[T]`. pub fn into_slice(self) -> &'a [T] { - // NOTE: This is safe because `B` is guaranteed to live for the lifetime - // `'a`, meaning that a) the returned reference cannot outlive the `B` - // from which `self` was constructed and, b) no mutable methods on that - // `B` can be called during the lifetime of the returned reference. See - // the documentation on `deref_slice_helper` for what invariants we are - // required to uphold. + assert!(B::INTO_REF_INTO_MUT_ARE_SOUND); + + // SAFETY: According to the safety preconditions on + // `ByteSlice::INTO_REF_INTO_MUT_ARE_SOUND`, the preceding assert + // ensures that, given `B: 'a`, it is sound to drop `self` and still + // access the underlying memory using reads for `'a`. unsafe { self.deref_slice_helper() } } } @@ -1276,12 +1278,13 @@ where /// `into_mut_slice` consumes the `LayoutVerified`, and returns a mutable /// reference to `[T]`. pub fn into_mut_slice(mut self) -> &'a mut [T] { - // NOTE: This is safe because `B` is guaranteed to live for the lifetime - // `'a`, meaning that a) the returned reference cannot outlive the `B` - // from which `self` was constructed and, b) no other methods - mutable - // or immutable - on that `B` can be called during the lifetime of the - // returned reference. See the documentation on `deref_mut_slice_helper` - // for what invariants we are required to uphold. + assert!(B::INTO_REF_INTO_MUT_ARE_SOUND); + + // SAFETY: According to the safety preconditions on + // `ByteSlice::INTO_REF_INTO_MUT_ARE_SOUND`, the preceding assert + // ensures that, given `B: 'a + ByteSliceMut`, it is sound to drop + // `self` and still access the underlying memory using both reads and + // writes for `'a`. 
unsafe { self.deref_mut_slice_helper() } } } @@ -1637,6 +1640,29 @@ mod sealed { /// [`Vec`]: std::vec::Vec /// [`split_at`]: crate::ByteSlice::split_at pub unsafe trait ByteSlice: Deref + Sized + self::sealed::Sealed { + /// Are the [`Ref::into_ref`] and [`Ref::into_mut`] methods sound when used + /// with `Self`? If not, evaluating this constant must panic at compile + /// time. + /// + /// This exists to work around #716 on versions of zerocopy prior to 0.8. + /// + /// # Safety + /// + /// This may only be set to true if the following holds: Given the + /// following: + /// - `Self: 'a` + /// - `bytes: Self` + /// - `let ptr = bytes.as_ptr()` + /// + /// ...then: + /// - Using `ptr` to read the memory previously addressed by `bytes` is + /// sound for `'a` even after `bytes` has been dropped. + /// - If `Self: ByteSliceMut`, using `ptr` to write the memory previously + /// addressed by `bytes` is sound for `'a` even after `bytes` has been + /// dropped. + #[doc(hidden)] + const INTO_REF_INTO_MUT_ARE_SOUND: bool; + /// Gets a raw pointer to the first byte in the slice. fn as_ptr(&self) -> *const u8; @@ -1661,33 +1687,73 @@ pub unsafe trait ByteSliceMut: ByteSlice + DerefMut { } unsafe impl<'a> ByteSlice for &'a [u8] { + // SAFETY: If `&'b [u8]: 'a`, then the underlying memory is treated as + // borrowed immutably for `'a` even if the slice itself is dropped. + const INTO_REF_INTO_MUT_ARE_SOUND: bool = true; + + #[inline] fn as_ptr(&self) -> *const u8 { <[u8]>::as_ptr(self) } + + #[inline] fn split_at(self, mid: usize) -> (Self, Self) { <[u8]>::split_at(self, mid) } } unsafe impl<'a> ByteSlice for &'a mut [u8] { + // SAFETY: If `&'b mut [u8]: 'a`, then the underlying memory is treated as + // borrowed mutably for `'a` even if the slice itself is dropped. + const INTO_REF_INTO_MUT_ARE_SOUND: bool = true; + + #[inline] fn as_ptr(&self) -> *const u8 { <[u8]>::as_ptr(self) } + + #[inline] fn split_at(self, mid: usize) -> (Self, Self) { <[u8]>::split_at_mut(self, mid) } } unsafe impl<'a> ByteSlice for Ref<'a, [u8]> { + const INTO_REF_INTO_MUT_ARE_SOUND: bool = if !cfg!(doc) { + panic!("Ref::into_ref and Ref::into_mut are unsound when used with core::cell::Ref; see https://github.com/google/zerocopy/issues/716") + } else { + // When compiling documentation, allow the evaluation of this constant + // to succeed. This doesn't represent a soundness hole - it just delays + // any error to runtime. The reason we need this is that, otherwise, + // `rustdoc` will fail when trying to document this item. + false + }; + + #[inline] fn as_ptr(&self) -> *const u8 { <[u8]>::as_ptr(self) } + + #[inline] fn split_at(self, mid: usize) -> (Self, Self) { Ref::map_split(self, |slice| <[u8]>::split_at(slice, mid)) } } unsafe impl<'a> ByteSlice for RefMut<'a, [u8]> { + const INTO_REF_INTO_MUT_ARE_SOUND: bool = if !cfg!(doc) { + panic!("Ref::into_ref and Ref::into_mut are unsound when used with core::cell::RefMut; see https://github.com/google/zerocopy/issues/716") + } else { + // When compiling documentation, allow the evaluation of this constant + // to succeed. This doesn't represent a soundness hole - it just delays + // any error to runtime. The reason we need this is that, otherwise, + // `rustdoc` will fail when trying to document this item. 
+ false + }; + + #[inline] fn as_ptr(&self) -> *const u8 { <[u8]>::as_ptr(self) } + + #[inline] fn split_at(self, mid: usize) -> (Self, Self) { RefMut::map_split(self, |slice| <[u8]>::split_at_mut(slice, mid)) } @@ -1744,7 +1810,8 @@ mod alloc_support { unsafe { // This is a potentially overlapping copy. let ptr = v.as_mut_ptr(); - ptr.add(position).copy_to(ptr.add(position + additional), v.len() - position); + ptr.add(position) + .copy_to(ptr.add(position + additional), v.len() - position); ptr.add(position).write_bytes(0, additional); v.set_len(v.len() + additional); } @@ -1866,7 +1933,8 @@ mod tests { // assert that values written to the byte slice are reflected in the // typed value const VAL2: u64 = !VAL1; // different from VAL1 - lv.bytes_mut().copy_from_slice(&VAL2.to_ne_bytes().repeat(typed_len)); + lv.bytes_mut() + .copy_from_slice(&VAL2.to_ne_bytes().repeat(typed_len)); assert!(lv.iter().copied().all(|x| x == VAL2)); } @@ -1902,7 +1970,12 @@ mod tests { // assert that values written to the typed value are reflected in the // byte slice - let mut expected_bytes = [0xFF, 0x00].iter().copied().cycle().take(len).collect::>(); + let mut expected_bytes = [0xFF, 0x00] + .iter() + .copied() + .cycle() + .take(len) + .collect::>(); lv.copy_from_slice(&expected_bytes); assert_eq!(lv.bytes(), expected_bytes.as_slice()); @@ -1962,7 +2035,10 @@ mod tests { let mut buf = AlignedBuffer::::default(); // buf.buf should be aligned to 8 and have a length which is a multiple // of size_of::(), so this should always succeed - test_new_helper_slice(LayoutVerified::<_, [u64]>::new_slice(&mut buf.buf[..]).unwrap(), 2); + test_new_helper_slice( + LayoutVerified::<_, [u64]>::new_slice(&mut buf.buf[..]).unwrap(), + 2, + ); buf.buf = [0xFFu8; 16]; test_new_helper_slice( LayoutVerified::<_, [u64]>::new_slice_zeroed(&mut buf.buf[..]).unwrap(), @@ -2209,11 +2285,15 @@ mod tests { assert!(LayoutVerified::<_, u64>::new_from_suffix(&buf.buf[..]).is_none()); assert!(LayoutVerified::<_, u64>::new_from_suffix_zeroed(&mut buf.buf[..]).is_none()); assert!(LayoutVerified::<_, [u8; 8]>::new_unaligned_from_prefix(&buf.buf[..]).is_none()); - assert!(LayoutVerified::<_, [u8; 8]>::new_unaligned_from_prefix_zeroed(&mut buf.buf[..]) - .is_none()); + assert!( + LayoutVerified::<_, [u8; 8]>::new_unaligned_from_prefix_zeroed(&mut buf.buf[..]) + .is_none() + ); assert!(LayoutVerified::<_, [u8; 8]>::new_unaligned_from_suffix(&buf.buf[..]).is_none()); - assert!(LayoutVerified::<_, [u8; 8]>::new_unaligned_from_suffix_zeroed(&mut buf.buf[..]) - .is_none()); + assert!( + LayoutVerified::<_, [u8; 8]>::new_unaligned_from_suffix_zeroed(&mut buf.buf[..]) + .is_none() + ); // fail because the length is not a multiple of the element size @@ -2237,20 +2317,28 @@ mod tests { assert!( LayoutVerified::<_, [u64]>::new_slice_from_suffix_zeroed(&mut buf.buf[..], 2).is_none() ); - assert!(LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_prefix(&buf.buf[..], 2) - .is_none()); - assert!(LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_prefix_zeroed( - &mut buf.buf[..], - 2 - ) - .is_none()); - assert!(LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_suffix(&buf.buf[..], 2) - .is_none()); - assert!(LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_suffix_zeroed( - &mut buf.buf[..], - 2 - ) - .is_none()); + assert!( + LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_prefix(&buf.buf[..], 2) + .is_none() + ); + assert!( + LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_prefix_zeroed( + &mut buf.buf[..], + 2 
+ ) + .is_none() + ); + assert!( + LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_suffix(&buf.buf[..], 2) + .is_none() + ); + assert!( + LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_suffix_zeroed( + &mut buf.buf[..], + 2 + ) + .is_none() + ); // fail because the alignment is insufficient @@ -2265,11 +2353,15 @@ mod tests { assert!(LayoutVerified::<_, [u64]>::new_slice(&buf.buf[4..]).is_none()); assert!(LayoutVerified::<_, [u64]>::new_slice_zeroed(&mut buf.buf[4..]).is_none()); assert!(LayoutVerified::<_, [u64]>::new_slice_from_prefix(&buf.buf[4..], 1).is_none()); - assert!(LayoutVerified::<_, [u64]>::new_slice_from_prefix_zeroed(&mut buf.buf[4..], 1) - .is_none()); + assert!( + LayoutVerified::<_, [u64]>::new_slice_from_prefix_zeroed(&mut buf.buf[4..], 1) + .is_none() + ); assert!(LayoutVerified::<_, [u64]>::new_slice_from_suffix(&buf.buf[4..], 1).is_none()); - assert!(LayoutVerified::<_, [u64]>::new_slice_from_suffix_zeroed(&mut buf.buf[4..], 1) - .is_none()); + assert!( + LayoutVerified::<_, [u64]>::new_slice_from_suffix_zeroed(&mut buf.buf[4..], 1) + .is_none() + ); // slicing from 4 should be unnecessary because new_from_suffix[_zeroed] // use the suffix of the slice assert!(LayoutVerified::<_, u64>::new_from_suffix(&buf.buf[..]).is_none()); @@ -2279,40 +2371,52 @@ mod tests { let mut buf = AlignedBuffer::::default(); let unreasonable_len = std::usize::MAX / mem::size_of::() + 1; - assert!(LayoutVerified::<_, [u64]>::new_slice_from_prefix(&buf.buf[..], unreasonable_len) - .is_none()); + assert!( + LayoutVerified::<_, [u64]>::new_slice_from_prefix(&buf.buf[..], unreasonable_len) + .is_none() + ); assert!(LayoutVerified::<_, [u64]>::new_slice_from_prefix_zeroed( &mut buf.buf[..], unreasonable_len ) .is_none()); - assert!(LayoutVerified::<_, [u64]>::new_slice_from_suffix(&buf.buf[..], unreasonable_len) - .is_none()); + assert!( + LayoutVerified::<_, [u64]>::new_slice_from_suffix(&buf.buf[..], unreasonable_len) + .is_none() + ); assert!(LayoutVerified::<_, [u64]>::new_slice_from_suffix_zeroed( &mut buf.buf[..], unreasonable_len ) .is_none()); - assert!(LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_prefix( - &buf.buf[..], - unreasonable_len - ) - .is_none()); - assert!(LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_prefix_zeroed( - &mut buf.buf[..], - unreasonable_len - ) - .is_none()); - assert!(LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_suffix( - &buf.buf[..], - unreasonable_len - ) - .is_none()); - assert!(LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_suffix_zeroed( - &mut buf.buf[..], - unreasonable_len - ) - .is_none()); + assert!( + LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_prefix( + &buf.buf[..], + unreasonable_len + ) + .is_none() + ); + assert!( + LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_prefix_zeroed( + &mut buf.buf[..], + unreasonable_len + ) + .is_none() + ); + assert!( + LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_suffix( + &buf.buf[..], + unreasonable_len + ) + .is_none() + ); + assert!( + LayoutVerified::<_, [[u8; 8]]>::new_slice_unaligned_from_suffix_zeroed( + &mut buf.buf[..], + unreasonable_len + ) + .is_none() + ); } // Tests for ensuring that, if a ZST is passed into a slice-like function, we always @@ -2367,7 +2471,10 @@ mod tests { // Do the same tests for a slice, which ensures that this logic works // for unsized types as well. 
         let foo = &mut [Foo { a: 1, b: 2 }, Foo { a: 3, b: 4 }];
-        assert_eq!(foo.as_bytes(), [1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0]);
+        assert_eq!(
+            foo.as_bytes(),
+            [1, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0]
+        );
         foo.as_bytes_mut()[8] = 5;
         assert_eq!(foo, &mut [Foo { a: 1, b: 2 }, Foo { a: 5, b: 4 }]);
     }
diff --git a/src/post_monomorphization_compile_fail_tests.rs b/src/post_monomorphization_compile_fail_tests.rs
new file mode 100644
index 0000000000..32505b6693
--- /dev/null
+++ b/src/post_monomorphization_compile_fail_tests.rs
@@ -0,0 +1,118 @@
+// Copyright 2018 The Fuchsia Authors
+//
+// Licensed under the 2-Clause BSD License , Apache License, Version 2.0
+// , or the MIT
+// license , at your option.
+// This file may not be copied, modified, or distributed except according to
+// those terms.
+
+//! Code that should fail to compile during the post-monomorphization compiler
+//! pass.
+//!
+//! Due to [a limitation with the `trybuild` crate][trybuild-issue], we cannot
+//! use our UI testing framework to test compilation failures that are
+//! encountered after monomorphization has completed. This module has one item
+//! for each such test we would prefer to have as a UI test, with the code in
+//! question appearing as a rustdoc example which is marked with `compile_fail`.
+//! This has the effect of causing doctests to fail if any of these examples
+//! compile successfully.
+//!
+//! This is very much a hack and not a complete replacement for UI tests - most
+//! notably because this only provides a single "compile vs fail" bit of
+//! information, but does not allow us to depend upon the specific error that
+//! causes compilation to fail.
+//!
+//! [trybuild-issue]: https://github.com/dtolnay/trybuild/issues/241
+
+// Miri doesn't detect post-monomorphization failures as compile-time failures,
+// but instead as runtime failures.
+#![cfg(not(miri))]
+
+/// ```compile_fail
+/// use core::cell::{Ref, RefCell};
+///
+/// let refcell = RefCell::new([0u8, 1, 2, 3]);
+/// let core_ref = refcell.borrow();
+/// let core_ref = Ref::map(core_ref, |bytes| &bytes[..]);
+///
+/// // `zc_ref` now stores `core_ref` internally.
+/// let zc_ref = zerocopy::Ref::<_, u32>::new(core_ref).unwrap();
+///
+/// // This causes `core_ref` to get dropped and synthesizes a Rust
+/// // reference to the memory `core_ref` was pointing at.
+/// let rust_ref = zc_ref.into_ref();
+///
+/// // UB!!! This mutates `rust_ref`'s referent while it's alive.
+/// *refcell.borrow_mut() = [0, 0, 0, 0];
+///
+/// println!("{}", rust_ref);
+/// ```
+#[allow(unused)]
+const REFCELL_REF_INTO_REF: () = ();
+
+/// ```compile_fail
+/// use core::cell::{RefCell, RefMut};
+///
+/// let refcell = RefCell::new([0u8, 1, 2, 3]);
+/// let core_ref_mut = refcell.borrow_mut();
+/// let core_ref_mut = RefMut::map(core_ref_mut, |bytes| &mut bytes[..]);
+///
+/// // `zc_ref` now stores `core_ref_mut` internally.
+/// let zc_ref = zerocopy::Ref::<_, u32>::new(core_ref_mut).unwrap();
+///
+/// // This causes `core_ref_mut` to get dropped and synthesizes a Rust
+/// // reference to the memory `core_ref_mut` was pointing at.
+/// let rust_ref_mut = zc_ref.into_mut();
+///
+/// // UB!!! This mutates `rust_ref_mut`'s referent while it's alive.
+/// *refcell.borrow_mut() = [0, 0, 0, 0];
+///
+/// println!("{}", rust_ref_mut);
+/// ```
+#[allow(unused)]
+const REFCELL_REFMUT_INTO_MUT: () = ();
+
+/// ```compile_fail
+/// use core::cell::{Ref, RefCell};
+///
+/// let refcell = RefCell::new([0u8, 1, 2, 3]);
+/// let core_ref = refcell.borrow();
+/// let core_ref = Ref::map(core_ref, |bytes| &bytes[..]);
+///
+/// // `zc_ref` now stores `core_ref` internally.
+/// let zc_ref = zerocopy::Ref::<_, [u16]>::new_slice(core_ref).unwrap();
+///
+/// // This causes `core_ref` to get dropped and synthesizes a Rust
+/// // reference to the memory `core_ref` was pointing at.
+/// let rust_ref = zc_ref.into_slice();
+///
+/// // UB!!! This mutates `rust_ref`'s referent while it's alive.
+/// *refcell.borrow_mut() = [0, 0, 0, 0];
+///
+/// println!("{:?}", rust_ref);
+/// ```
+#[allow(unused)]
+const REFCELL_REFMUT_INTO_SLICE: () = ();
+
+/// ```compile_fail
+/// use core::cell::{RefCell, RefMut};
+///
+/// let refcell = RefCell::new([0u8, 1, 2, 3]);
+/// let core_ref_mut = refcell.borrow_mut();
+/// let core_ref_mut = RefMut::map(core_ref_mut, |bytes| &mut bytes[..]);
+///
+/// // `zc_ref` now stores `core_ref_mut` internally.
+/// let zc_ref = zerocopy::Ref::<_, [u16]>::new_slice(core_ref_mut).unwrap();
+///
+/// // This causes `core_ref_mut` to get dropped and synthesizes a Rust
+/// // reference to the memory `core_ref_mut` was pointing at.
+/// let rust_ref_mut = zc_ref.into_mut_slice();
+///
+/// // UB!!! This mutates `rust_ref_mut`'s referent while it's alive.
+/// *refcell.borrow_mut() = [0, 0, 0, 0];
+///
+/// println!("{:?}", rust_ref_mut);
+/// ```
+#[allow(unused)]
+const REFCELL_REFMUT_INTO_MUT_SLICE: () = ();
diff --git a/zerocopy-derive/Cargo.toml.crates-io b/zerocopy-derive/Cargo.toml
similarity index 100%
rename from zerocopy-derive/Cargo.toml.crates-io
rename to zerocopy-derive/Cargo.toml
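
For readers unfamiliar with the mechanism the patch relies on: an associated `const` whose initializer panics is only evaluated once the constant is referenced from monomorphized code, which is exactly what `assert!(B::INTO_REF_INTO_MUT_ARE_SOUND)` forces. The resulting error therefore appears after monomorphization, which is why it cannot be captured by `trybuild` UI tests and is instead exercised through the `compile_fail` doctests above. The sketch below is a standalone illustration of that pattern, not code from this patch or from zerocopy; the names `Marker`, `Allowed`, `Forbidden`, and `check` are hypothetical, and it assumes a toolchain that allows `panic!` in constant initializers (Rust 1.57 or later).

```rust
// Hypothetical, minimal sketch of a post-monomorphization compile error
// driven by an associated const, mirroring the `INTO_REF_INTO_MUT_ARE_SOUND`
// approach above. None of these names are zerocopy APIs.
trait Marker {
    const IS_ALLOWED: bool;
}

struct Allowed;

// Carries a lifetime parameter, loosely mirroring `core::cell::Ref<'a, [u8]>`.
#[allow(dead_code)]
struct Forbidden<'a>(&'a [u8]);

impl Marker for Allowed {
    const IS_ALLOWED: bool = true;
}

impl<'a> Marker for Forbidden<'a> {
    // The initializer panics, but it is only evaluated when this impl's
    // constant is referenced from monomorphized code, so the defining crate
    // still compiles on its own.
    const IS_ALLOWED: bool = panic!("`Forbidden` may not be passed to `check`");
}

fn check<M: Marker>(_: M) {
    // Referencing the constant forces its evaluation for the concrete `M`;
    // for `Forbidden`, that evaluation panics and the build fails, but only
    // once this function is actually instantiated with `Forbidden`.
    assert!(M::IS_ALLOWED);
}

fn main() {
    check(Allowed); // compiles and runs
    // check(Forbidden(&[])); // uncommenting this makes the build fail
}
```

As with the patch itself, the trade-off is that the error carries a clear panic message but is only reported when the offending combination of types is actually instantiated.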