Commit

Final adjustments and remove metadata functions
celinval committed Oct 16, 2024
1 parent 5fc28bd commit 18b61ff
Showing 2 changed files with 77 additions and 231 deletions.
234 changes: 77 additions & 157 deletions library/core/src/intrinsics.rs
@@ -63,11 +63,11 @@
)]
#![allow(missing_docs)]

use safety::{ensures, requires};
use crate::marker::DiscriminantKind;
use crate::marker::Tuple;
use crate::ptr;
use crate::ub_checks;
use safety::{ensures, requires};

#[cfg(kani)]
use crate::kani;
@@ -77,12 +77,19 @@ pub mod simd;

// These imports are used for simplifying intra-doc links
#[allow(unused_imports)]
#[cfg(all(target_has_atomic = "8", target_has_atomic = "32", target_has_atomic = "ptr"))]
#[cfg(all(
target_has_atomic = "8",
target_has_atomic = "32",
target_has_atomic = "ptr"
))]
use crate::sync::atomic::{self, AtomicBool, AtomicI32, AtomicIsize, AtomicU32, Ordering};

#[stable(feature = "drop_in_place", since = "1.8.0")]
#[rustc_allowed_through_unstable_modules]
#[deprecated(note = "no longer an intrinsic - use `ptr::drop_in_place` directly", since = "1.52.0")]
#[deprecated(
note = "no longer an intrinsic - use `ptr::drop_in_place` directly",
since = "1.52.0"
)]
#[inline]
pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
// SAFETY: see `ptr::drop_in_place`
@@ -1032,7 +1039,11 @@ pub const fn unlikely(b: bool) -> bool {
#[miri::intrinsic_fallback_is_spec]
#[inline]
pub fn select_unpredictable<T>(b: bool, true_val: T, false_val: T) -> T {
if b { true_val } else { false_val }
if b {
true_val
} else {
false_val
}
}
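
For illustration (this sketch is not part of the commit): `select_unpredictable` returns the same value as the `if`/`else` in its body, but hints to codegen that `b` is hard to predict, so a branchless conditional move may be preferred over a branch. A minimal usage sketch on nightly, assuming only the `core_intrinsics` feature gate; the helper name below is hypothetical:

#![allow(internal_features)]
#![feature(core_intrinsics)]

// Hypothetical helper: same result as `if x > limit { limit } else { x }`,
// but the unpredictability hint discourages emitting a conditional branch.
fn clamp_to_limit(x: u32, limit: u32) -> u32 {
    core::intrinsics::select_unpredictable(x > limit, limit, x)
}

fn main() {
    assert_eq!(clamp_to_limit(7, 5), 5);
    assert_eq!(clamp_to_limit(3, 5), 3);
}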

extern "rust-intrinsic" {
@@ -2937,7 +2948,7 @@ pub const fn is_val_statically_known<T: Copy>(_arg: T) -> bool {
#[requires(ub_checks::can_dereference(x) && ub_checks::can_write(x))]
#[requires(ub_checks::can_dereference(y) && ub_checks::can_write(y))]
#[requires(x.addr() != y.addr() || core::mem::size_of::<T>() == 0)]
#[requires(ub_checks::is_nonoverlapping(x as *const (), x as *const (), size_of::<T>(), 1))]
#[requires(ub_checks::is_nonoverlapping(x as *const (), y as *const (), size_of::<T>(), 1))]
#[ensures(|_| ub_checks::can_dereference(x) && ub_checks::can_dereference(y))]
pub const unsafe fn typed_swap<T>(x: *mut T, y: *mut T) {
// SAFETY: The caller provided single non-overlapping items behind
@@ -3013,9 +3024,9 @@ pub const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize)
#[unstable(feature = "core_intrinsics", issue = "none")]
#[rustc_intrinsic]
#[rustc_intrinsic_must_be_overridden]
// FIXME(kani): Cannot verify intrinsics contract yet.
// <https://github.com/model-checking/kani/issues/3345>
#[requires(ub_checks::can_dereference(_ptr as *const crate::ptr::DynMetadata<dyn crate::any::Any>))]
// VTable pointers must be valid for dereferencing at least 3 `usize` (size, alignment and drop):
// <https://github.com/rust-lang/unsafe-code-guidelines/issues/166>
#[requires(ub_checks::can_dereference(_ptr as *const [usize; 3]))]
pub unsafe fn vtable_size(_ptr: *const ()) -> usize {
unreachable!()
}
@@ -3029,9 +3040,9 @@ pub unsafe fn vtable_size(_ptr: *const ()) -> usize {
#[unstable(feature = "core_intrinsics", issue = "none")]
#[rustc_intrinsic]
#[rustc_intrinsic_must_be_overridden]
// FIXME(kani): Cannot verify intrinsics contract yet.
// <https://github.com/model-checking/kani/issues/3345>
#[requires(ub_checks::can_dereference(_ptr as *const crate::ptr::DynMetadata<dyn crate::any::Any>))]
// VTable pointers must be valid for dereferencing at least 3 `usize` (size, alignment and drop):
// <https://github.com/rust-lang/unsafe-code-guidelines/issues/166>
#[requires(ub_checks::can_dereference(_ptr as *const [usize; 3]))]
pub unsafe fn vtable_align(_ptr: *const ()) -> usize {
unreachable!()
}
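
For illustration (not part of the commit): the new `[usize; 3]` precondition conservatively models the vtable header — drop-in-place function, size, and alignment — that `size_of_val` and `align_of_val` read through for trait objects, per the linked unsafe-code-guidelines issue. A small sketch of that relationship using only stable APIs; the concrete values assume the layout of `u32`:

use std::any::Any;

fn main() {
    let value: Box<dyn Any> = Box::new(42_u32);
    // `&*value` is a fat pointer: (data pointer, vtable pointer). The size and
    // alignment below are looked up through the vtable at runtime, which is why
    // the contract above requires the vtable pointer itself to be dereferenceable.
    let object: &dyn Any = &*value;
    assert_eq!(std::mem::size_of_val(object), 4);
    assert_eq!(std::mem::align_of_val(object), 4);
}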
@@ -3116,14 +3127,6 @@ pub const fn variant_count<T>() -> usize {
#[rustc_const_unstable(feature = "const_size_of_val", issue = "46571")]
#[rustc_intrinsic]
#[rustc_intrinsic_must_be_overridden]
#[cfg_attr(not(kani), requires(matches!(
<T as Pointee>::Metadata::map_dyn(crate::ptr::metadata(_val)::metadata(),
|dyn_meta| { ub_checks::can_dereference(dyn_meta)}),
None | Some(true))))]
#[cfg_attr(not(kani), requires(matches!(
<T as Pointee>::Metadata::map_len(crate::ptr::metadata(_val)::metadata(),
|_| { ub_checks::can_dereference(_val)}),
None | Some(true))))]
pub const unsafe fn size_of_val<T: ?Sized>(_ptr: *const T) -> usize {
unreachable!()
}
@@ -3324,7 +3327,6 @@ pub const fn ptr_metadata<P: ptr::Pointee<Metadata = M> + ?Sized, M>(_ptr: *cons
&& ub_checks::can_write(core::ptr::slice_from_raw_parts_mut(dst, count))
&& ub_checks::is_nonoverlapping(src as *const (), dst as *const (), size_of::<T>(), count))]
#[ensures(|_| { check_copy_untyped(src, dst, count)})]
#[cfg_attr(kani, kani::modifies(crate::ptr::slice_from_raw_parts(dst, count)))]
pub const unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
extern "rust-intrinsic" {
#[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")]
@@ -3517,7 +3519,7 @@ pub const unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {
#[rustc_diagnostic_item = "ptr_write_bytes"]
#[requires(!count.overflowing_mul(size_of::<T>()).1
&& ub_checks::can_write(core::ptr::slice_from_raw_parts_mut(dst, count)))]
#[requires(count > 0 || ub_checks::is_aligned_and_not_null(dst as *const (), align_of::<T>()))]
#[requires(ub_checks::is_aligned_and_not_null(dst as *const (), align_of::<T>()))]
#[ensures(|_|
ub_checks::can_dereference(crate::ptr::slice_from_raw_parts(dst as *const u8, count * size_of::<T>())))]
#[cfg_attr(kani, kani::modifies(crate::ptr::slice_from_raw_parts(dst, count)))]
@@ -3551,10 +3553,10 @@ pub const unsafe fn write_bytes<T>(dst: *mut T, val: u8, count: usize) {
fn check_copy_untyped<T>(src: *const T, dst: *mut T, count: usize) -> bool {
#[cfg(kani)]
if count > 0 {
let byte = kani::any_where(| sz: &usize | *sz < size_of::< T >());
let elem = kani::any_where(| val: &usize | *val < count);
let src_data = src as * const u8;
let dst_data = unsafe { dst.add(elem) } as * const u8;
let byte = kani::any_where(|sz: &usize| *sz < size_of::<T>());
let elem = kani::any_where(|val: &usize| *val < count);
let src_data = src as *const u8;
let dst_data = unsafe { dst.add(elem) } as *const u8;
ub_checks::can_dereference(unsafe { src_data.add(byte) })
== ub_checks::can_dereference(unsafe { dst_data.add(byte) })
} else {
@@ -3587,163 +3589,81 @@ pub(crate) const fn miri_promise_symbolic_alignment(ptr: *const (), align: usize
}

#[cfg(kani)]
#[unstable(feature="kani", issue="none")]
#[unstable(feature = "kani", issue = "none")]
mod verify {
use core::{cmp, fmt};
use core::ptr::addr_of_mut;
use core::mem::MaybeUninit;
use super::*;
use crate::kani;
use core::mem::MaybeUninit;
use core::ptr::addr_of_mut;
use core::{cmp, fmt};
use kani::{AllocationStatus, Arbitrary, ArbitraryPointer, PointerGenerator};

#[kani::proof_for_contract(typed_swap)]
pub fn check_typed_swap_u8() {
check_swap::<u8>()
run_with_arbitrary_ptrs::<u8>(|x, y| unsafe { typed_swap(x, y) });
}

#[kani::proof_for_contract(typed_swap)]
pub fn check_typed_swap_char() {
check_swap::<char>()
run_with_arbitrary_ptrs::<char>(|x, y| unsafe { typed_swap(x, y) });
}

#[kani::proof_for_contract(typed_swap)]
pub fn check_typed_swap_non_zero() {
check_swap::<core::num::NonZeroI32>()
}

pub fn check_swap<T: kani::Arbitrary + Copy + cmp::PartialEq + fmt::Debug>() {
let mut x = kani::any::<T>();
let old_x = x;
let mut y = kani::any::<T>();
let old_y = y;

unsafe { typed_swap(&mut x, &mut y) };
assert_eq!(y, old_x);
assert_eq!(x, old_y);
run_with_arbitrary_ptrs::<core::num::NonZeroI32>(|x, y| unsafe { typed_swap(x, y) });
}

#[derive(Copy, Clone, Debug, PartialEq, Eq, kani::Arbitrary)]
enum AllocationStatus {
/// Dangling pointers
Dangling,
/// Pointer to dead object
DeadObj,
/// Null pointers
Null,
/// In bounds pointer (it may be unaligned)
InBounds,
/// Out of bounds
OutBounds,
}

struct PointerGenerator<T, const BUF_LEN: usize> {
// Internal allocation that may be used to generate valid pointers.
buf: MaybeUninit<[T; BUF_LEN]>,
}

struct ArbitraryPointer<T> {
pub ptr: *mut T,
pub status: AllocationStatus,
pub is_initialized: bool,
}

impl<T: kani::Arbitrary, const BUF_LEN: usize> PointerGenerator<T, BUF_LEN> {
const SZ : usize = BUF_LEN * size_of::<T>();

pub fn new() -> Self {
PointerGenerator {
buf: MaybeUninit::uninit()
}
}

/// Create another ArbitraryPointer that may have the same precedence as the `other` pointer.
///
/// I.e.: Non-deterministically derive a pointer from the given pointer or create a new
/// arbitrary pointer.
pub fn generate_ptr(&mut self) -> ArbitraryPointer<T> {
// Create an arbitrary pointer, but leave `ptr` as unset for now.
let mut arbitrary = ArbitraryPointer {
ptr: ptr::null_mut::<T>(),
is_initialized: false,
status: kani::any(),
};

let buf_ptr = addr_of_mut!(self.buf) as *mut T;

// Offset is used to potentially generate unaligned pointer.
let offset = kani::any_where(|b: &usize| *b < size_of::<T>());
let ptr = match arbitrary.status {
AllocationStatus::Dangling => {
crate::ptr::dangling_mut::<T>().wrapping_add(offset)
}
AllocationStatus::DeadObj => {
let mut obj: T = kani::any();
&mut obj as *mut _
}
AllocationStatus::Null => {
crate::ptr::null_mut::<T>()
}
AllocationStatus::InBounds => {
let pos = kani::any_where(|i: &usize| *i < (BUF_LEN - 1));
let ptr: *mut T = buf_ptr.wrapping_add(pos).wrapping_byte_add(offset);
if kani::any() {
arbitrary.is_initialized = true;
// This should be in bounds of arbitrary.alloc.
unsafe { ptr.write_unaligned(kani::any()) };
}
ptr
}
AllocationStatus::OutBounds => {
if kani::any() {
buf_ptr.wrapping_add(BUF_LEN).wrapping_byte_sub(offset)
} else {
buf_ptr.wrapping_add(BUF_LEN).wrapping_byte_add(offset)
}
}
};

arbitrary
}
}


#[kani::proof_for_contract(copy)]
fn check_copy() {
let mut generator = PointerGenerator::<char, 10>::new();
let src = generator.generate_ptr();
let dst = if kani::any() {
generator.generate_ptr()
} else {
PointerGenerator::<char, 10>::new().generate_ptr()
};
// Skip dangling for now since it makes Kani contract panic.
kani::assume(src.status != AllocationStatus::Dangling);
kani::assume(dst.status != AllocationStatus::Dangling);
unsafe { copy(src.ptr, dst.ptr, kani::any()) }
run_with_arbitrary_ptrs::<char>(|src, dst| unsafe { copy(src, dst, kani::any()) });
}

#[kani::proof_for_contract(copy_nonoverlapping)]
fn check_copy_nonoverlapping() {
let mut generator = PointerGenerator::<char, 10>::new();
let src = generator.generate_ptr();
// Destination may or may not have the same provenance as src.
let dst = if kani::any() {
generator.generate_ptr()
} else {
PointerGenerator::<char, 10>::new().generate_ptr()
};
// Skip dangling for now since it makes Kani contract panic.
kani::assume(src.status != AllocationStatus::Dangling);
kani::assume(dst.status != AllocationStatus::Dangling);
unsafe { copy_nonoverlapping(src.ptr, dst.ptr, kani::any()) }
run_with_arbitrary_ptrs::<char>(|src, dst| unsafe {
copy_nonoverlapping(src, dst, kani::any())
});
}

#[kani::proof_for_contract(write_bytes)]
fn check_write_bytes() {
let mut generator = PointerGenerator::<char, 10>::new();
let src = generator.generate_ptr();
kani::assume(src.status != AllocationStatus::Dangling);
// Skip dangling for now since it makes Kani contract panic.
// Verify this case in a separate harness.
unsafe { write_bytes(src.ptr, kani::any(), kani::any()) }
let mut generator = PointerGenerator::<100>::new();
let ArbitraryPointer {
ptr: src,
status: src_status,
..
} = generator.any_alloc_status::<char>();
kani::assume(supported_status(src_status));
unsafe { write_bytes(src, kani::any(), kani::any()) };
}

fn run_with_arbitrary_ptrs<T: Arbitrary>(harness: impl Fn(*mut T, *mut T)) {
let mut generator1 = PointerGenerator::<100>::new();
let mut generator2 = PointerGenerator::<100>::new();
let ArbitraryPointer {
ptr: src,
status: src_status,
..
} = generator1.any_alloc_status::<T>();
let ArbitraryPointer {
ptr: dst,
status: dst_status,
..
} = if kani::any() {
generator1.any_alloc_status::<T>()
} else {
generator2.any_alloc_status::<T>()
};
kani::assume(supported_status(src_status));
kani::assume(supported_status(dst_status));
harness(src, dst);
}

/// Return whether the current status is supported by Kani's contract.
///
/// Kani's memory predicates currently don't support pointers to dangling or dead allocations.
/// Thus, we have to explicitly exclude those cases.
fn supported_status(status: AllocationStatus) -> bool {
status != AllocationStatus::Dangling && status != AllocationStatus::DeadObject
}
}
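
For illustration (not part of the commit), here is a minimal sketch of the contract-plus-harness pattern used throughout this file, written with hypothetical names and Kani's `requires`/`ensures`/`proof_for_contract` attributes; in the standard library the attributes are routed through the `safety` crate rather than spelled with `cfg_attr` directly:

// Hypothetical example; verifying contracts requires Kani's unstable
// function-contracts feature (assumed flag: `cargo kani -Z function-contracts
// --harness check_quotient`).
#[cfg_attr(kani, kani::requires(divisor != 0))]
#[cfg_attr(kani, kani::ensures(|result: &u32| *result <= dividend))]
fn quotient(dividend: u32, divisor: u32) -> u32 {
    dividend / divisor
}

// The harness exercises the function for arbitrary inputs satisfying the
// precondition; Kani then checks the body and the `ensures` clause.
#[cfg(kani)]
#[kani::proof_for_contract(quotient)]
fn check_quotient() {
    let _ = quotient(kani::any(), kani::any());
}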