Rollup merge of #96167 - CAD97:weak-dlsym-less-ptr-crime, r=thomcc

Replace sys/unix/weak AtomicUsize with AtomicPtr

Should fix #96163. Can't easily test on Windows though...
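For context, the change keeps the same three-state cache as before but stores it in the pointer itself: address 1 means "not looked up yet", null means "dlsym found nothing", and anything else is the resolved function pointer, so the value never round-trips through usize and keeps its provenance. Below is a minimal, standalone sketch of that encoding, not the std code: the CachedSym type and the resolve closure are invented for illustration, a plain int-to-pointer cast stands in for the strict-provenance helper ptr::invalid_mut(1) the patch uses, and an Acquire load replaces the Relaxed-load-plus-fence scheme in the real implementation.

use std::ffi::c_void;
use std::sync::atomic::{AtomicPtr, Ordering};

struct CachedSym {
    // null = symbol absent, address 1 = not looked up yet, otherwise = resolved.
    func: AtomicPtr<c_void>,
}

impl CachedSym {
    fn new() -> Self {
        // Sentinel "not yet looked up"; the patch uses ptr::invalid_mut(1) here.
        CachedSym { func: AtomicPtr::new(1 as *mut c_void) }
    }

    fn get(&self, resolve: impl FnOnce() -> *mut c_void) -> Option<*mut c_void> {
        // Acquire for simplicity; the patched code loads Relaxed and fences later.
        match self.func.load(Ordering::Acquire) {
            p if p as usize == 1 => {
                // First use: perform the (possibly failing) lookup and publish it.
                let p = resolve();
                self.func.store(p, Ordering::Release);
                if p.is_null() { None } else { Some(p) }
            }
            p if p.is_null() => None, // previously looked up, symbol absent
            p => Some(p),             // previously looked up, symbol present
        }
    }
}

fn main() {
    let cached = CachedSym::new();
    // Simulate a failed lookup: the first call stores null, later calls
    // return None without running the resolver again.
    assert!(cached.get(|| std::ptr::null_mut()).is_none());
    assert!(cached.get(|| unreachable!()).is_none());
}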
commit 53f028d790
Dylan DPC, 2022-04-20 18:26:03 +02:00, committed by GitHub

library/std/src/sys/unix/weak.rs
@@ -25,7 +25,8 @@
 use crate::ffi::CStr;
 use crate::marker::PhantomData;
 use crate::mem;
-use crate::sync::atomic::{self, AtomicUsize, Ordering};
+use crate::ptr;
+use crate::sync::atomic::{self, AtomicPtr, Ordering};
 
 // We can use true weak linkage on ELF targets.
 #[cfg(not(any(target_os = "macos", target_os = "ios")))]
@@ -83,13 +84,13 @@ pub(crate) macro dlsym {
 }
 
 pub(crate) struct DlsymWeak<F> {
     name: &'static str,
-    addr: AtomicUsize,
+    func: AtomicPtr<libc::c_void>,
     _marker: PhantomData<F>,
 }
 
 impl<F> DlsymWeak<F> {
     pub(crate) const fn new(name: &'static str) -> Self {
-        DlsymWeak { name, addr: AtomicUsize::new(1), _marker: PhantomData }
+        DlsymWeak { name, func: AtomicPtr::new(ptr::invalid_mut(1)), _marker: PhantomData }
     }
 
     #[inline]
@@ -97,11 +98,11 @@ impl<F> DlsymWeak<F> {
         unsafe {
             // Relaxed is fine here because we fence before reading through the
             // pointer (see the comment below).
-            match self.addr.load(Ordering::Relaxed) {
-                1 => self.initialize(),
-                0 => None,
-                addr => {
-                    let func = mem::transmute_copy::<usize, F>(&addr);
+            match self.func.load(Ordering::Relaxed) {
+                func if func.addr() == 1 => self.initialize(),
+                func if func.is_null() => None,
+                func => {
+                    let func = mem::transmute_copy::<*mut libc::c_void, F>(&func);
                     // The caller is presumably going to read through this value
                     // (by calling the function we've dlsymed). This means we'd
                     // need to have loaded it with at least C11's consume
@@ -129,25 +130,22 @@ impl<F> DlsymWeak<F> {
     // Cold because it should only happen during first-time initialization.
     #[cold]
     unsafe fn initialize(&self) -> Option<F> {
-        assert_eq!(mem::size_of::<F>(), mem::size_of::<usize>());
+        assert_eq!(mem::size_of::<F>(), mem::size_of::<*mut libc::c_void>());
 
         let val = fetch(self.name);
         // This synchronizes with the acquire fence in `get`.
-        self.addr.store(val, Ordering::Release);
+        self.func.store(val, Ordering::Release);
 
-        match val {
-            0 => None,
-            addr => Some(mem::transmute_copy::<usize, F>(&addr)),
-        }
+        if val.is_null() { None } else { Some(mem::transmute_copy::<*mut libc::c_void, F>(&val)) }
     }
 }
 
-unsafe fn fetch(name: &str) -> usize {
+unsafe fn fetch(name: &str) -> *mut libc::c_void {
     let name = match CStr::from_bytes_with_nul(name.as_bytes()) {
         Ok(cstr) => cstr,
-        Err(..) => return 0,
+        Err(..) => return ptr::null_mut(),
     };
-    libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr()) as usize
+    libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr())
 }
 
 #[cfg(not(any(target_os = "linux", target_os = "android")))]
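
For completeness, a hedged usage sketch of the same pattern outside std, assuming the external libc crate: dlsym through RTLD_DEFAULT, a null check, and only then a transmute_copy from *mut c_void to the typed function pointer, mirroring fetch and get above. The symbol looked up (getentropy) and its signature are only an example and are not taken from the patch.

use std::ffi::{c_void, CStr};
use std::mem;

// Mirrors the patched `fetch`: ask the dynamic linker for a symbol in any
// already-loaded object; the result is a raw pointer, null if absent.
unsafe fn lookup(name: &CStr) -> *mut c_void {
    libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr())
}

fn main() {
    // Example symbol only: getentropy(2) exists on recent glibc, macOS and the
    // BSDs, but the same pattern works for any optional libc function.
    type GetEntropy = unsafe extern "C" fn(*mut c_void, libc::size_t) -> libc::c_int;

    let sym = unsafe { lookup(CStr::from_bytes_with_nul(b"getentropy\0").unwrap()) };
    if sym.is_null() {
        println!("getentropy is not available at runtime");
        return;
    }

    // Same trick as the diff: convert the data pointer into the typed function
    // pointer only after the null check.
    let getentropy: GetEntropy = unsafe { mem::transmute_copy::<*mut c_void, GetEntropy>(&sym) };
    let mut buf = [0u8; 16];
    let rc = unsafe { getentropy(buf.as_mut_ptr().cast(), buf.len()) };
    println!("getentropy returned {rc}, buf = {buf:?}");
}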