                 // We can still be zero-sized in this branch, in which case we have to
                 // return `None`.
-                if size.bytes() == 0 {
-                    // We may be reading from a static.
-                    // In order to ensure that `static FOO: Type = FOO;` causes a cycle error
-                    // instead of magically pulling *any* ZST value from the ether, we need to
-                    // actually access the referenced allocation. The caller is likely
-                    // to short-circuit on `None`, so we trigger the access here to
-                    // make sure it happens.
-                    self.get_raw(ptr.alloc_id)?;
-                    None
-                } else {
-                    Some(ptr)
-                }
+                if size.bytes() == 0 { None } else { Some(ptr) }
             }
         })
     }
         {
             Some(ptr) => ptr,
             None => {
+                if let Scalar::Ptr(ptr) = mplace.ptr {
+                    // We may be reading from a static.
+                    // In order to ensure that `static FOO: Type = FOO;` causes a cycle error
+                    // instead of magically pulling *any* ZST value from the ether, we need to
+                    // actually access the referenced allocation. Unlike in the old
+                    // location, the access happens unconditionally right here, so no
+                    // caller can short-circuit past it.
+                    self.memory.get_raw(ptr.alloc_id)?;
+                }
+                return Ok(Some(ImmTy {
+                    // zero-sized type
+                    imm: Scalar::zst().into(),
+                    layout: mplace.layout,
+                }));
--- /dev/null
+// check-pass
+
+#[derive(Copy, Clone)]
+pub struct Glfw;
+
+static mut GLFW: Option<Glfw> = None;
+pub fn new() -> Glfw {
+    unsafe {
+        if let Some(glfw) = GLFW {
+            return glfw;
+        } else {
+            todo!()
+        }
+    };
+}
+
+extern "C" {
+    static _dispatch_queue_attr_concurrent: [u8; 0];
+}
+
+static DISPATCH_QUEUE_CONCURRENT: &'static [u8; 0] =
+    unsafe { &_dispatch_queue_attr_concurrent };
+
+fn main() {
+    *DISPATCH_QUEUE_CONCURRENT;
+    new();
+}
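
For context on the cycle-error comments above: the scenario they guard against
is a zero-sized static whose initializer reads the static itself. A minimal
sketch (not part of this patch) of a program that must keep failing with a
cycle error instead of "succeeding" with a ZST pulled from the ether:

// Must be rejected with a cycle error: evaluating the initializer of `FOO`
// requires the value of `FOO` itself, even though that value is zero-sized.
static FOO: () = FOO;

fn main() {}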