1use crate::update_counts;
5use std::{
6 mem::{ManuallyDrop, size_of},
7 rc::Rc,
8};
9
#[allow(clippy::cast_ptr_alignment)]
#[unsafe(no_mangle)]
pub extern "C" fn __quantum__rt__tuple_create(size: u64) -> *mut *const Vec<u8> {
    // Allocates a QIR tuple with layout: [header: *const Vec<u8>][data: `size` bytes].
    // The header stores an `Rc` pointer to the backing `Vec` itself (a
    // self-referential allocation); the caller receives a pointer just past the
    // header, where the tuple data begins.
    //
    // Panics if `size` does not fit in `usize` on this platform.
    let mut mem = vec![
        0_u8;
        <usize as std::convert::TryFrom<u64>>::try_from(size)
            .expect("Tuple size too large for `usize` type on this platform.")
            + size_of::<*const Vec<u8>>()
    ];

    unsafe {
        let header = mem.as_mut_ptr().cast::<*const Vec<u8>>();
        // SAFETY: `header` points at the first pointer-width bytes of the
        // buffer, which is in-bounds. The buffer comes from a `Vec<u8>` and is
        // only guaranteed 1-byte aligned, so the pointer-sized header must be
        // written with `write_unaligned` (a plain `*header = …` would be an
        // unaligned write, which is undefined behavior). Moving `mem` into the
        // `Rc` does not move its heap buffer, so `header` remains valid.
        header.write_unaligned(Rc::into_raw(Rc::new(mem)));
        header.wrapping_add(1)
    }
}
26
#[allow(clippy::cast_ptr_alignment)]
#[unsafe(no_mangle)]
pub unsafe extern "C" fn __quantum__rt__tuple_copy(
    raw_tup: *mut *const Vec<u8>,
    force: bool,
) -> *mut *const Vec<u8> {
    // Copies a tuple, or shares it when a copy is unnecessary.
    //
    // # Safety
    // `raw_tup` must be a pointer previously returned by
    // `__quantum__rt__tuple_create` (or this function) that has not been freed.
    unsafe {
        // SAFETY: the owning `Rc` lives in the header one pointer-width before
        // the tuple data. The backing buffer is a `Vec<u8>` with no alignment
        // guarantee, so the header must be read with `read_unaligned`.
        // `ManuallyDrop` keeps this temporary borrow from decrementing the
        // strong count when it goes out of scope.
        let rc = ManuallyDrop::new(Rc::from_raw(raw_tup.wrapping_sub(1).read_unaligned()));
        if force || Rc::weak_count(&rc) > 0 {
            // Forced copy, or the tuple is aliased (alias counts are tracked as
            // weak counts): clone the whole buffer and give it its own header.
            let mut copy = rc.as_ref().clone();
            let header = copy.as_mut_ptr().cast::<*const Vec<u8>>();
            // SAFETY: same unaligned-header reasoning as in `tuple_create`.
            header.write_unaligned(Rc::into_raw(Rc::new(copy)));
            header.wrapping_add(1)
        } else {
            // No copy needed: hand out the same tuple with an extra strong
            // reference. `into_raw` intentionally leaks the clone's count so it
            // is balanced by a later reference-count decrement.
            let _ = Rc::into_raw(Rc::clone(&rc));
            raw_tup
        }
    }
}
46
47#[unsafe(no_mangle)]
48pub unsafe extern "C" fn __quantum__rt__tuple_update_reference_count(
49 raw_tup: *mut *const Vec<u8>,
50 update: i32,
51) {
52 unsafe {
53 update_counts(*raw_tup.wrapping_sub(1), update, false);
54 }
55}
56
57#[unsafe(no_mangle)]
58pub unsafe extern "C" fn __quantum__rt__tuple_update_alias_count(
59 raw_tup: *mut *const Vec<u8>,
60 update: i32,
61) {
62 unsafe {
63 update_counts(*raw_tup.wrapping_sub(1), update, true);
64 }
65}
66
#[cfg(test)]
mod tests {
    use super::*;

    // These tests rely on the tuple layout produced by `tuple_create`: an
    // `Rc<Vec<u8>>` pointer is stored one pointer-width before the tuple data
    // pointer handed to callers.

    #[test]
    fn test_tuple_create() {
        // Allocate a tuple with room for a u32, write through it, then drop the
        // sole reference so the backing allocation is released.
        let tup = __quantum__rt__tuple_create(size_of::<u32>() as u64);
        unsafe {
            *tup.cast::<u32>() = 42;
            __quantum__rt__tuple_update_reference_count(tup, -1);
        }
    }

    #[test]
    fn test_tuple_update_reference_count() {
        let tup = __quantum__rt__tuple_create(size_of::<u32>() as u64);
        unsafe {
            // Peek at the owning Rc stored in the header; `ManuallyDrop`
            // prevents this temporary borrow from decrementing the strong count
            // when it goes out of scope.
            let rc = ManuallyDrop::new(Rc::from_raw(*tup.cast::<*const Vec<u8>>().wrapping_sub(1)));
            assert_eq!(Rc::strong_count(&rc), 1);
            __quantum__rt__tuple_update_reference_count(tup, 2);
            assert_eq!(Rc::strong_count(&rc), 3);
            __quantum__rt__tuple_update_reference_count(tup, -2);
            assert_eq!(Rc::strong_count(&rc), 1);
            // Final decrement frees the tuple.
            __quantum__rt__tuple_update_reference_count(tup, -1);
        }
    }

    #[test]
    fn test_tuple_update_alias_count() {
        let tup = __quantum__rt__tuple_create(size_of::<u32>() as u64);
        unsafe {
            let rc = ManuallyDrop::new(Rc::from_raw(*tup.cast::<*const Vec<u8>>().wrapping_sub(1)));
            assert_eq!(Rc::strong_count(&rc), 1);
            assert_eq!(Rc::weak_count(&rc), 0);
            // Alias counts are observable as weak counts on the owning Rc.
            __quantum__rt__tuple_update_alias_count(tup, 2);
            assert_eq!(Rc::weak_count(&rc), 2);
            __quantum__rt__tuple_update_alias_count(tup, -2);
            assert_eq!(Rc::weak_count(&rc), 0);
            __quantum__rt__tuple_update_reference_count(tup, -1);
        }
    }

    #[test]
    fn test_tuple_copy() {
        let tup1 = __quantum__rt__tuple_create(size_of::<u32>() as u64);
        unsafe {
            *tup1.cast::<u32>() = 42;
            // Unforced copy of an unaliased tuple shares the same allocation.
            let tup2 = __quantum__rt__tuple_copy(tup1, false);
            assert_eq!(tup2, tup1);
            assert_eq!(*tup2.cast::<u32>(), 42);
            __quantum__rt__tuple_update_reference_count(tup2, -1);
            assert_eq!(*tup1.cast::<u32>(), 42);
            // Forced copy always clones the backing buffer.
            let tup3 = __quantum__rt__tuple_copy(tup1, true);
            assert_ne!(tup3, tup1);
            assert_eq!(*tup3.cast::<u32>(), 42);
            __quantum__rt__tuple_update_reference_count(tup3, -1);
            assert_eq!(*tup1.cast::<u32>(), 42);
            // With a nonzero alias count, even an unforced copy must clone.
            __quantum__rt__tuple_update_alias_count(tup1, 1);
            let tup4 = __quantum__rt__tuple_copy(tup1, false);
            assert_ne!(tup4, tup1);
            assert_eq!(*tup4.cast::<u32>(), 42);
            __quantum__rt__tuple_update_reference_count(tup4, -1);
            assert_eq!(*tup1.cast::<u32>(), 42);
            __quantum__rt__tuple_update_alias_count(tup1, -1);
            __quantum__rt__tuple_update_reference_count(tup1, -1);
        }
    }
}