1use crate::update_counts;
5use std::{
6 mem::{size_of, ManuallyDrop},
7 rc::Rc,
8};
9
/// Creates a new tuple with `size` bytes of zero-initialized, caller-visible
/// storage.
///
/// The allocation is `size` bytes plus a hidden header slot that stores a
/// leaked `Rc` over the backing buffer; the returned pointer is to the first
/// byte *after* the header. The other tuple entry points locate that header
/// one slot before the pointer they are given.
///
/// # Panics
/// Panics if `size` (plus the header slot) does not fit in `usize` on this
/// platform.
#[allow(clippy::cast_ptr_alignment)]
#[no_mangle]
pub extern "C" fn __quantum__rt__tuple_create(size: u64) -> *mut *const Vec<u8> {
    let body_len = <usize as std::convert::TryFrom<u64>>::try_from(size)
        .expect("Tuple size too large for `usize` type on this platform.");
    // checked_add: a size near `usize::MAX` must panic rather than wrap in
    // release builds — a wrapped total would allocate an undersized buffer
    // and the header write below would corrupt the heap.
    let total_len = body_len
        .checked_add(size_of::<*const Vec<u8>>())
        .expect("Tuple size too large for `usize` type on this platform.");
    let mut mem = vec![0_u8; total_len];

    unsafe {
        // The first bytes of the buffer double as storage for the Rc pointer.
        let header = mem.as_mut_ptr().cast::<*const Vec<u8>>();
        // Moving `mem` into the Rc moves only the Vec's (ptr, len, cap)
        // triple; the heap allocation `header` points into stays put.
        *header = Rc::into_raw(Rc::new(mem));
        header.wrapping_add(1)
    }
}
26
/// Copies a tuple, sharing the existing allocation when it is safe to do so.
///
/// When `force` is set, or when the tuple has outstanding aliases (the
/// runtime tracks alias counts through `Rc` weak counts), a fresh buffer is
/// allocated with the same contents and its own header. Otherwise the strong
/// count is bumped and the original pointer is returned unchanged.
///
/// # Safety
/// `raw_tup` must be a live pointer previously returned by
/// `__quantum__rt__tuple_create` (or by this function).
#[allow(clippy::cast_ptr_alignment)]
#[no_mangle]
pub unsafe extern "C" fn __quantum__rt__tuple_copy(
    raw_tup: *mut *const Vec<u8>,
    force: bool,
) -> *mut *const Vec<u8> {
    // The header one slot before the body holds the Rc over the buffer;
    // ManuallyDrop keeps this probe from decrementing the count on exit.
    let rc = ManuallyDrop::new(Rc::from_raw(*raw_tup.wrapping_sub(1)));
    if !force && Rc::weak_count(&rc) == 0 {
        // Unaliased and not forced: share by leaking one extra strong count.
        let _ = Rc::into_raw(Rc::clone(&rc));
        raw_tup
    } else {
        // Duplicate the whole buffer (header bytes included), then install a
        // fresh Rc in the duplicate's header slot.
        let mut duplicate = rc.as_ref().clone();
        let dup_header = duplicate.as_mut_ptr().cast::<*const Vec<u8>>();
        *dup_header = Rc::into_raw(Rc::new(duplicate));
        dup_header.wrapping_add(1)
    }
}
44
45#[no_mangle]
46pub unsafe extern "C" fn __quantum__rt__tuple_update_reference_count(
47 raw_tup: *mut *const Vec<u8>,
48 update: i32,
49) {
50 update_counts(*raw_tup.wrapping_sub(1), update, false);
51}
52
53#[no_mangle]
54pub unsafe extern "C" fn __quantum__rt__tuple_update_alias_count(
55 raw_tup: *mut *const Vec<u8>,
56 update: i32,
57) {
58 update_counts(*raw_tup.wrapping_sub(1), update, true);
59}
60
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_tuple_create() {
        // Create a tuple sized for a u32 and confirm the caller-visible bytes
        // are writable; the final decrement balances the reference taken at
        // creation (count semantics live in `update_counts`, defined at the
        // crate root).
        let tup = __quantum__rt__tuple_create(size_of::<u32>() as u64);
        unsafe {
            *tup.cast::<u32>() = 42;
            __quantum__rt__tuple_update_reference_count(tup, -1);
        }
    }

    #[test]
    fn test_tuple_update_reference_count() {
        let tup = __quantum__rt__tuple_create(size_of::<u32>() as u64);
        unsafe {
            // Recover the Rc stored in the hidden header (one slot before the
            // tuple body); ManuallyDrop keeps this probe from decrementing
            // the count when it goes out of scope.
            let rc = ManuallyDrop::new(Rc::from_raw(*tup.cast::<*const Vec<u8>>().wrapping_sub(1)));
            assert_eq!(Rc::strong_count(&rc), 1);
            // Reference-count updates map directly onto Rc strong counts.
            __quantum__rt__tuple_update_reference_count(tup, 2);
            assert_eq!(Rc::strong_count(&rc), 3);
            __quantum__rt__tuple_update_reference_count(tup, -2);
            assert_eq!(Rc::strong_count(&rc), 1);
            // Balance the reference taken at creation.
            __quantum__rt__tuple_update_reference_count(tup, -1);
        }
    }

    #[test]
    fn test_tuple_update_alias_count() {
        let tup = __quantum__rt__tuple_create(size_of::<u32>() as u64);
        unsafe {
            let rc = ManuallyDrop::new(Rc::from_raw(*tup.cast::<*const Vec<u8>>().wrapping_sub(1)));
            assert_eq!(Rc::strong_count(&rc), 1);
            assert_eq!(Rc::weak_count(&rc), 0);
            // Alias-count updates are tracked via Rc weak counts and must not
            // disturb the strong (reference) count.
            __quantum__rt__tuple_update_alias_count(tup, 2);
            assert_eq!(Rc::weak_count(&rc), 2);
            __quantum__rt__tuple_update_alias_count(tup, -2);
            assert_eq!(Rc::weak_count(&rc), 0);
            __quantum__rt__tuple_update_reference_count(tup, -1);
        }
    }

    #[test]
    fn test_tuple_copy() {
        let tup1 = __quantum__rt__tuple_create(size_of::<u32>() as u64);
        unsafe {
            *tup1.cast::<u32>() = 42;
            // Unforced copy with no aliases: the same tuple is shared, so the
            // pointers compare equal.
            let tup2 = __quantum__rt__tuple_copy(tup1, false);
            assert_eq!(tup2, tup1);
            assert_eq!(*tup2.cast::<u32>(), 42);
            __quantum__rt__tuple_update_reference_count(tup2, -1);
            assert_eq!(*tup1.cast::<u32>(), 42);
            // Forced copy: a distinct buffer carrying the same payload; the
            // original must be unaffected by releasing the copy.
            let tup3 = __quantum__rt__tuple_copy(tup1, true);
            assert_ne!(tup3, tup1);
            assert_eq!(*tup3.cast::<u32>(), 42);
            __quantum__rt__tuple_update_reference_count(tup3, -1);
            assert_eq!(*tup1.cast::<u32>(), 42);
            // With a nonzero alias count, even an unforced copy must
            // duplicate the buffer.
            __quantum__rt__tuple_update_alias_count(tup1, 1);
            let tup4 = __quantum__rt__tuple_copy(tup1, false);
            assert_ne!(tup4, tup1);
            assert_eq!(*tup4.cast::<u32>(), 42);
            __quantum__rt__tuple_update_reference_count(tup4, -1);
            assert_eq!(*tup1.cast::<u32>(), 42);
            __quantum__rt__tuple_update_alias_count(tup1, -1);
            __quantum__rt__tuple_update_reference_count(tup1, -1);
        }
    }
}