xref: /DragonOS/kernel/crates/bitmap/src/alloc_bitmap.rs (revision 352ee04918f4585ad4f8a896ca6e18b1ef7d7934)
use core::ops::BitAnd;

use alloc::vec::Vec;

use crate::{bitmap_core::BitMapCore, traits::BitMapOps};

/// A heap-allocated bitmap whose number of bits is chosen at runtime.
///
/// The bits are stored in a `Vec<usize>`; the per-bit operations are
/// delegated to [`BitMapCore`].
#[derive(Clone)]
pub struct AllocBitmap {
    elements: usize,
    data: Vec<usize>,
    core: BitMapCore<usize>,
}

impl AllocBitmap {
    /// Creates a bitmap with `elements` bits, all cleared.
    pub fn new(elements: usize) -> Self {
        // Round up to whole `usize` words so that every bit has backing storage.
        let data = vec![0usize; (elements + usize::BITS as usize - 1) / (usize::BITS as usize)];
        Self {
            elements,
            data,
            core: BitMapCore::new(),
        }
    }

    /// In-place bitwise AND with `rhs`.
    ///
    /// Both bitmaps are expected to hold the same number of elements;
    /// `rhs` must not be longer than `self`.
    pub fn bitand_assign(&mut self, rhs: &Self) {
        for i in 0..rhs.data.len() {
            self.data[i] &= rhs.data[i];
        }
    }
}

impl BitMapOps<usize> for AllocBitmap {
    #[inline]
    fn get(&self, index: usize) -> Option<bool> {
        return self.core.get(self.elements, &self.data, index);
    }

    #[inline]
    fn set(&mut self, index: usize, value: bool) -> Option<bool> {
        return self.core.set(self.elements, &mut self.data, index, value);
    }

    #[inline]
    fn len(&self) -> usize {
        self.elements
    }

    #[inline]
    fn size(&self) -> usize {
        self.data.len() * core::mem::size_of::<usize>()
    }

    #[inline]
    fn first_index(&self) -> Option<usize> {
        self.core.first_index(&self.data)
    }

    #[inline]
    fn first_false_index(&self) -> Option<usize> {
        self.core.first_false_index(self.elements, &self.data)
    }

    #[inline]
    fn last_index(&self) -> Option<usize> {
        self.core.last_index(self.elements, &self.data)
    }

    #[inline]
    fn last_false_index(&self) -> Option<usize> {
        self.core.last_false_index(self.elements, &self.data)
    }

    #[inline]
    fn next_index(&self, index: usize) -> Option<usize> {
        self.core.next_index(self.elements, &self.data, index)
    }

    #[inline]
    fn next_false_index(&self, index: usize) -> Option<usize> {
        self.core.next_false_index(self.elements, &self.data, index)
    }

    #[inline]
    fn prev_index(&self, index: usize) -> Option<usize> {
        self.core.prev_index(self.elements, &self.data, index)
    }

    #[inline]
    fn prev_false_index(&self, index: usize) -> Option<usize> {
        self.core.prev_false_index(self.elements, &self.data, index)
    }

    #[inline]
    fn invert(&mut self) {
        self.core.invert(self.elements, &mut self.data);
    }

    #[inline]
    fn is_full(&self) -> bool {
        self.core.is_full(self.elements, &self.data)
    }

    #[inline]
    fn is_empty(&self) -> bool {
        self.core.is_empty(&self.data)
    }

    #[inline]
    unsafe fn as_bytes(&self) -> &[u8] {
        // View the backing words as raw bytes. The slice length must be the
        // size of the bit data itself, not of the `AllocBitmap` struct.
        core::slice::from_raw_parts(self.data.as_ptr() as *const u8, self.size())
    }

    fn set_all(&mut self, value: bool) {
        self.core.set_all(self.elements, &mut self.data, value);
    }
}

impl BitAnd for &AllocBitmap {
    type Output = AllocBitmap;

    /// Bitwise AND of two bitmaps; both operands are expected to have been
    /// created with the same number of elements.
    fn bitand(self, rhs: Self) -> Self::Output {
        let mut result = AllocBitmap::new(self.elements);
        for i in 0..rhs.data.len() {
            result.data[i] = self.data[i] & rhs.data[i];
        }
        result
    }
}

impl BitAnd for AllocBitmap {
    type Output = AllocBitmap;

    fn bitand(self, rhs: Self) -> Self::Output {
        &self & &rhs
    }
}
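
// Usage sketch (illustrative only): a minimal exercise of the `AllocBitmap`
// API above. Assumes these unit tests are compiled for a host target where
// the standard test harness is available; the value returned by `set` is
// intentionally not asserted beyond being `Some`.
#[cfg(test)]
mod tests {
    use super::AllocBitmap;
    use crate::traits::BitMapOps;

    #[test]
    fn set_get_and_intersection() {
        let mut a = AllocBitmap::new(128);
        let mut b = AllocBitmap::new(128);

        // A freshly created bitmap has every bit cleared.
        assert!(a.is_empty());
        assert_eq!(a.len(), 128);

        // Mark a few bits in each bitmap; in-range indices return `Some`.
        assert!(a.set(3, true).is_some());
        assert!(a.set(100, true).is_some());
        assert!(b.set(100, true).is_some());

        assert_eq!(a.get(3), Some(true));
        assert_eq!(a.first_index(), Some(3));

        // `&a & &b` keeps only the bits set in both operands.
        let c = &a & &b;
        assert_eq!(c.first_index(), Some(100));

        // In-place variant of the same intersection.
        a.bitand_assign(&b);
        assert_eq!(a.get(3), Some(false));
        assert_eq!(a.first_index(), Some(100));
    }
}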