// feoxdb/storage/free_space.rs

use crate::constants::*;
use crate::error::{FeoxError, Result};
use std::collections::BTreeMap;

/// A contiguous run of free sectors on the device.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FreeSpace {
    /// First sector of the extent.
    pub start: u64,
    /// Number of sectors in the extent.
    pub size: u64,
}

/// Tracks free disk extents with two indexes: `by_size` for best-fit
/// allocation and `by_start` for coalescing neighbouring extents on release.
pub struct FreeSpaceManager {
    /// Free extents keyed by (size, start); iterating in order yields the
    /// smallest extent that can satisfy a request first.
    by_size: BTreeMap<(u64, u64), FreeSpace>,

    /// Free extents keyed by start sector, used to find adjacent extents.
    by_start: BTreeMap<u64, FreeSpace>,

    /// Total free space in bytes.
    total_free: u64,

    /// Device size in bytes; 0 means unknown and disables range validation.
    device_size: u64,

    /// Cached fragmentation estimate in percent (0..=100).
    fragmentation_percent: u32,
}

impl FreeSpaceManager {
    pub fn new() -> Self {
        Self {
            by_size: BTreeMap::new(),
            by_start: BTreeMap::new(),
            total_free: 0,
            device_size: 0,
            fragmentation_percent: 0,
        }
    }

    /// Records the device size and seeds the free list with everything past
    /// the reserved metadata sectors.
    pub fn initialize(&mut self, device_size: u64) -> Result<()> {
        self.device_size = device_size;

        // The first sectors are reserved for on-disk metadata.
        let metadata_sectors = 16;
        let total_sectors = device_size / FEOX_BLOCK_SIZE as u64;

        if total_sectors <= metadata_sectors {
            return Err(FeoxError::InvalidDevice);
        }

        let free_sectors = total_sectors - metadata_sectors;
        self.insert_free_space(FreeSpace {
            start: metadata_sectors,
            size: free_sectors,
        })?;

        Ok(())
    }

    pub fn set_device_size(&mut self, device_size: u64) {
        self.device_size = device_size;
    }

    /// Best-fit allocation: picks the smallest free extent that can hold
    /// `sectors_needed` sectors and returns its start sector, re-inserting
    /// any unused remainder as a new free extent.
    pub fn allocate_sectors(&mut self, sectors_needed: u64) -> Result<u64> {
        if sectors_needed == 0 {
            return Err(FeoxError::InvalidArgument);
        }

        let mut best_fit = None;

        // `by_size` is ordered by (size, start), so the first extent that is
        // large enough is also the smallest such extent.
        for ((size, start), space) in &self.by_size {
            if *size >= sectors_needed {
                best_fit = Some((*size, *start, space.clone()));
                break;
            }
        }

        if let Some((size, start, space)) = best_fit {
            if !self.is_valid_free_space(&space) {
                return Err(FeoxError::CorruptedData);
            }

            // Remove the chosen extent from both indexes and from the byte
            // total; the unused tail (if any) is added back below.
            self.by_size.remove(&(size, start));
            self.by_start.remove(&space.start);
            self.total_free -= space.size * FEOX_BLOCK_SIZE as u64;

            let allocated_start = space.start;

            if space.size > sectors_needed {
                let remaining = FreeSpace {
                    start: space.start + sectors_needed,
                    size: space.size - sectors_needed,
                };

                if let Err(e) = self.insert_free_space(remaining) {
                    // Roll back: restore the original extent before failing.
                    let _ = self.insert_free_space(space);
                    return Err(e);
                }
            }

            self.update_fragmentation();

            Ok(allocated_start)
        } else {
            Err(FeoxError::OutOfSpace)
        }
    }

    /// Returns `count` sectors starting at `start` to the free list, merging
    /// with adjacent free extents where possible.
    pub fn release_sectors(&mut self, start: u64, count: u64) -> Result<()> {
        if start == 0 || count == 0 {
            return Err(FeoxError::InvalidArgument);
        }

        if !self.is_valid_sector_range(start, count) {
            return Err(FeoxError::InvalidArgument);
        }

        let merged = self.try_merge_spaces(start, count)?;

        self.insert_free_space(merged)?;

        self.update_fragmentation();

        Ok(())
    }

    /// Coalesces the released range [start, start + size) with any free
    /// extents that end exactly at `start` or begin exactly at its end,
    /// removing those neighbours from both indexes.
    fn try_merge_spaces(&mut self, start: u64, size: u64) -> Result<FreeSpace> {
        let end = start + size;
        let mut merged_start = start;
        let mut merged_size = size;

        // The closest extent to the left is adjacent only if it ends at `start`.
        let prev = self
            .by_start
            .range(..start)
            .next_back()
            .and_then(|(&s, space)| {
                if s + space.size == start {
                    Some(space.clone())
                } else {
                    None
                }
            });

        let next = self.by_start.get(&end).cloned();

        if let Some(prev_space) = prev {
            self.by_size.remove(&(prev_space.size, prev_space.start));
            self.by_start.remove(&prev_space.start);

            self.total_free -= prev_space.size * FEOX_BLOCK_SIZE as u64;

            merged_start = prev_space.start;
            merged_size += prev_space.size;
        }

        if let Some(next_space) = next {
            self.by_size.remove(&(next_space.size, next_space.start));
            self.by_start.remove(&next_space.start);

            self.total_free -= next_space.size * FEOX_BLOCK_SIZE as u64;

            merged_size += next_space.size;
        }

        Ok(FreeSpace {
            start: merged_start,
            size: merged_size,
        })
    }

    /// Adds a free extent to both indexes and to the byte total, rejecting
    /// zero-length, out-of-range, or duplicate entries.
    fn insert_free_space(&mut self, space: FreeSpace) -> Result<()> {
        if space.size == 0 {
            return Err(FeoxError::InvalidArgument);
        }

        if !self.is_valid_free_space(&space) {
            return Err(FeoxError::InvalidArgument);
        }

        if self.by_start.contains_key(&space.start) {
            return Err(FeoxError::DuplicateKey);
        }

        self.total_free += space.size * FEOX_BLOCK_SIZE as u64;

        self.by_size
            .insert((space.size, space.start), space.clone());
        self.by_start.insert(space.start, space);

        Ok(())
    }

    fn is_valid_free_space(&self, space: &FreeSpace) -> bool {
        if space.start == 0 {
            return false;
        }

        if self.device_size > 0 {
            let device_sectors = self.device_size / FEOX_BLOCK_SIZE as u64;
            if space.start >= device_sectors {
                return false;
            }
            if space.start + space.size > device_sectors {
                return false;
            }
        }

        true
    }

    fn is_valid_sector_range(&self, start: u64, count: u64) -> bool {
        if start == 0 || count == 0 {
            return false;
        }

        if self.device_size > 0 {
            let device_sectors = self.device_size / FEOX_BLOCK_SIZE as u64;
            if start >= device_sectors {
                return false;
            }
            if start + count > device_sectors {
                return false;
            }
        }

        true
    }

    /// Recomputes the fragmentation estimate: the share of free space that is
    /// not part of the single largest free extent.
    fn update_fragmentation(&mut self) {
        if self.total_free == 0 {
            self.fragmentation_percent = 0;
            return;
        }

        let num_chunks = self.by_start.len() as u64;
        if num_chunks <= 1 {
            self.fragmentation_percent = 0;
            return;
        }

        // `by_size` is ordered by size, so the last entry is the largest extent.
        let largest = self
            .by_size
            .iter()
            .next_back()
            .map(|(_, space)| space.size * FEOX_BLOCK_SIZE as u64)
            .unwrap_or(0);

        let fragmented = self.total_free - largest;
        self.fragmentation_percent = ((fragmented * 100) / self.total_free) as u32;
    }

    pub fn get_total_free(&self) -> u64 {
        self.total_free
    }

    pub fn get_fragmentation(&self) -> u32 {
        self.fragmentation_percent
    }

    pub fn get_free_chunks_count(&self) -> usize {
        self.by_start.len()
    }

    pub fn get_largest_free_chunk(&self) -> u64 {
        self.by_size
            .iter()
            .next_back()
            .map(|(_, space)| space.size * FEOX_BLOCK_SIZE as u64)
            .unwrap_or(0)
    }
}

impl Default for FreeSpaceManager {
    fn default() -> Self {
        Self::new()
    }
}
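
// A minimal usage sketch kept as a unit-test module. It only assumes that
// FEOX_BLOCK_SIZE (from crate::constants) is an integer sector size and that
// the FeoxError variants referenced above exist; the scenario itself (1024
// sectors, 16 reserved for metadata) is illustrative, not part of the format.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn allocate_then_release_restores_free_space() {
        let mut mgr = FreeSpaceManager::new();
        // 1024 sectors in total; `initialize` reserves the first 16.
        mgr.initialize(1024 * FEOX_BLOCK_SIZE as u64).ok().unwrap();

        let free_before = mgr.get_total_free();
        assert_eq!(free_before, (1024 - 16) * FEOX_BLOCK_SIZE as u64);

        // Best-fit allocation carves 100 sectors off the single free extent.
        let start = mgr.allocate_sectors(100).ok().unwrap();
        assert_eq!(start, 16);
        assert_eq!(
            mgr.get_total_free(),
            free_before - 100 * FEOX_BLOCK_SIZE as u64
        );

        // Releasing the same range coalesces with the neighbouring extent,
        // leaving a single contiguous chunk and zero fragmentation.
        mgr.release_sectors(start, 100).ok().unwrap();
        assert_eq!(mgr.get_total_free(), free_before);
        assert_eq!(mgr.get_free_chunks_count(), 1);
        assert_eq!(mgr.get_fragmentation(), 0);
    }

    #[test]
    fn oversized_request_is_rejected() {
        let mut mgr = FreeSpaceManager::new();
        mgr.initialize(64 * FEOX_BLOCK_SIZE as u64).ok().unwrap();
        // Only 48 data sectors exist, so this request cannot be satisfied.
        assert!(mgr.allocate_sectors(1_000).is_err());
    }
}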