GitHub Repository: torvalds/linux
Path: blob/master/rust/kernel/alloc/allocator.rs
// SPDX-License-Identifier: GPL-2.0

//! Allocator support.
//!
//! Documentation for the kernel's memory allocators can be found in the "Memory Allocation Guide"
//! linked below. For instance, this includes the concept of "get free page" (GFP) flags and the
//! typical application of the different kernel allocators.
//!
//! Reference: <https://docs.kernel.org/core-api/memory-allocation.html>
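//!
//! # Examples
//!
//! A minimal sketch of picking an allocator through the kernel's smart pointer aliases, assuming
//! the `KBox` and `KVBox` aliases exported by the kernel prelude are backed by [`Kmalloc`] and
//! [`KVmalloc`] respectively.
//!
//! ```
//! use kernel::prelude::*;
//!
//! // A small, physically contiguous allocation served by `Kmalloc`.
//! let small = KBox::new(0u64, GFP_KERNEL)?;
//! assert_eq!(*small, 0);
//!
//! // An allocation whose size may exceed what `Kmalloc` can serve: `KVmalloc` tries `Kmalloc`
//! // first and falls back to `Vmalloc` on failure.
//! let _large = KVBox::<[u8; 4096]>::new_uninit(GFP_KERNEL)?;
//! # Ok::<(), Error>(())
//! ```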

use super::Flags;
use core::alloc::Layout;
use core::ptr;
use core::ptr::NonNull;

use crate::alloc::{AllocError, Allocator, NumaNode};
use crate::bindings;
use crate::page;

const ARCH_KMALLOC_MINALIGN: usize = bindings::ARCH_KMALLOC_MINALIGN;

mod iter;
pub use self::iter::VmallocPageIter;

/// The contiguous kernel allocator.
///
/// `Kmalloc` is typically used for physically contiguous allocations up to page size, but also
/// supports larger allocations up to `bindings::KMALLOC_MAX_SIZE`, which is hardware specific.
///
/// For more details see [self].
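///
/// # Examples
///
/// A minimal sketch, assuming the `KBox` alias exported by the kernel prelude is backed by
/// `Kmalloc`:
///
/// ```
/// use kernel::prelude::*;
///
/// // Allocate a small value in physically contiguous memory.
/// let b = KBox::new(42u64, GFP_KERNEL)?;
/// assert_eq!(*b, 42);
/// # Ok::<(), Error>(())
/// ```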
pub struct Kmalloc;

/// The virtually contiguous kernel allocator.
///
/// `Vmalloc` allocates pages from the page level allocator and maps them into the contiguous
/// kernel virtual space. It is typically used for large allocations. The memory allocated with
/// this allocator is not physically contiguous.
///
/// For more details see [self].
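///
/// # Examples
///
/// A minimal sketch, assuming the `VBox` alias exported by the kernel prelude is backed by
/// `Vmalloc`:
///
/// ```
/// use kernel::page;
/// use kernel::prelude::*;
///
/// // One page worth of data; the backing pages need not be physically contiguous.
/// let _pages = VBox::<[u8; page::PAGE_SIZE]>::new_uninit(GFP_KERNEL)?;
/// # Ok::<(), Error>(())
/// ```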
pub struct Vmalloc;

/// The kvmalloc kernel allocator.
///
/// `KVmalloc` attempts to allocate memory with `Kmalloc` first, but falls back to `Vmalloc` upon
/// failure. This allocator is typically used when the size of the requested allocation is not
/// known and may exceed the capabilities of `Kmalloc`.
///
/// For more details see [self].
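///
/// # Examples
///
/// A minimal sketch, assuming the `KVBox` alias exported by the kernel prelude is backed by
/// `KVmalloc`:
///
/// ```
/// use kernel::prelude::*;
///
/// // Served by `Kmalloc` when possible, by `Vmalloc` otherwise.
/// let _buf = KVBox::<[u64; 1024]>::new_uninit(GFP_KERNEL)?;
/// # Ok::<(), Error>(())
/// ```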
pub struct KVmalloc;

/// # Invariants
///
/// One of the following: `krealloc_node_align`, `vrealloc_node_align`, `kvrealloc_node_align`.
struct ReallocFunc(
    unsafe extern "C" fn(
        *const crate::ffi::c_void,
        usize,
        crate::ffi::c_ulong,
        u32,
        crate::ffi::c_int,
    ) -> *mut crate::ffi::c_void,
);

impl ReallocFunc {
    // INVARIANT: `krealloc_node_align` satisfies the type invariants.
    const KREALLOC: Self = Self(bindings::krealloc_node_align);

    // INVARIANT: `vrealloc_node_align` satisfies the type invariants.
    const VREALLOC: Self = Self(bindings::vrealloc_node_align);

    // INVARIANT: `kvrealloc_node_align` satisfies the type invariants.
    const KVREALLOC: Self = Self(bindings::kvrealloc_node_align);

    /// # Safety
    ///
    /// This method has the same safety requirements as [`Allocator::realloc`].
    ///
    /// # Guarantees
    ///
    /// This method has the same guarantees as `Allocator::realloc`. Additionally
    /// - it accepts any pointer to a valid memory allocation allocated by this function.
    /// - memory allocated by this function remains valid until it is passed to this function.
    #[inline]
    unsafe fn call(
        &self,
        ptr: Option<NonNull<u8>>,
        layout: Layout,
        old_layout: Layout,
        flags: Flags,
        nid: NumaNode,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let size = layout.size();
        let ptr = match ptr {
            Some(ptr) => {
                if old_layout.size() == 0 {
                    ptr::null()
                } else {
                    ptr.as_ptr()
                }
            }
            None => ptr::null(),
        };

        // SAFETY:
        // - `self.0` is one of `krealloc_node_align`, `vrealloc_node_align` and
        //   `kvrealloc_node_align`, and thus only requires that `ptr` is NULL or valid.
        // - `ptr` is either NULL or valid by the safety requirements of this function.
        //
        // GUARANTEE:
        // - `self.0` is one of `krealloc_node_align`, `vrealloc_node_align`,
        //   `kvrealloc_node_align`.
        // - Those functions provide the guarantees of this function.
        let raw_ptr = unsafe {
            // If `size == 0` and `ptr != NULL` the memory behind the pointer is freed.
            self.0(ptr.cast(), size, layout.align(), flags.0, nid.0).cast()
        };

        let ptr = if size == 0 {
            crate::alloc::dangling_from_layout(layout)
        } else {
            NonNull::new(raw_ptr).ok_or(AllocError)?
        };

        Ok(NonNull::slice_from_raw_parts(ptr, size))
    }
}

impl Kmalloc {
    /// Returns a [`Layout`] that makes [`Kmalloc`] fulfill the requested size and alignment of
    /// `layout`.
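    ///
    /// # Examples
    ///
    /// A minimal sketch of the padding behaviour: the returned size is rounded up to a multiple
    /// of the requested alignment, while the alignment itself is unchanged.
    ///
    /// ```
    /// use core::alloc::Layout;
    /// use kernel::alloc::allocator::Kmalloc;
    /// use kernel::prelude::*;
    ///
    /// // A 5-byte request with 4-byte alignment is padded to 8 bytes.
    /// let layout = Layout::from_size_align(5, 4).map_err(|_| EINVAL)?;
    /// let padded = Kmalloc::aligned_layout(layout);
    /// assert_eq!(padded.size(), 8);
    /// assert_eq!(padded.align(), 4);
    /// # Ok::<(), Error>(())
    /// ```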
    pub fn aligned_layout(layout: Layout) -> Layout {
        // Note that `layout.size()` (after padding) is guaranteed to be a multiple of
        // `layout.align()` which together with the slab guarantees means that `Kmalloc` will
        // return a properly aligned object (see comments in `kmalloc()` for more information).
        layout.pad_to_align()
    }
}

// SAFETY: `realloc` delegates to `ReallocFunc::call`, which guarantees that
// - memory remains valid until it is explicitly freed,
// - passing a pointer to a valid memory allocation is OK,
// - `realloc` satisfies the guarantees, since `ReallocFunc::call` has the same.
unsafe impl Allocator for Kmalloc {
    const MIN_ALIGN: usize = ARCH_KMALLOC_MINALIGN;

    #[inline]
    unsafe fn realloc(
        ptr: Option<NonNull<u8>>,
        layout: Layout,
        old_layout: Layout,
        flags: Flags,
        nid: NumaNode,
    ) -> Result<NonNull<[u8]>, AllocError> {
        let layout = Kmalloc::aligned_layout(layout);

        // SAFETY: `ReallocFunc::call` has the same safety requirements as `Allocator::realloc`.
        unsafe { ReallocFunc::KREALLOC.call(ptr, layout, old_layout, flags, nid) }
    }
}

impl Vmalloc {
    /// Convert a pointer to a [`Vmalloc`] allocation to a [`page::BorrowedPage`].
    ///
    /// # Examples
    ///
    /// ```
    /// # use core::ptr::{NonNull, from_mut};
    /// # use kernel::{page, prelude::*};
    /// use kernel::alloc::allocator::Vmalloc;
    ///
    /// let mut vbox = VBox::<[u8; page::PAGE_SIZE]>::new_uninit(GFP_KERNEL)?;
    ///
    /// {
    ///     // SAFETY: By the type invariant of `Box` the inner pointer of `vbox` is non-null.
    ///     let ptr = unsafe { NonNull::new_unchecked(from_mut(&mut *vbox)) };
    ///
    ///     // SAFETY:
    ///     // - `ptr` is a valid pointer to a `Vmalloc` allocation.
    ///     // - `ptr` is valid for the entire lifetime of `page`.
    ///     let page = unsafe { Vmalloc::to_page(ptr.cast()) };
    ///
    ///     // SAFETY: There is no concurrent read or write to the same page.
    ///     unsafe { page.fill_zero_raw(0, page::PAGE_SIZE)? };
    /// }
    /// # Ok::<(), Error>(())
    /// ```
    ///
    /// # Safety
    ///
    /// - `ptr` must be a valid pointer to a [`Vmalloc`] allocation.
    /// - `ptr` must remain valid for the entire duration of `'a`.
    pub unsafe fn to_page<'a>(ptr: NonNull<u8>) -> page::BorrowedPage<'a> {
        // SAFETY: `ptr` is a valid pointer to `Vmalloc` memory.
        let page = unsafe { bindings::vmalloc_to_page(ptr.as_ptr().cast()) };

        // SAFETY: `vmalloc_to_page` returns a valid pointer to a `struct page` for a valid
        // pointer to `Vmalloc` memory.
        let page = unsafe { NonNull::new_unchecked(page) };

        // SAFETY:
        // - `page` is a valid pointer to a `struct page`, given that by the safety requirements
        //   of this function `ptr` is a valid pointer to a `Vmalloc` allocation.
        // - By the safety requirements of this function `ptr` is valid for the entire lifetime of
        //   `'a`.
        unsafe { page::BorrowedPage::from_raw(page) }
    }
}

// SAFETY: `realloc` delegates to `ReallocFunc::call`, which guarantees that
// - memory remains valid until it is explicitly freed,
// - passing a pointer to a valid memory allocation is OK,
// - `realloc` satisfies the guarantees, since `ReallocFunc::call` has the same.
unsafe impl Allocator for Vmalloc {
    const MIN_ALIGN: usize = kernel::page::PAGE_SIZE;

    #[inline]
    unsafe fn realloc(
        ptr: Option<NonNull<u8>>,
        layout: Layout,
        old_layout: Layout,
        flags: Flags,
        nid: NumaNode,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // SAFETY: If not `None`, `ptr` is guaranteed to point to valid memory, which was
        // previously allocated with this `Allocator`.
        unsafe { ReallocFunc::VREALLOC.call(ptr, layout, old_layout, flags, nid) }
    }
}

// SAFETY: `realloc` delegates to `ReallocFunc::call`, which guarantees that
// - memory remains valid until it is explicitly freed,
// - passing a pointer to a valid memory allocation is OK,
// - `realloc` satisfies the guarantees, since `ReallocFunc::call` has the same.
unsafe impl Allocator for KVmalloc {
    const MIN_ALIGN: usize = ARCH_KMALLOC_MINALIGN;

    #[inline]
    unsafe fn realloc(
        ptr: Option<NonNull<u8>>,
        layout: Layout,
        old_layout: Layout,
        flags: Flags,
        nid: NumaNode,
    ) -> Result<NonNull<[u8]>, AllocError> {
        // `KVmalloc` may use the `Kmalloc` backend, hence we have to enforce a
        // `Kmalloc`-compatible layout.
        let layout = Kmalloc::aligned_layout(layout);

        // SAFETY: If not `None`, `ptr` is guaranteed to point to valid memory, which was
        // previously allocated with this `Allocator`.
        unsafe { ReallocFunc::KVREALLOC.call(ptr, layout, old_layout, flags, nid) }
    }
}

#[macros::kunit_tests(rust_allocator)]
mod tests {
    use super::*;
    use core::mem::MaybeUninit;
    use kernel::prelude::*;

    #[test]
    fn test_alignment() -> Result {
        const TEST_SIZE: usize = 1024;
        const TEST_LARGE_ALIGN_SIZE: usize = kernel::page::PAGE_SIZE * 4;

        // These two structs are used to test allocating aligned memory.
        // They don't need to be accessed, so they're marked as `dead_code`.
        #[expect(dead_code)]
        #[repr(align(128))]
        struct Blob([u8; TEST_SIZE]);
        #[expect(dead_code)]
        #[repr(align(8192))]
        struct LargeAlignBlob([u8; TEST_LARGE_ALIGN_SIZE]);

        struct TestAlign<T, A: Allocator>(Box<MaybeUninit<T>, A>);
        impl<T, A: Allocator> TestAlign<T, A> {
            fn new() -> Result<Self> {
                Ok(Self(Box::<_, A>::new_uninit(GFP_KERNEL)?))
            }

            fn is_aligned_to(&self, align: usize) -> bool {
                assert!(align.is_power_of_two());

                let addr = self.0.as_ptr() as usize;
                addr & (align - 1) == 0
            }
        }

        let ta = TestAlign::<Blob, Kmalloc>::new()?;
        assert!(ta.is_aligned_to(128));

        let ta = TestAlign::<LargeAlignBlob, Kmalloc>::new()?;
        assert!(ta.is_aligned_to(8192));

        let ta = TestAlign::<Blob, Vmalloc>::new()?;
        assert!(ta.is_aligned_to(128));

        let ta = TestAlign::<LargeAlignBlob, Vmalloc>::new()?;
        assert!(ta.is_aligned_to(8192));

        let ta = TestAlign::<Blob, KVmalloc>::new()?;
        assert!(ta.is_aligned_to(128));

        let ta = TestAlign::<LargeAlignBlob, KVmalloc>::new()?;
        assert!(ta.is_aligned_to(8192));

        Ok(())
    }
}