1818#include <sof/trace/trace.h>
1919#include <rtos/symbol.h>
2020#include <rtos/wait.h>
#if CONFIG_VIRTUAL_HEAP
#include <sof/lib/regions_mm.h>

/* One virtual-memory heap per core, used for buffer allocations. An entry may
 * be NULL when heap initialization failed for that core; the allocator then
 * falls back to the regular system heap.
 */
struct vmh_heap *virtual_buffers_heap[CONFIG_MP_MAX_NUM_CPUS];
/* NOTE(review): initialized in virtual_heap_init() but not otherwise used in
 * this chunk — presumably serializes vmh operations elsewhere; confirm usage.
 */
struct k_spinlock vmh_lock;

#undef HEAPMEM_SIZE
/* Buffers are allocated from virtual space so we can safely reduce the heap size.
 */
#define HEAPMEM_SIZE 0x40000
#endif /* CONFIG_VIRTUAL_HEAP */
32+
2133
2234/* Zephyr includes */
2335#include <zephyr/init.h>
@@ -193,6 +205,89 @@ static void l3_heap_free(struct k_heap *h, void *mem)
193205
194206#endif
195207
208+ #if CONFIG_VIRTUAL_HEAP
209+ static void * virtual_heap_alloc (struct vmh_heap * heap , uint32_t flags , uint32_t caps , size_t bytes ,
210+ uint32_t align )
211+ {
212+ void * mem = vmh_alloc (heap , bytes );
213+
214+ if (!mem )
215+ return NULL ;
216+
217+ assert (IS_ALIGNED (mem , align ));
218+
219+ if (flags & SOF_MEM_FLAG_COHERENT )
220+ return sys_cache_uncached_ptr_get ((__sparse_force void __sparse_cache * )mem );
221+
222+ return mem ;
223+ }
224+
225+ /**
226+ * Checks whether pointer is from virtual memory range.
227+ * @param ptr Pointer to memory being checked.
228+ * @return True if pointer falls into virtual memory region, false otherwise.
229+ */
230+ static bool is_virtual_heap_pointer (void * ptr )
231+ {
232+ uintptr_t virtual_heap_start = POINTER_TO_UINT (sys_cache_cached_ptr_get (& heapmem )) +
233+ HEAPMEM_SIZE ;
234+ uintptr_t virtual_heap_end = CONFIG_KERNEL_VM_BASE + CONFIG_KERNEL_VM_SIZE ;
235+
236+ if (!is_cached (ptr ))
237+ ptr = (__sparse_force void * )sys_cache_cached_ptr_get (ptr );
238+
239+ return ((POINTER_TO_UINT (ptr ) >= virtual_heap_start ) &&
240+ (POINTER_TO_UINT (ptr ) < virtual_heap_end ));
241+ }
242+
243+ static void virtual_heap_free (void * ptr )
244+ {
245+ struct vmh_heap * const heap = virtual_buffers_heap [cpu_get_id ()];
246+ int ret ;
247+
248+ ptr = (__sparse_force void * )sys_cache_cached_ptr_get (ptr );
249+
250+ ret = vmh_free (heap , ptr );
251+ if (ret )
252+ tr_err (& zephyr_tr , "Unable to free %p! %d" , ptr , ret );
253+ }
254+
/* Block-bundle layout for every per-core virtual buffer heap.
 * NOTE(review): each entry is presumably { block size in bytes, block count }
 * per struct vmh_heap_config (regions_mm.h) — confirm against that definition.
 */
static const struct vmh_heap_config static_hp_buffers = {
	{
		{ 128, 32},
		{ 512, 8},
		{ 1024, 44},
		{ 2048, 8},
		{ 4096, 11},
		{ 8192, 10},
		{ 65536, 3},
		{ 131072, 1},
		{ 524288, 1} /* buffer for kpb */
	},
};
268+
269+ static int virtual_heap_init (void )
270+ {
271+ int core ;
272+
273+ k_spinlock_init (& vmh_lock );
274+
275+ for (core = 0 ; core < CONFIG_MP_MAX_NUM_CPUS ; core ++ ) {
276+ struct vmh_heap * heap = vmh_init_heap (& static_hp_buffers , MEM_REG_ATTR_CORE_HEAP ,
277+ core , false);
278+ if (!heap )
279+ tr_err (& zephyr_tr , "Unable to init virtual heap for core %d!" , core );
280+
281+ virtual_buffers_heap [core ] = heap ;
282+ }
283+
284+ return 0 ;
285+ }
286+
287+ SYS_INIT (virtual_heap_init , POST_KERNEL , 1 );
288+
289+ #endif /* CONFIG_VIRTUAL_HEAP */
290+
196291static void * heap_alloc_aligned (struct k_heap * h , size_t min_align , size_t bytes )
197292{
198293 k_spinlock_key_t key ;
@@ -384,6 +479,9 @@ EXPORT_SYMBOL(rzalloc);
384479void * rballoc_align (uint32_t flags , uint32_t caps , size_t bytes ,
385480 uint32_t align )
386481{
482+ #if CONFIG_VIRTUAL_HEAP
483+ struct vmh_heap * virtual_heap ;
484+ #endif
387485 struct k_heap * heap ;
388486
389487 /* choose a heap */
@@ -399,6 +497,13 @@ void *rballoc_align(uint32_t flags, uint32_t caps, size_t bytes,
399497 heap = & sof_heap ;
400498 }
401499
500+ #if CONFIG_VIRTUAL_HEAP
501+ /* Use virtual heap if it is available */
502+ virtual_heap = virtual_buffers_heap [cpu_get_id ()];
503+ if (virtual_heap )
504+ return virtual_heap_alloc (virtual_heap , flags , caps , bytes , align );
505+ #endif /* CONFIG_VIRTUAL_HEAP */
506+
402507 if (flags & SOF_MEM_FLAG_COHERENT )
403508 return heap_alloc_aligned (heap , align , bytes );
404509
@@ -421,6 +526,13 @@ void rfree(void *ptr)
421526 }
422527#endif
423528
529+ #if CONFIG_VIRTUAL_HEAP
530+ if (is_virtual_heap_pointer (ptr )) {
531+ virtual_heap_free (ptr );
532+ return ;
533+ }
534+ #endif
535+
424536 heap_free (& sof_heap , ptr );
425537}
426538EXPORT_SYMBOL (rfree );
0 commit comments