 #include <sof/trace/trace.h>
 #include <rtos/symbol.h>
 #include <rtos/wait.h>
+#if CONFIG_VIRTUAL_HEAP
+#include <sof/lib/regions_mm.h>
+
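+/* Shared virtual heap used for audio buffer allocations and the spinlock
+ * serializing cross-core access to it; both are set up in virtual_heap_init().
+ */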
+struct vmh_heap *virtual_buffers_heap;
+struct k_spinlock vmh_lock;
+#endif /* CONFIG_VIRTUAL_HEAP */
+
 
 /* Zephyr includes */
 #include <zephyr/init.h>
@@ -189,6 +196,91 @@ static void l3_heap_free(struct k_heap *h, void *mem)
 
 #endif
 
+#if CONFIG_VIRTUAL_HEAP
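+/**
+ * Allocates a buffer from the virtual heap.
+ * @param heap Virtual heap to allocate from.
+ * @param flags Allocation flags, e.g. SOF_MEM_FLAG_COHERENT.
+ * @param caps Capability flags (unused by this allocator).
+ * @param bytes Requested size in bytes.
+ * @param align Required minimal alignment, verified after allocation.
+ * @return Pointer to allocated memory or NULL on failure.
+ */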
+static void *virtual_heap_alloc(struct vmh_heap *heap, uint32_t flags, uint32_t caps, size_t bytes,
+				uint32_t align)
+{
+	void *mem;
+
+	K_SPINLOCK(&vmh_lock) {
+		heap->core_id = cpu_get_id();
+		mem = vmh_alloc(heap, bytes);
+	}
+
+	if (!mem)
+		return NULL;
+
+	assert(IS_ALIGNED(mem, align));
+
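+	/* Coherent allocations are returned through the uncached pointer alias */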
+	if (flags & SOF_MEM_FLAG_COHERENT)
+		return sys_cache_uncached_ptr_get((__sparse_force void __sparse_cache *)mem);
+
+	return mem;
+}
+
+/**
+ * Checks whether a pointer comes from the virtual memory range.
+ * @param ptr Pointer to the memory being checked.
+ * @return True if the pointer falls into the virtual memory region, false otherwise.
+ */
+static bool is_virtual_heap_pointer(void *ptr)
+{
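+	/* The virtual heap region spans from the end of the static heapmem
+	 * buffer up to the end of the kernel virtual address space.
+	 */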
+	uintptr_t virtual_heap_start = POINTER_TO_UINT(sys_cache_cached_ptr_get(&heapmem)) +
+		HEAPMEM_SIZE;
+	uintptr_t virtual_heap_end = CONFIG_KERNEL_VM_BASE + CONFIG_KERNEL_VM_SIZE;
+
+	if (!is_cached(ptr))
+		ptr = (__sparse_force void *)sys_cache_cached_ptr_get(ptr);
+
+	return ((POINTER_TO_UINT(ptr) >= virtual_heap_start) &&
+		(POINTER_TO_UINT(ptr) < virtual_heap_end));
+}
+
+static void virtual_heap_free(void *ptr)
+{
+	int ret;
+
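+	/* vmh_free() is assumed to track buffers by their cached alias */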
+	ptr = (__sparse_force void *)sys_cache_cached_ptr_get(ptr);
+
+	K_SPINLOCK(&vmh_lock) {
+		virtual_buffers_heap->core_id = cpu_get_id();
+		ret = vmh_free(virtual_buffers_heap, ptr);
+	}
+
+	if (ret)
+		tr_err(&zephyr_tr, "Unable to free %p! %d", ptr, ret);
+}
+
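+/* Preallocated block bundles backing the virtual heap; each entry is
+ * assumed to be a { block size in bytes, number of blocks } pair.
+ */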
+static const struct vmh_heap_config static_hp_buffers = {
+	{
+		{ 128, 32 },
+		{ 512, 8 },
+		{ 1024, 44 },
+		{ 2048, 8 },
+		{ 4096, 11 },
+		{ 8192, 10 },
+		{ 65536, 3 },
+		{ 131072, 1 },
+		{ 524288, 1 } /* buffer for kpb */
+	},
+};
+
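+/* Creates the shared virtual buffers heap during POST_KERNEL init */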
+static int virtual_heap_init(void)
+{
+	k_spinlock_init(&vmh_lock);
+
+	virtual_buffers_heap = vmh_init_heap(&static_hp_buffers, MEM_REG_ATTR_SHARED_HEAP, 0,
+					     false);
+	if (!virtual_buffers_heap)
+		tr_err(&zephyr_tr, "Unable to init virtual buffers heap!");
+
+	return 0;
+}
+
+SYS_INIT(virtual_heap_init, POST_KERNEL, 1);
+
+#endif /* CONFIG_VIRTUAL_HEAP */
+
 static void *heap_alloc_aligned(struct k_heap *h, size_t min_align, size_t bytes)
 {
 	k_spinlock_key_t key;
@@ -395,6 +487,12 @@ void *rballoc_align(uint32_t flags, uint32_t caps, size_t bytes,
 		heap = &sof_heap;
 	}
 
+#if CONFIG_VIRTUAL_HEAP
+	/* Use virtual heap if it is available */
+	if (virtual_buffers_heap)
+		return virtual_heap_alloc(virtual_buffers_heap, flags, caps, bytes, align);
+#endif /* CONFIG_VIRTUAL_HEAP */
+
 	if (flags & SOF_MEM_FLAG_COHERENT)
 		return heap_alloc_aligned(heap, align, bytes);
 
@@ -417,6 +515,13 @@ void rfree(void *ptr)
 	}
 #endif
 
+#if CONFIG_VIRTUAL_HEAP
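+	/* Virtual heap buffers are recognized by their address range */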
+	if (is_virtual_heap_pointer(ptr)) {
+		virtual_heap_free(ptr);
+		return;
+	}
+#endif
+
 	heap_free(&sof_heap, ptr);
 }
 EXPORT_SYMBOL(rfree);
@@ -428,7 +533,6 @@ static int heap_init(void)
 #if CONFIG_L3_HEAP
 	sys_heap_init(&l3_heap.heap, UINT_TO_POINTER(get_l3_heap_start()), get_l3_heap_size());
 #endif
-
 	return 0;
 }
 