/*
 * This work is licensed under the terms of the GNU LGPL, version 2.
 *
 * This is a simple allocator that provides contiguous physical addresses
 * with page granularity.
 */

#ifndef ALLOC_PAGE_H
#define ALLOC_PAGE_H 1

#include <asm/memory_areas.h>

/* Returns true if the page allocator has been initialized */
bool page_alloc_initialized(void);

/*
 * Initializes a memory area.
 * n is the number of the area to initialize
 * base_pfn is the physical frame number of the start of the area to initialize
 * top_pfn is the physical frame number of the first page immediately after
 * the end of the area to initialize
 */
void page_alloc_init_area(u8 n, uintptr_t base_pfn, uintptr_t top_pfn);

/* Enables the page allocator. At least one area must have been initialized */
void page_alloc_ops_enable(void);

/*
 * Allocate aligned memory from the specified areas.
 * areas is a bitmap of allowed areas
 * alignment must be a power of 2
 */
void *memalign_pages_area(unsigned int areas, size_t alignment, size_t size);

/*
 * Allocate aligned memory from any area.
 * Equivalent to memalign_pages_area(~0, alignment, size).
 */
void *memalign_pages(size_t alignment, size_t size);

/*
 * Allocate naturally aligned memory from the specified areas.
 * Equivalent to memalign_pages_area(areas, 1ull << order, 1ull << order).
 */
void *alloc_pages_area(unsigned int areas, unsigned int order);

/*
 * Allocate one page from any area.
 * Equivalent to alloc_pages(0).
 */
void *alloc_page(void);

/*
 * Allocate naturally aligned memory from any area.
 * Equivalent to alloc_pages_area(~0, order).
 */
void *alloc_pages(unsigned int order);

/*
 * Frees a memory block allocated with any of the memalign_pages* or
 * alloc_pages* functions.
 * The pointer must point to the start of the block.
 */
void free_pages(void *mem);
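
/*
 * Example: typical bring-up and allocation flow. This is an illustrative
 * sketch only; the area number and the page frame numbers below are made
 * up, and the real values depend on <asm/memory_areas.h> and on the
 * memory layout of the test.
 *
 *	// hand pfns 0x100-0x1ff to the allocator as area 0, then enable it
 *	page_alloc_init_area(0, 0x100, 0x200);
 *	page_alloc_ops_enable();
 *
 *	void *p = alloc_page();		// one page from any area
 *	void *q = alloc_pages(1);	// two contiguous pages (order 1)
 *
 *	// all allocations, whatever their size, are freed with free_pages()
 *	free_pages(q);
 *	free_pages(p);
 */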

/* For backwards compatibility */
static inline void free_page(void *mem)
{
	free_pages(mem);
}

/*
 * For backwards compatibility; the order argument is ignored, since
 * free_pages() determines the size of the block on its own.
 */
static inline void free_pages_by_order(void *mem, unsigned int order)
{
	free_pages(mem);
}

#endif