/* Release 4.11 — include/linux/cma.h */
#ifndef __CMA_H__
#define __CMA_H__
#include <linux/init.h>
#include <linux/types.h>
/*
 * There is always at least the global CMA area, plus a few optional
 * areas configured via CONFIG_CMA_AREAS in the kernel .config.
 */
#ifdef CONFIG_CMA_AREAS
#define MAX_CMA_AREAS (1 + CONFIG_CMA_AREAS)
#else
#define MAX_CMA_AREAS (0)
#endif

/* Opaque descriptor of one CMA area; the definition lives in mm/cma.c. */
struct cma;

/* Total number of pages reserved for CMA across all areas. */
extern unsigned long totalcma_pages;

/* Physical base address of @cma. */
extern phys_addr_t cma_get_base(const struct cma *cma);

/* Size of @cma in bytes. */
extern unsigned long cma_get_size(const struct cma *cma);

/*
 * Reserve a physically contiguous range for a new CMA area.
 * Early-boot only (__init).  On success, *@res_cma points at the new
 * area.  Returns 0 on success, a negative value on failure (kernel
 * convention; NOTE(review): confirm exact semantics of @base/@limit/
 * @alignment/@fixed against the definition in mm/cma.c).
 */
extern int __init cma_declare_contiguous(phys_addr_t base,
phys_addr_t size, phys_addr_t limit,
phys_addr_t alignment, unsigned int order_per_bit,
bool fixed, struct cma **res_cma);

/*
 * Create a CMA area from memory the caller has already reserved.
 * On success, *@res_cma points at the new area.  Returns 0 on success,
 * presumably a negative errno on failure — verify against mm/cma.c.
 */
extern int cma_init_reserved_mem(phys_addr_t base, phys_addr_t size,
unsigned int order_per_bit,
struct cma **res_cma);

/*
 * Allocate @count contiguous pages from @cma with the given @align
 * (order-based alignment; NOTE(review): confirm in mm/cma.c).  Returns
 * the first struct page of the range, or NULL on failure.  @gfp_mask
 * presumably influences reclaim behaviour — confirm at the call sites.
 */
extern struct page *cma_alloc(struct cma *cma, size_t count, unsigned int align,
gfp_t gfp_mask);

/*
 * Release @count pages previously obtained from cma_alloc().  Returns
 * true when the pages belonged to @cma and were released, false
 * otherwise.
 */
extern bool cma_release(struct cma *cma, const struct page *pages, unsigned int count);
#endif /* __CMA_H__ */
/*
 * Overall Contributors
 * Person           | Tokens | Prop    | Commits | CommitProp |
 * Joonsoo Kim      | 113    | 72.90%  | 2       | 22.22%     |
 * Marek Szyprowski | 18     | 11.61%  | 1       | 11.11%     |
 * Thierry Reding   | 6      | 3.87%   | 1       | 11.11%     |
 * Sasha Levin      | 5      | 3.23%   | 1       | 11.11%     |
 * Pintu Kumar      | 5      | 3.23%   | 1       | 11.11%     |
 * Weijie Yang      | 4      | 2.58%   | 1       | 11.11%     |
 * Lucas Stach      | 3      | 1.94%   | 1       | 11.11%     |
 * Rohit Vaswani    | 1      | 0.65%   | 1       | 11.11%     |
 * Total            | 155    | 100.00% | 9       | 100.00%    |
 *
 * Information contained on this website is for historical information
 * purposes only and does not indicate or represent copyright ownership.
 */