genalloc: support memory allocation with byte alignment
Byte alignment is required to manage some special RAM, so add gen_pool_first_fit_align to genalloc. Also add gen_pool_alloc_algo, which lets the caller pass an algorithm (in case the user layer uses more than one) together with its data down to gen_pool_first_fit_align; gen_pool_alloc becomes a wrapper around it.

Signed-off-by: Zhao Qiang <qiang.zhao@freescale.com>
Signed-off-by: Scott Wood <scottwood@freescale.com>
commit de2dd0eb30 (parent e9d764f803)
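For context, a minimal caller-side sketch of the interface this patch adds. The function name alloc_aligned_example and the 256-byte/4096-byte values are illustrative, and the pool is assumed to have been created with gen_pool_create() and populated beforehand:

#include <linux/genalloc.h>

/* Illustrative only: allocate 4096 bytes from an existing, already
 * populated pool, forcing the start address onto a 256-byte boundary
 * via the new bytes-alignment algorithm.
 */
static unsigned long alloc_aligned_example(struct gen_pool *pool)
{
	struct genpool_data_align align_data = {
		.align = 256,	/* alignment in bytes for the starting address */
	};

	/* gen_pool_alloc() keeps working as before; it is now a wrapper
	 * around gen_pool_alloc_algo() using the pool's default algorithm.
	 */
	return gen_pool_alloc_algo(pool, 4096, gen_pool_first_fit_align,
				   &align_data);
}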
include/linux/genalloc.h
@@ -30,10 +30,12 @@
 #ifndef __GENALLOC_H__
 #define __GENALLOC_H__
 
+#include <linux/types.h>
 #include <linux/spinlock_types.h>
 
 struct device;
 struct device_node;
+struct gen_pool;
 
 /**
  * Allocation callback function type definition
@@ -47,7 +49,7 @@ typedef unsigned long (*genpool_algo_t)(unsigned long *map,
 			unsigned long size,
 			unsigned long start,
 			unsigned int nr,
-			void *data);
+			void *data, struct gen_pool *pool);
 
 /*
  *  General purpose special memory pool descriptor.
@@ -75,6 +77,13 @@ struct gen_pool_chunk {
 	unsigned long bits[0];		/* bitmap for allocating memory chunk */
 };
 
+/*
+ *  gen_pool data descriptor for gen_pool_first_fit_align.
+ */
+struct genpool_data_align {
+	int align;		/* alignment by bytes for starting address */
+};
+
 extern struct gen_pool *gen_pool_create(int, int);
 extern phys_addr_t gen_pool_virt_to_phys(struct gen_pool *pool, unsigned long);
 extern int gen_pool_add_virt(struct gen_pool *, unsigned long, phys_addr_t,
@@ -98,6 +107,8 @@ static inline int gen_pool_add(struct gen_pool *pool, unsigned long addr,
 }
 extern void gen_pool_destroy(struct gen_pool *);
 extern unsigned long gen_pool_alloc(struct gen_pool *, size_t);
+extern unsigned long gen_pool_alloc_algo(struct gen_pool *, size_t,
+		genpool_algo_t algo, void *data);
 extern void *gen_pool_dma_alloc(struct gen_pool *pool, size_t size,
 		dma_addr_t *dma);
 extern void gen_pool_free(struct gen_pool *, unsigned long, size_t);
@@ -110,14 +121,22 @@ extern void gen_pool_set_algo(struct gen_pool *pool, genpool_algo_t algo,
 		void *data);
 
 extern unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data);
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool);
+
+extern unsigned long gen_pool_first_fit_align(unsigned long *map,
+		unsigned long size, unsigned long start, unsigned int nr,
+		void *data, struct gen_pool *pool);
+
+
 extern unsigned long gen_pool_first_fit_order_align(unsigned long *map,
 		unsigned long size, unsigned long start, unsigned int nr,
-		void *data);
+		void *data, struct gen_pool *pool);
 
 extern unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data);
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool);
+
 
 extern struct gen_pool *devm_gen_pool_create(struct device *dev,
 		int min_alloc_order, int nid, const char *name);
lib/genalloc.c
@@ -269,6 +269,25 @@ EXPORT_SYMBOL(gen_pool_destroy);
  * NMI-safe cmpxchg implementation.
  */
 unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
+{
+	return gen_pool_alloc_algo(pool, size, pool->algo, pool->data);
+}
+EXPORT_SYMBOL(gen_pool_alloc);
+
+/**
+ * gen_pool_alloc_algo - allocate special memory from the pool
+ * @pool: pool to allocate from
+ * @size: number of bytes to allocate from the pool
+ * @algo: algorithm passed from caller
+ * @data: data passed to algorithm
+ *
+ * Allocate the requested number of bytes from the specified pool.
+ * Uses the pool allocation function (with first-fit algorithm by default).
+ * Can not be used in NMI handler on architectures without
+ * NMI-safe cmpxchg implementation.
+ */
+unsigned long gen_pool_alloc_algo(struct gen_pool *pool, size_t size,
+		genpool_algo_t algo, void *data)
 {
 	struct gen_pool_chunk *chunk;
 	unsigned long addr = 0;
@@ -290,8 +309,8 @@ unsigned long gen_pool_alloc(struct gen_pool *pool, size_t size)
 
 		end_bit = chunk_size(chunk) >> order;
 retry:
-		start_bit = pool->algo(chunk->bits, end_bit, start_bit, nbits,
-				pool->data);
+		start_bit = algo(chunk->bits, end_bit, start_bit,
+				 nbits, data, pool);
 		if (start_bit >= end_bit)
 			continue;
 		remain = bitmap_set_ll(chunk->bits, start_bit, nbits);
@@ -310,7 +329,7 @@ retry:
 	rcu_read_unlock();
 	return addr;
 }
-EXPORT_SYMBOL(gen_pool_alloc);
+EXPORT_SYMBOL(gen_pool_alloc_algo);
 
 /**
  * gen_pool_dma_alloc - allocate special memory from the pool for DMA usage
@@ -501,14 +520,41 @@ EXPORT_SYMBOL(gen_pool_set_algo);
  * @start: The bitnumber to start searching at
  * @nr: The number of zeroed bits we're looking for
  * @data: additional data - unused
+ * @pool: pool to find the fit region memory from
  */
 unsigned long gen_pool_first_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data)
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool)
 {
 	return bitmap_find_next_zero_area(map, size, start, nr, 0);
 }
 EXPORT_SYMBOL(gen_pool_first_fit);
 
+/**
+ * gen_pool_first_fit_align - find the first available region
+ * of memory matching the size requirement (alignment constraint)
+ * @map: The address to base the search on
+ * @size: The bitmap size in bits
+ * @start: The bitnumber to start searching at
+ * @nr: The number of zeroed bits we're looking for
+ * @data: data for alignment
+ * @pool: pool to get order from
+ */
+unsigned long gen_pool_first_fit_align(unsigned long *map, unsigned long size,
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool)
+{
+	struct genpool_data_align *alignment;
+	unsigned long align_mask;
+	int order;
+
+	alignment = data;
+	order = pool->min_alloc_order;
+	align_mask = ((alignment->align + (1UL << order) - 1) >> order) - 1;
+	return bitmap_find_next_zero_area(map, size, start, nr, align_mask);
+}
+EXPORT_SYMBOL(gen_pool_first_fit_align);
+
 /**
  * gen_pool_first_fit_order_align - find the first available region
  * of memory matching the size requirement. The region will be aligned
@@ -518,10 +564,11 @@ EXPORT_SYMBOL(gen_pool_first_fit);
  * @start: The bitnumber to start searching at
  * @nr: The number of zeroed bits we're looking for
  * @data: additional data - unused
+ * @pool: pool to find the fit region memory from
  */
 unsigned long gen_pool_first_fit_order_align(unsigned long *map,
 		unsigned long size, unsigned long start,
-		unsigned int nr, void *data)
+		unsigned int nr, void *data, struct gen_pool *pool)
 {
 	unsigned long align_mask = roundup_pow_of_two(nr) - 1;
 
@@ -537,12 +584,14 @@ EXPORT_SYMBOL(gen_pool_first_fit_order_align);
  * @start: The bitnumber to start searching at
  * @nr: The number of zeroed bits we're looking for
  * @data: additional data - unused
+ * @pool: pool to find the fit region memory from
  *
  * Iterate over the bitmap to find the smallest free region
  * which we can allocate the memory.
  */
 unsigned long gen_pool_best_fit(unsigned long *map, unsigned long size,
-		unsigned long start, unsigned int nr, void *data)
+		unsigned long start, unsigned int nr, void *data,
+		struct gen_pool *pool)
 {
 	unsigned long start_bit = size;
 	unsigned long len = size + 1;
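As a reading aid rather than part of the patch, the align_mask arithmetic in gen_pool_first_fit_align() can be checked with a small user-space program; the order and align values below are illustrative stand-ins for pool->min_alloc_order and genpool_data_align.align:

#include <stdio.h>

int main(void)
{
	int order = 8;			/* stands in for pool->min_alloc_order: 256-byte units */
	unsigned long align = 4096;	/* requested byte alignment from genpool_data_align */

	/* Same expression as in the patch: round the byte alignment up to a
	 * whole number of min_alloc_order units, then subtract one to form
	 * the mask passed to bitmap_find_next_zero_area().
	 */
	unsigned long align_mask = ((align + (1UL << order) - 1) >> order) - 1;

	/* Here: ((4096 + 255) >> 8) - 1 = 15, so the chosen start bit is a
	 * multiple of 16 bitmap bits, i.e. 16 * 256 = 4096 bytes.
	 */
	printf("align_mask = %lu\n", align_mask);
	return 0;
}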