/* SPDX-License-Identifier: BSD-3-Clause
 * Copyright(c) 2016 Intel Corporation.
 * Copyright(c) 2016 6WIND S.A.
 * Copyright(c) 2018 Solarflare Communications Inc.
 */

#include <rte_mempool.h>

ssize_t
rte_mempool_op_calc_mem_size_helper(const struct rte_mempool *mp,
		uint32_t obj_num, uint32_t pg_shift,
		size_t chunk_reserve,
		size_t *min_chunk_size, size_t *align)
{
	size_t total_elt_sz;
	size_t obj_per_page, pg_sz, objs_in_last_page;
	size_t mem_size;

	total_elt_sz = mp->header_size + mp->elt_size + mp->trailer_size;
	if (total_elt_sz == 0) {
		mem_size = 0;
	} else if (pg_shift == 0) {
		mem_size = total_elt_sz * obj_num + chunk_reserve;
	} else {
		pg_sz = (size_t)1 << pg_shift;
		if (chunk_reserve >= pg_sz)
			return -EINVAL;
		obj_per_page = (pg_sz - chunk_reserve) / total_elt_sz;
		if (obj_per_page == 0) {
			/*
			 * Note that if object size is bigger than page size,
			 * then it is assumed that pages are grouped in subsets
			 * of physically contiguous pages big enough to store
			 * at least one object.
			 */
			mem_size = RTE_ALIGN_CEIL(total_elt_sz + chunk_reserve,
						  pg_sz) * obj_num;
		} else {
			/* In the best case, the allocator will return a
			 * page-aligned address. For example, with 5 objs,
			 * the required space is as below:
			 *  |     page0     |     page1     |  page2 (last) |
			 *  |obj0 |obj1 |xxx|obj2 |obj3 |xxx|obj4|
			 *  <------------- mem_size ------------->
			 */
			objs_in_last_page = ((obj_num - 1) % obj_per_page) + 1;
			/* room required for the last page */
			mem_size = objs_in_last_page * total_elt_sz +
				chunk_reserve;
			/* room required for other pages */
			mem_size += ((obj_num - objs_in_last_page) /
				obj_per_page) << pg_shift;

			/* In the worst case, the allocator returns a
			 * non-aligned pointer, wasting up to
			 * total_elt_sz. Add a margin for that.
			 */
			mem_size += total_elt_sz - 1;
		}
	}

	*min_chunk_size = total_elt_sz;
	*align = RTE_MEMPOOL_ALIGN;

	return mem_size;
}
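
/*
 * Worked example for the helper above (illustrative numbers, not taken
 * from any caller): with total_elt_sz = 1536, pg_shift = 12 (4 KiB
 * pages) and chunk_reserve = 0, obj_per_page = 4096 / 1536 = 2. For
 * obj_num = 5, matching the diagram in the code:
 *
 *	objs_in_last_page = ((5 - 1) % 2) + 1 = 1
 *	mem_size = 1 * 1536 + 0			(last page)
 *		 + (((5 - 1) / 2) << 12) = 8192	(two full pages)
 *		 + 1536 - 1 = 1535		(worst-case margin)
 *		 = 11263 bytes
 */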

ssize_t
rte_mempool_op_calc_mem_size_default(const struct rte_mempool *mp,
		uint32_t obj_num, uint32_t pg_shift,
		size_t *min_chunk_size, size_t *align)
{
	return rte_mempool_op_calc_mem_size_helper(mp, obj_num, pg_shift,
			0, min_chunk_size, align);
}

/* Returns -1 if object crosses a page boundary, else returns 0 */
static int
check_obj_bounds(char *obj, size_t pg_sz, size_t elt_sz)
{
	if (pg_sz == 0)
		return 0;
	if (elt_sz > pg_sz)
		return 0;
	if (RTE_PTR_ALIGN(obj, pg_sz) != RTE_PTR_ALIGN(obj + elt_sz - 1, pg_sz))
		return -1;
	return 0;
}
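
/*
 * Example of the check above, with hypothetical values: for
 * pg_sz = 4096 and elt_sz = 256, an object starting at page offset
 * 4000 would end at offset 4255 on the next page, so -1 is returned;
 * the same object starting at offset 3840 ends at offset 4095, stays
 * within one page, and 0 is returned.
 */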

int
rte_mempool_op_populate_helper(struct rte_mempool *mp, unsigned int flags,
		unsigned int max_objs, void *vaddr, rte_iova_t iova,
		size_t len, rte_mempool_populate_obj_cb_t *obj_cb,
		void *obj_cb_arg)
{
	char *va = vaddr;
	size_t total_elt_sz, pg_sz;
	size_t off;
	unsigned int i;
	void *obj;
	int ret;

	ret = rte_mempool_get_page_size(mp, &pg_sz);
	if (ret < 0)
		return ret;

	total_elt_sz = mp->header_size + mp->elt_size + mp->trailer_size;

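	/*
	 * With RTE_MEMPOOL_POPULATE_F_ALIGN_OBJ, skip ahead so that the
	 * first object starts on a multiple of total_elt_sz. The
	 * expression below is equivalent to
	 * (total_elt_sz - va % total_elt_sz) % total_elt_sz, written so
	 * that an already-aligned va yields off = 0.
	 */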
	if (flags & RTE_MEMPOOL_POPULATE_F_ALIGN_OBJ)
		off = total_elt_sz - (((uintptr_t)(va - 1) % total_elt_sz) + 1);
	else
		off = 0;
	for (i = 0; i < max_objs; i++) {
		/* avoid objects crossing page boundaries */
		if (check_obj_bounds(va + off, pg_sz, total_elt_sz) < 0) {
			off += RTE_PTR_ALIGN_CEIL(va + off, pg_sz) - (va + off);
			if (flags & RTE_MEMPOOL_POPULATE_F_ALIGN_OBJ)
				off += total_elt_sz -
					(((uintptr_t)(va + off - 1) %
						total_elt_sz) + 1);
		}

		if (off + total_elt_sz > len)
			break;

		off += mp->header_size;
		obj = va + off;
		obj_cb(mp, obj_cb_arg, obj,
		       (iova == RTE_BAD_IOVA) ? RTE_BAD_IOVA : (iova + off));
		rte_mempool_ops_enqueue_bulk(mp, &obj, 1);
		off += mp->elt_size + mp->trailer_size;
	}

	return i;
}

int
rte_mempool_op_populate_default(struct rte_mempool *mp, unsigned int max_objs,
		void *vaddr, rte_iova_t iova, size_t len,
		rte_mempool_populate_obj_cb_t *obj_cb,
		void *obj_cb_arg)
{
	return rte_mempool_op_populate_helper(mp, 0, max_objs, vaddr, iova,
			len, obj_cb, obj_cb_arg);
}
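
/*
 * Usage sketch (illustrative only, not part of this file): a mempool
 * driver can plug the two default callbacks above into its ops table.
 * The "my_ring_*" callbacks below are hypothetical placeholders; the
 * field names follow struct rte_mempool_ops from rte_mempool.h.
 *
 *	static const struct rte_mempool_ops my_ring_ops = {
 *		.name = "my_ring",
 *		.alloc = my_ring_alloc,
 *		.free = my_ring_free,
 *		.enqueue = my_ring_enqueue,
 *		.dequeue = my_ring_dequeue,
 *		.get_count = my_ring_get_count,
 *		.calc_mem_size = rte_mempool_op_calc_mem_size_default,
 *		.populate = rte_mempool_op_populate_default,
 *	};
 *	RTE_MEMPOOL_REGISTER_OPS(my_ring_ops);
 */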