Lines matching full:mem, grouped below by the function they appear in (file line ranges kept from the search output):

mem_check_range() (lines 27-37): rejects an access unless the whole [iova, iova + length) window fits inside the registered region.

    int mem_check_range(struct rxe_mem *mem, u64 iova, size_t length)
    {
        switch (mem->type) {
        ...
            if (iova < mem->iova ||
                length > mem->length ||
                iova > mem->iova + mem->length - length)
        ...
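
The visible comparison rejects an access unless the whole window [iova, iova + length) sits inside the registered [mem->iova, mem->iova + mem->length) range. A minimal userspace sketch of the same test, with an illustrative struct standing in for the driver's fields:

    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Illustrative stand-in for the two fields the check reads. */
    struct region {
        uint64_t iova;    /* start of the registered range */
        size_t length;    /* size of the registered range */
    };

    /* Same comparison as the excerpt: reject if the window starts before the
     * region, is longer than the region, or runs past its end.  Phrasing the
     * last test as iova > start + len - length avoids computing iova + length,
     * which could wrap. */
    static int check_range(const struct region *r, uint64_t iova, size_t length)
    {
        if (iova < r->iova ||
            length > r->length ||
            iova > r->iova + r->length - length)
            return -1;
        return 0;
    }

    int main(void)
    {
        struct region r = { .iova = 0x1000, .length = 0x2000 };

        printf("%d\n", check_range(&r, 0x1000, 0x2000));  /* 0: exact fit */
        printf("%d\n", check_range(&r, 0x2fff, 2));        /* -1: runs past the end */
        return 0;
    }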

rxe_mem_init() (lines 50-59): builds the initial lkey from the object's pool index and resets the object to an invalid, untyped state.

    static void rxe_mem_init(int access, struct rxe_mem *mem)
    {
        u32 lkey = mem->pelem.index << 8 | rxe_get_key();
        ...
        mem->ibmr.lkey = lkey;
        mem->ibmr.rkey = rkey;
        mem->state = RXE_MEM_STATE_INVALID;
        mem->type = RXE_MEM_TYPE_NONE;
        mem->map_shift = ilog2(RXE_BUF_PER_MAP);
    }
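
The lkey built above packs the object's pool index into the upper bits and an 8-bit value from rxe_get_key() into the low byte, presumably so a later lookup can get back to the pool index while the low byte distinguishes reissued keys. A hedged sketch of that packing; the names and the way the low byte is produced here are illustrative, not the driver's:

    #include <stdint.h>
    #include <stdio.h>

    /* Pack a pool index and an 8-bit variant the way the excerpt does
     * (index << 8 | key).  The real driver gets the low byte from its own
     * key generator; here it is simply passed in. */
    static uint32_t make_key(uint32_t pool_index, uint8_t variant)
    {
        return pool_index << 8 | variant;
    }

    static uint32_t key_to_index(uint32_t key)
    {
        return key >> 8;    /* recover the pool index */
    }

    int main(void)
    {
        uint32_t key = make_key(42, 0x07);

        printf("key=%#x index=%u low byte=%#x\n",
               (unsigned)key, (unsigned)key_to_index(key), (unsigned)(key & 0xff));
        return 0;
    }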

rxe_mem_cleanup() (lines 64-73): releases the umem and frees both levels of the map table.

    struct rxe_mem *mem = container_of(arg, typeof(*mem), pelem);
    ...
    ib_umem_release(mem->umem);

    if (mem->map) {
        for (i = 0; i < mem->num_map; i++)
            kfree(mem->map[i]);

        kfree(mem->map);
    }

rxe_mem_alloc() (lines 77-110): allocates the array of map pointers and then each map, recording the geometry; the tail lines are the error unwind.

    static int rxe_mem_alloc(struct rxe_mem *mem, int num_buf)
    ...
        struct rxe_map **map = mem->map;
    ...
        mem->map = kmalloc_array(num_map, sizeof(*map), GFP_KERNEL);
        if (!mem->map)
    ...
            mem->map[i] = kmalloc(sizeof(**map), GFP_KERNEL);
            if (!mem->map[i])
    ...
        mem->map_shift = ilog2(RXE_BUF_PER_MAP);
        mem->map_mask = RXE_BUF_PER_MAP - 1;

        mem->num_buf = num_buf;
        mem->num_map = num_map;
        mem->max_buf = num_map * RXE_BUF_PER_MAP;
    ...
            kfree(mem->map[i]);
    ...
        kfree(mem->map);
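
rxe_mem_alloc() builds a two-level table: an array of pointers to maps, each map holding a fixed, power-of-two number of buffer descriptors (hence the ilog2()/mask lines). A userspace sketch of that sizing and of unwinding on failure; the names, the constant, and the error style are illustrative:

    #include <stdint.h>
    #include <stddef.h>
    #include <stdlib.h>

    #define BUFS_PER_MAP 256    /* illustrative; a power of two, as the mask/shift imply */

    struct phys_buf { uint64_t addr; size_t size; };
    struct map { struct phys_buf buf[BUFS_PER_MAP]; };

    struct table {
        struct map **map;
        int num_map;
    };

    /* Allocate ceil(num_buf / BUFS_PER_MAP) maps: first the pointer array,
     * then one map at a time, freeing everything already allocated if any
     * step fails. */
    static int table_alloc(struct table *t, int num_buf)
    {
        int num_map = (num_buf + BUFS_PER_MAP - 1) / BUFS_PER_MAP;
        int i;

        t->map = calloc(num_map, sizeof(*t->map));
        if (!t->map)
            return -1;

        for (i = 0; i < num_map; i++) {
            t->map[i] = malloc(sizeof(**t->map));
            if (!t->map[i])
                goto err;
        }
        t->num_map = num_map;
        return 0;

    err:
        while (--i >= 0)
            free(t->map[i]);
        free(t->map);
        t->map = NULL;
        return -1;
    }

    int main(void)
    {
        struct table t = { 0 };
        int err = table_alloc(&t, 1000);    /* 1000 buffers -> 4 maps of 256 slots */

        if (!err) {
            for (int i = 0; i < t.num_map; i++)
                free(t.map[i]);
            free(t.map);
        }
        return err;
    }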

rxe_mem_init_dma() (lines 116-123): marks the object as a valid DMA-type region owned by the given PD.

    int rxe_mem_init_dma(..., int access, struct rxe_mem *mem)
    {
        rxe_mem_init(access, mem);

        mem->ibmr.pd = &pd->ibpd;
        mem->access = access;
        mem->state = RXE_MEM_STATE_VALID;
        mem->type = RXE_MEM_TYPE_DMA;

rxe_mem_init_user() (lines 128-197): registers a user memory region; the pages are pinned into an ib_umem, the map table is sized to hold them, and the geometry, iova/va/offset and owner PD are recorded before the region is marked valid.

    int rxe_mem_init_user(..., struct rxe_mem *mem)
    ...
        mem->umem = umem;
    ...
        rxe_mem_init(access, mem);

        err = rxe_mem_alloc(mem, num_buf);
    ...
        mem->page_shift = PAGE_SHIFT;
        mem->page_mask = PAGE_SIZE - 1;
    ...
        map = mem->map;
    ...
        mem->ibmr.pd = &pd->ibpd;
        mem->umem = umem;
        mem->access = access;
        mem->length = length;
        mem->iova = iova;
        mem->va = start;
        mem->offset = ib_umem_offset(umem);
        mem->state = RXE_MEM_STATE_VALID;
        mem->type = RXE_MEM_TYPE_MR;
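
The tail of rxe_mem_init_user() records the page geometry (page_shift, page_mask) and, via ib_umem_offset(), the byte offset of the user's start address inside its first pinned page. A small sketch of that offset and, though the count itself is not among the matched lines, of how many pages a pin of (start, length) spans; the addresses and the 4 KiB page size are illustrative:

    #include <stdint.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12                   /* illustrative 4 KiB pages */
    #define PAGE_SIZE (1ULL << PAGE_SHIFT)

    int main(void)
    {
        uint64_t start = 0x7f3a12345678;    /* user virtual address */
        uint64_t length = 10000;

        uint64_t offset = start & (PAGE_SIZE - 1);               /* what mem->offset records */
        uint64_t first = start & ~(PAGE_SIZE - 1);
        uint64_t last = (start + length + PAGE_SIZE - 1) & ~(PAGE_SIZE - 1);
        uint64_t npages = (last - first) >> PAGE_SHIFT;           /* pages the pin must cover */

        printf("offset in first page: %llu, pages covered: %llu\n",
               (unsigned long long)offset, (unsigned long long)npages);
        return 0;
    }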

rxe_mem_init_fast() (lines 206-222): sets up a fast-register MR; room for max_pages buffers is reserved, the rkey is made equal to the lkey, and the object is left in the free state.

    int rxe_mem_init_fast(..., int max_pages, struct rxe_mem *mem)
    ...
        rxe_mem_init(0, mem);
    ...
        mem->ibmr.rkey = mem->ibmr.lkey;

        err = rxe_mem_alloc(mem, max_pages);
    ...
        mem->ibmr.pd = &pd->ibpd;
        mem->max_buf = max_pages;
        mem->state = RXE_MEM_STATE_FREE;
        mem->type = RXE_MEM_TYPE_MR;

lookup_iova() (lines 231-261): translates an iova inside the region into a (map index, buffer index, byte offset) triple; the fast path is pure shift/mask, the fallback walks variable-sized buffers one at a time.

    static void lookup_iova(struct rxe_mem *mem, u64 iova, ...)
    ...
        size_t offset = iova - mem->iova + mem->offset;
    ...
        if (likely(mem->page_shift)) {
            *offset_out = offset & mem->page_mask;
            offset >>= mem->page_shift;
            *n_out = offset & mem->map_mask;
            *m_out = offset >> mem->map_shift;
        } else {
    ...
            length = mem->map[map_index]->buf[buf_index].size;
    ...
            length = mem->map[map_index]->buf[buf_index].size;
    ...
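
On the fast path, lookup_iova() turns an iova into a (map, buffer, byte-offset) triple with nothing but shifts and masks, which only works because the buffers are page sized and each map holds a power-of-two number of them. A userspace model of that arithmetic; the constants and names are illustrative:

    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>

    #define PAGE_SHIFT 12                           /* buffer size = 4 KiB, illustrative */
    #define PAGE_MASK ((1ULL << PAGE_SHIFT) - 1)
    #define MAP_SHIFT 8                             /* ilog2(buffers per map), illustrative */
    #define MAP_MASK ((1ULL << MAP_SHIFT) - 1)

    /* Model of the excerpt's index math: the offset within the region (plus the
     * region's starting offset in its first page) is split into a byte offset,
     * a buffer slot within a map, and a map index. */
    static void lookup(uint64_t region_iova, uint64_t region_offset, uint64_t iova,
                       int *m, int *n, size_t *off)
    {
        uint64_t offset = iova - region_iova + region_offset;

        *off = offset & PAGE_MASK;      /* byte offset inside the buffer */
        offset >>= PAGE_SHIFT;          /* now a flat buffer index */
        *n = offset & MAP_MASK;         /* buffer slot within its map */
        *m = offset >> MAP_SHIFT;       /* which map in the pointer array */
    }

    int main(void)
    {
        int m, n;
        size_t off;

        /* region registered at iova 0x10000, first byte 0x80 into its first page */
        lookup(0x10000, 0x80, 0x10000 + 5 * 4096 + 100, &m, &n, &off);
        printf("m=%d n=%d off=%zu\n", m, n, off);   /* m=0 n=5 off=228 */
        return 0;
    }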

iova_to_vaddr() (lines 270-301): returns a kernel virtual address for an iova; the object's state is checked, the no-map case is handled, the range is validated, and the final bounds test makes sure the access does not run past the buffer lookup_iova() selected.

    void *iova_to_vaddr(struct rxe_mem *mem, u64 iova, int length)
    ...
        if (mem->state != RXE_MEM_STATE_VALID) {
            pr_warn("mem not in valid state\n");
    ...
        if (!mem->map) {
    ...
        if (mem_check_range(mem, iova, length)) {
    ...
        lookup_iova(mem, iova, &m, &n, &offset);

        if (offset + length > mem->map[m]->buf[n].size) {
    ...
        addr = (void *)(uintptr_t)mem->map[m]->buf[n].addr + offset;

rxe_mem_copy() (lines 308-373):

    /* ...
     * a mem object starting at iova. Compute incremental value of
     * crc32 if crcp is not zero. caller must hold a reference to mem
     */
    int rxe_mem_copy(struct rxe_mem *mem, u64 iova, void *addr, int length,
                     ...)
    ...
        if (mem->type == RXE_MEM_TYPE_DMA) {
    ...
                *crcp = rxe_crc32(to_rdev(mem->ibmr.device),
    ...
        WARN_ON_ONCE(!mem->map);

        err = mem_check_range(mem, iova, length);
    ...
        lookup_iova(mem, iova, &m, &i, &offset);

        map = mem->map + m;
    ...
            crc = rxe_crc32(to_rdev(mem->ibmr.device),
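
The elided middle of rxe_mem_copy() presumably iterates over the buffers that lookup_iova() located, consuming whatever remains of the current buffer before moving to the next one, and feeding each chunk to rxe_crc32() when crcp is set. A userspace model of that kind of walk over a segmented region, CRC omitted; the types and names are illustrative, and it assumes the caller has already range-checked as mem_check_range() does upstream:

    #include <stddef.h>
    #include <string.h>
    #include <stdio.h>

    /* One segment plays the role of a single physical buffer. */
    struct seg { char *addr; size_t size; };

    /* Copy length bytes out of a segmented region, starting start bytes in:
     * consume the rest of the current segment, then continue from byte 0 of
     * the following segments until length is exhausted. */
    static void seg_copy(const struct seg *segs, size_t start, char *dst, size_t length)
    {
        size_t i = 0, offset = start;

        while (offset >= segs[i].size)      /* find the starting segment */
            offset -= segs[i++].size;

        while (length) {
            size_t bytes = segs[i].size - offset;

            if (bytes > length)
                bytes = length;

            memcpy(dst, segs[i].addr + offset, bytes);
            dst += bytes;
            length -= bytes;
            offset = 0;                     /* later segments start at byte 0 */
            i++;
        }
    }

    int main(void)
    {
        char a[] = "hello, ", b[] = "segmented ", c[] = "world";
        const struct seg segs[] = {
            { a, sizeof(a) - 1 }, { b, sizeof(b) - 1 }, { c, sizeof(c) - 1 },
        };
        char out[32] = { 0 };

        seg_copy(segs, 3, out, 15);
        printf("%s\n", out);    /* prints "lo, segmented w" */
        return 0;
    }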

copy_data() (lines 415-491): walks the scatter/gather elements, looking up the MR for each sge->lkey and copying through rxe_mem_copy(); the reference is dropped when an element is exhausted and again on the function's exit paths.

    struct rxe_mem *mem = NULL;
    ...
        mem = lookup_mem(pd, access, sge->lkey, lookup_local);
        if (!mem) {
    ...
            if (mem) {
                rxe_drop_ref(mem);
                mem = NULL;
    ...
                mem = lookup_mem(pd, access, sge->lkey,
                                 lookup_local);
                if (!mem) {
    ...
            err = rxe_mem_copy(mem, iova, addr, bytes, dir, crcp);
    ...
        if (mem)
            rxe_drop_ref(mem);
    ...
        if (mem)
            rxe_drop_ref(mem);
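
The visible lookups and reference drops suggest that copy_data() keeps a cursor (current element, offset within it) into the scatter/gather list and advances it as bytes are consumed, re-doing the lkey lookup whenever it steps to a new element. A userspace model of just the cursor movement; the structures are illustrative and the MR lookup and refcounting are reduced to a printf:

    #include <stdint.h>
    #include <stdio.h>

    struct sge { uint64_t addr; uint32_t length; uint32_t lkey; };

    /* Illustrative cursor over a scatter/gather list. */
    struct dma_state {
        struct sge *sge;    /* current element */
        int num_sge;        /* elements remaining, counting the current one */
        uint32_t offset;    /* bytes already consumed of the current element */
    };

    static int walk(struct dma_state *dma, uint32_t length)
    {
        while (length) {
            uint32_t avail = dma->sge->length - dma->offset;

            if (!avail) {                   /* element exhausted: step to the next */
                if (--dma->num_sge == 0)
                    return -1;              /* ran out of elements */
                dma->sge++;
                dma->offset = 0;
                continue;
            }

            uint32_t bytes = avail < length ? avail : length;

            /* a real implementation would look up the MR for dma->sge->lkey
             * and copy here */
            printf("copy %u bytes at iova %#llx (lkey %#x)\n", bytes,
                   (unsigned long long)(dma->sge->addr + dma->offset),
                   dma->sge->lkey);

            dma->offset += bytes;
            length -= bytes;
        }
        return 0;
    }

    int main(void)
    {
        struct sge sges[] = {
            { .addr = 0x1000, .length = 100, .lkey = 0x11 },
            { .addr = 0x9000, .length = 300, .lkey = 0x22 },
        };
        struct dma_state dma = { .sge = sges, .num_sge = 2, .offset = 0 };

        return walk(&dma, 250);     /* spans both elements: 100 + 150 bytes */
    }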

lookup_mem() (lines 529-555):

    /* (1) find the mem (mr or mw) corresponding to lkey/rkey
     * ...
     * (2) verify that the (qp) pd matches the mem pd
     * (3) verify that the mem can support the requested access
     * (4) verify that mem state is valid
     */
    ...
        struct rxe_mem *mem;
    ...
        mem = rxe_pool_get_index(&rxe->mr_pool, index);
        if (!mem)
    ...
        if (unlikely((type == lookup_local && mr_lkey(mem) != key) ||
                     (type == lookup_remote && mr_rkey(mem) != key) ||
                     mr_pd(mem) != pd ||
                     (access && !(access & mem->access)) ||
                     mem->state != RXE_MEM_STATE_VALID)) {
            rxe_drop_ref(mem);
            mem = NULL;
        }
    ...
        return mem;
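
The unlikely() block above is the whole validation: the key must match for the requested lookup type, the PD must be the caller's, any requested access bits must be granted, and the object must be in the valid state; otherwise the reference is dropped and NULL is returned. A userspace model of those checks with illustrative types; the pool-index lookup and reference counting are left out:

    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>

    enum lookup_type { lookup_local, lookup_remote };

    struct mr {
        uint32_t lkey, rkey;
        const void *pd;     /* owning protection domain */
        int access;         /* bitmask of granted access flags */
        int valid;          /* stand-in for the "valid" state */
    };

    /* NULL unless the key, PD, access rights, and state all check out. */
    static struct mr *check_mr(struct mr *mr, const void *pd, int access,
                               uint32_t key, enum lookup_type type)
    {
        if ((type == lookup_local && mr->lkey != key) ||
            (type == lookup_remote && mr->rkey != key) ||
            mr->pd != pd ||
            (access && !(access & mr->access)) ||
            !mr->valid)
            return NULL;    /* the driver would also drop its reference here */

        return mr;
    }

    int main(void)
    {
        int my_pd, other_pd;
        struct mr mr = { .lkey = 0x2a07, .rkey = 0x2a07, .pd = &my_pd,
                         .access = 0x1, .valid = 1 };

        printf("%p\n", (void *)check_mr(&mr, &my_pd, 0x1, 0x2a07, lookup_local));      /* non-NULL */
        printf("%p\n", (void *)check_mr(&mr, &other_pd, 0x1, 0x2a07, lookup_local));   /* NULL: wrong PD */
        return 0;
    }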