Lines matching refs: header
232 static void vmw_cmdbuf_header_inline_free(struct vmw_cmdbuf_header *header) in vmw_cmdbuf_header_inline_free() argument
236 if (WARN_ON_ONCE(!header->inline_space)) in vmw_cmdbuf_header_inline_free()
239 dheader = container_of(header->cb_header, struct vmw_cmdbuf_dheader, in vmw_cmdbuf_header_inline_free()
241 dma_pool_free(header->man->dheaders, dheader, header->handle); in vmw_cmdbuf_header_inline_free()
242 kfree(header); in vmw_cmdbuf_header_inline_free()
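
(Side note, not driver code: the inline-free path above uses container_of() to step from the embedded cb_header back to the enclosing dheader before handing it to dma_pool_free(). A minimal, self-contained userspace sketch of that container_of pattern, with hypothetical struct and field names.)

    #include <stddef.h>
    #include <stdio.h>

    /* Same idea as the kernel's container_of(): given &obj->member,
     * recover &obj by subtracting the member's offset within the type. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct outer {              /* hypothetical stand-in for vmw_cmdbuf_dheader */
            int tag;
            int inner;          /* stand-in for the embedded cb_header */
    };

    int main(void)
    {
            struct outer o = { .tag = 42 };
            int *member_ptr = &o.inner;

            struct outer *recovered = container_of(member_ptr, struct outer, inner);
            printf("tag = %d\n", recovered->tag);   /* prints 42 */
            return 0;
    }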
253 static void __vmw_cmdbuf_header_free(struct vmw_cmdbuf_header *header) in __vmw_cmdbuf_header_free() argument
255 struct vmw_cmdbuf_man *man = header->man; in __vmw_cmdbuf_header_free()
259 if (header->inline_space) { in __vmw_cmdbuf_header_free()
260 vmw_cmdbuf_header_inline_free(header); in __vmw_cmdbuf_header_free()
264 drm_mm_remove_node(&header->node); in __vmw_cmdbuf_header_free()
266 if (header->cb_header) in __vmw_cmdbuf_header_free()
267 dma_pool_free(man->headers, header->cb_header, in __vmw_cmdbuf_header_free()
268 header->handle); in __vmw_cmdbuf_header_free()
269 kfree(header); in __vmw_cmdbuf_header_free()
278 void vmw_cmdbuf_header_free(struct vmw_cmdbuf_header *header) in vmw_cmdbuf_header_free() argument
280 struct vmw_cmdbuf_man *man = header->man; in vmw_cmdbuf_header_free()
283 if (header->inline_space) { in vmw_cmdbuf_header_free()
284 vmw_cmdbuf_header_inline_free(header); in vmw_cmdbuf_header_free()
288 __vmw_cmdbuf_header_free(header); in vmw_cmdbuf_header_free()
298 static int vmw_cmdbuf_header_submit(struct vmw_cmdbuf_header *header) in vmw_cmdbuf_header_submit() argument
300 struct vmw_cmdbuf_man *man = header->man; in vmw_cmdbuf_header_submit()
303 val = upper_32_bits(header->handle); in vmw_cmdbuf_header_submit()
306 val = lower_32_bits(header->handle); in vmw_cmdbuf_header_submit()
307 val |= header->cb_context & SVGA_CB_CONTEXT_MASK; in vmw_cmdbuf_header_submit()
310 return header->cb_header->status; in vmw_cmdbuf_header_submit()
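
(Illustration only: the submit path above splits the 64-bit DMA handle of the command-buffer header across two 32-bit register writes and ORs the context id into the low word. A stand-alone sketch of that split; the mask value and register names here are made up, and it assumes the handle is aligned so its low bits are free for the context id.)

    #include <inttypes.h>
    #include <stdint.h>
    #include <stdio.h>

    #define CB_CONTEXT_MASK 0x3fu   /* hypothetical stand-in for SVGA_CB_CONTEXT_MASK */

    static void write_reg(const char *name, uint32_t val)
    {
            printf("%s <- 0x%08" PRIx32 "\n", name, val);  /* real code writes MMIO */
    }

    int main(void)
    {
            uint64_t handle  = 0x0000001234567000ull;  /* example DMA address */
            uint32_t context = 1;

            write_reg("HIGH", (uint32_t)(handle >> 32));
            write_reg("LOW",  (uint32_t)(handle & 0xffffffffu) |
                              (context & CB_CONTEXT_MASK));
            return 0;
    }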
471 struct vmw_cmdbuf_header *header, in vmw_cmdbuf_ctx_add() argument
474 if (!(header->cb_header->flags & SVGA_CB_FLAG_DX_CONTEXT)) in vmw_cmdbuf_ctx_add()
475 header->cb_header->dxContext = 0; in vmw_cmdbuf_ctx_add()
476 header->cb_context = cb_context; in vmw_cmdbuf_ctx_add()
477 list_add_tail(&header->list, &man->ctx[cb_context].submitted); in vmw_cmdbuf_ctx_add()
530 SVGA3dCmdHeader *header = (SVGA3dCmdHeader *) in vmw_cmdbuf_work_func() local
539 if (!vmw_cmd_describe(header, &error_cmd_size, &cmd_name)) { in vmw_cmdbuf_work_func()
860 struct vmw_cmdbuf_header *header, in vmw_cmdbuf_space_pool() argument
871 ret = vmw_cmdbuf_alloc_space(man, &header->node, size, interruptible); in vmw_cmdbuf_space_pool()
876 header->cb_header = dma_pool_zalloc(man->headers, GFP_KERNEL, in vmw_cmdbuf_space_pool()
877 &header->handle); in vmw_cmdbuf_space_pool()
878 if (!header->cb_header) { in vmw_cmdbuf_space_pool()
883 header->size = header->node.size << PAGE_SHIFT; in vmw_cmdbuf_space_pool()
884 cb_hdr = header->cb_header; in vmw_cmdbuf_space_pool()
885 offset = header->node.start << PAGE_SHIFT; in vmw_cmdbuf_space_pool()
886 header->cmd = man->map + offset; in vmw_cmdbuf_space_pool()
899 drm_mm_remove_node(&header->node); in vmw_cmdbuf_space_pool()
914 struct vmw_cmdbuf_header *header, in vmw_cmdbuf_space_inline() argument
924 &header->handle); in vmw_cmdbuf_space_inline()
928 header->inline_space = true; in vmw_cmdbuf_space_inline()
929 header->size = VMW_CMDBUF_INLINE_SIZE; in vmw_cmdbuf_space_inline()
931 header->cb_header = cb_hdr; in vmw_cmdbuf_space_inline()
932 header->cmd = dheader->cmd; in vmw_cmdbuf_space_inline()
935 cb_hdr->ptr.pa = (u64)header->handle + in vmw_cmdbuf_space_inline()
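
(The truncated line 935 adds an offset to the dheader's DMA handle; assuming that offset locates the embedded inline command area within the dheader, the arithmetic is plain base-plus-offsetof. A sketch with hypothetical types.)

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical layout: a device-visible header with the command bytes
     * embedded right after it (the "inline" case). */
    struct dheader {
            uint32_t      flags;
            uint32_t      length;
            unsigned char cmd[64];       /* inline command space */
    };

    int main(void)
    {
            uint64_t handle = 0x10000000ull;  /* pretend DMA address of the dheader */

            /* Device-visible address of the embedded command area:
             * base handle plus the member's offset within the struct. */
            uint64_t cmd_pa = handle + offsetof(struct dheader, cmd);

            printf("cmd at 0x%llx\n", (unsigned long long)cmd_pa);
            return 0;
    }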
958 struct vmw_cmdbuf_header *header; in vmw_cmdbuf_alloc() local
963 header = kzalloc(sizeof(*header), GFP_KERNEL); in vmw_cmdbuf_alloc()
964 if (!header) in vmw_cmdbuf_alloc()
968 ret = vmw_cmdbuf_space_inline(man, header, size); in vmw_cmdbuf_alloc()
970 ret = vmw_cmdbuf_space_pool(man, header, size, interruptible); in vmw_cmdbuf_alloc()
973 kfree(header); in vmw_cmdbuf_alloc()
977 header->man = man; in vmw_cmdbuf_alloc()
978 INIT_LIST_HEAD(&header->list); in vmw_cmdbuf_alloc()
979 header->cb_header->status = SVGA_CB_STATUS_NONE; in vmw_cmdbuf_alloc()
980 *p_header = header; in vmw_cmdbuf_alloc()
982 return header->cmd; in vmw_cmdbuf_alloc()
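
(Lines 968 and 970 show two allocation paths, and line 929 shows VMW_CMDBUF_INLINE_SIZE as the inline capacity, so presumably small requests use the inline dheader and larger ones the main pool. The condition itself is not among the matched lines, so the comparison below is an assumption; names and the size value are stand-ins.)

    #include <stddef.h>
    #include <stdio.h>

    #define INLINE_SIZE 4096u   /* stand-in for VMW_CMDBUF_INLINE_SIZE; actual value not shown */

    /* Hypothetical miniatures of vmw_cmdbuf_space_inline()/_pool(). */
    static int space_inline(size_t size) { printf("inline %zu\n", size); return 0; }
    static int space_pool(size_t size)   { printf("pool %zu\n",   size); return 0; }

    static int alloc_space(size_t size)
    {
            /* Assumption: buffers that fit in the header's inline area take the
             * cheap path; larger ones come from the command-buffer pool. */
            if (size <= INLINE_SIZE)
                    return space_inline(size);
            return space_pool(size);
    }

    int main(void)
    {
            alloc_space(128);
            alloc_space(1 << 20);
            return 0;
    }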
1074 struct vmw_cmdbuf_header *header) in vmw_cmdbuf_reserve() argument
1076 if (!header) in vmw_cmdbuf_reserve()
1079 if (size > header->size) in vmw_cmdbuf_reserve()
1083 header->cb_header->flags |= SVGA_CB_FLAG_DX_CONTEXT; in vmw_cmdbuf_reserve()
1084 header->cb_header->dxContext = ctx_id; in vmw_cmdbuf_reserve()
1087 header->reserved = size; in vmw_cmdbuf_reserve()
1088 return header->cmd; in vmw_cmdbuf_reserve()
1101 struct vmw_cmdbuf_header *header, bool flush) in vmw_cmdbuf_commit() argument
1103 if (!header) { in vmw_cmdbuf_commit()
1110 WARN_ON(size > header->reserved); in vmw_cmdbuf_commit()
1111 man->cur = header; in vmw_cmdbuf_commit()
1114 header->cb_header->flags &= ~SVGA_CB_FLAG_DX_CONTEXT; in vmw_cmdbuf_commit()
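
(The reserve/commit pair above is a two-phase API: reserve hands back a pointer into the buffer and records how much space was promised, and commit checks that no more than that was actually written. A minimal stand-alone sketch of the same contract, with hypothetical names.)

    #include <assert.h>
    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    struct buf {
            char   space[256];
            size_t capacity;
            size_t reserved;    /* set by reserve(), checked by commit() */
    };

    static void *buf_reserve(struct buf *b, size_t size)
    {
            if (size > b->capacity)        /* mirrors "if (size > header->size)" */
                    return NULL;
            b->reserved = size;
            return b->space;
    }

    static void buf_commit(struct buf *b, size_t size)
    {
            assert(size <= b->reserved);   /* mirrors WARN_ON(size > header->reserved) */
            printf("committed %zu of %zu reserved bytes\n", size, b->reserved);
            b->reserved = 0;
    }

    int main(void)
    {
            struct buf b = { .capacity = sizeof(b.space) };
            char *cmd = buf_reserve(&b, 64);

            if (cmd) {
                    memcpy(cmd, "payload", 8);
                    buf_commit(&b, 8);     /* commit only what was actually written */
            }
            return 0;
    }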
1134 struct vmw_cmdbuf_header *header; in vmw_cmdbuf_send_device_command() local
1136 void *cmd = vmw_cmdbuf_alloc(man, size, false, &header); in vmw_cmdbuf_send_device_command()
1142 header->cb_header->length = size; in vmw_cmdbuf_send_device_command()
1143 header->cb_context = SVGA_CB_CONTEXT_DEVICE; in vmw_cmdbuf_send_device_command()
1145 status = vmw_cmdbuf_header_submit(header); in vmw_cmdbuf_send_device_command()
1147 vmw_cmdbuf_header_free(header); in vmw_cmdbuf_send_device_command()
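
(The last group shows the synchronous device-command path: allocate a header, copy the command in (the memcpy line does not match "header" and so is not listed), set the length and device context, submit, then free. A self-contained mock of that call sequence; the real driver functions are replaced with hypothetical stubs and the context value is assumed.)

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical stand-ins for the driver calls seen above. */
    struct cmd_header { uint32_t length; uint32_t context; void *cmd; };

    static struct cmd_header *cmdbuf_alloc(size_t size)
    {
            struct cmd_header *h = calloc(1, sizeof(*h));
            if (h)
                    h->cmd = calloc(1, size);
            return h;
    }

    static uint32_t cmdbuf_submit(const struct cmd_header *h)
    {
            printf("submit len=%u ctx=%u\n", h->length, h->context);
            return 0;   /* a completion status in the real driver */
    }

    static void cmdbuf_free(struct cmd_header *h) { free(h->cmd); free(h); }

    int main(void)
    {
            const char command[] = "example command";
            struct cmd_header *h = cmdbuf_alloc(sizeof(command));
            if (!h)
                    return 1;

            memcpy(h->cmd, command, sizeof(command));  /* copy the payload in */
            h->length  = sizeof(command);              /* cb_header->length */
            h->context = 2;                            /* device context id (value assumed) */

            printf("status = %u\n", cmdbuf_submit(h));
            cmdbuf_free(h);
            return 0;
    }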