Lines matching `size` (DMA mapping API declarations and inline helpers)

80 	void* (*alloc)(struct device *dev, size_t size,
83 void (*free)(struct device *dev, size_t size,
94 unsigned long offset, size_t size,
98 size_t size, enum dma_data_direction dir,
112 size_t size, enum dma_data_direction dir,
115 size_t size, enum dma_data_direction dir,
118 dma_addr_t dma_handle, size_t size,
121 dma_addr_t dma_handle, size_t size,
129 void (*cache_sync)(struct device *dev, void *vaddr, size_t size,
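The callbacks above (alloc, free, map_page, unmap_page, the sync_single pair, cache_sync) are members of struct dma_map_ops, the per-bus/per-arch backend that the dma_* wrappers further down dispatch to. A minimal, hypothetical sketch of such a table for a bus with a 1:1 physical-to-bus address mapping follows; the callback names and signatures are taken from the lines above, everything else (the "mybus" names, the passthrough policy) is invented for illustration, and a real backend would also implement map_sg, unmap_sg, mmap, get_sgtable and friends.

    #include <linux/dma-mapping.h>
    #include <linux/gfp.h>
    #include <linux/io.h>
    #include <linux/mm.h>

    /* Hypothetical coherent allocation: plain pages, bus address == phys. */
    static void *mybus_alloc(struct device *dev, size_t size,
                             dma_addr_t *dma_handle, gfp_t gfp,
                             unsigned long attrs)
    {
            void *cpu_addr = (void *)__get_free_pages(gfp, get_order(size));

            if (cpu_addr)
                    *dma_handle = virt_to_phys(cpu_addr);  /* 1:1 assumed */
            return cpu_addr;
    }

    static void mybus_free(struct device *dev, size_t size, void *cpu_addr,
                           dma_addr_t dma_handle, unsigned long attrs)
    {
            free_pages((unsigned long)cpu_addr, get_order(size));
    }

    /* Streaming map: no IOMMU and no bounce buffering in this sketch. */
    static dma_addr_t mybus_map_page(struct device *dev, struct page *page,
                                     unsigned long offset, size_t size,
                                     enum dma_data_direction dir,
                                     unsigned long attrs)
    {
            return page_to_phys(page) + offset;
    }

    static void mybus_unmap_page(struct device *dev, dma_addr_t handle,
                                 size_t size, enum dma_data_direction dir,
                                 unsigned long attrs)
    {
            /* nothing to undo for a 1:1 mapping */
    }

    static const struct dma_map_ops mybus_dma_ops = {
            .alloc          = mybus_alloc,
            .free           = mybus_free,
            .map_page       = mybus_map_page,
            .unmap_page     = mybus_unmap_page,
    };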
158 int dma_alloc_from_dev_coherent(struct device *dev, ssize_t size,
163 void *cpu_addr, size_t size, int *ret);
165 void *dma_alloc_from_global_coherent(ssize_t size, dma_addr_t *dma_handle);
168 size_t size, int *ret);
171 #define dma_alloc_from_dev_coherent(dev, size, handle, ret) (0) argument
175 static inline void *dma_alloc_from_global_coherent(ssize_t size, in dma_alloc_from_global_coherent() argument
187 void *cpu_addr, size_t size, in dma_mmap_from_global_coherent() argument
204 unsigned long offset, size_t size, enum dma_data_direction dir,
209 size_t size, enum dma_data_direction dir, unsigned long attrs);
214 dma_addr_t addr, size_t size, enum dma_data_direction dir);
219 dma_addr_t addr, size_t size, enum dma_data_direction dir) in dma_direct_sync_single_for_device() argument
232 size_t size, enum dma_data_direction dir, unsigned long attrs);
236 dma_addr_t addr, size_t size, enum dma_data_direction dir);
241 size_t size, enum dma_data_direction dir, unsigned long attrs) in dma_direct_unmap_page() argument
250 dma_addr_t addr, size_t size, enum dma_data_direction dir) in dma_direct_sync_single_for_cpu() argument
278 struct page *page, size_t offset, size_t size, in dma_map_page_attrs() argument
286 addr = dma_direct_map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
288 addr = ops->map_page(dev, page, offset, size, dir, attrs); in dma_map_page_attrs()
289 debug_dma_map_page(dev, page, offset, size, dir, addr); in dma_map_page_attrs()
295 size_t size, enum dma_data_direction dir, unsigned long attrs) in dma_unmap_page_attrs() argument
301 dma_direct_unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
303 ops->unmap_page(dev, addr, size, dir, attrs); in dma_unmap_page_attrs()
304 debug_dma_unmap_page(dev, addr, size, dir); in dma_unmap_page_attrs()
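dma_map_page_attrs()/dma_unmap_page_attrs() above are the streaming-mapping entry points: they dispatch either to dma-direct or to the device's dma_map_ops and record the mapping for DMA debugging. A short usage sketch follows; the demo_* helpers and the RX-buffer scenario are hypothetical, the dma_* calls are the APIs shown above.

    #include <linux/dma-mapping.h>

    /* Hypothetical: hand one page of RX buffer to the device. */
    static int demo_map_rx_page(struct device *dev, struct page *page,
                                dma_addr_t *out)
    {
            dma_addr_t addr;

            addr = dma_map_page_attrs(dev, page, 0, PAGE_SIZE,
                                      DMA_FROM_DEVICE, 0);
            if (dma_mapping_error(dev, addr))
                    return -ENOMEM;         /* failed mapping: no unmap needed */

            *out = addr;                    /* program this into the device */
            return 0;
    }

    static void demo_unmap_rx_page(struct device *dev, dma_addr_t addr)
    {
            dma_unmap_page_attrs(dev, addr, PAGE_SIZE, DMA_FROM_DEVICE, 0);
    }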
345 size_t size, in dma_map_resource() argument
359 addr = dma_direct_map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
361 addr = ops->map_resource(dev, phys_addr, size, dir, attrs); in dma_map_resource()
363 debug_dma_map_resource(dev, phys_addr, size, dir, addr); in dma_map_resource()
368 size_t size, enum dma_data_direction dir, in dma_unmap_resource() argument
375 ops->unmap_resource(dev, addr, size, dir, attrs); in dma_unmap_resource()
376 debug_dma_unmap_resource(dev, addr, size, dir); in dma_unmap_resource()
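dma_map_resource()/dma_unmap_resource() map a raw physical address range rather than struct pages, typically another device's MMIO region (e.g. a peripheral FIFO) so a DMA engine can reach it. A hedged sketch, with a hypothetical fifo_phys/fifo_len pair standing in for the target device's resource:

    #include <linux/dma-mapping.h>

    /*
     * Hypothetical: make a peripheral FIFO at fifo_phys reachable by a
     * DMA engine that sits behind dma_dev (device-to-device transfer setup).
     */
    static dma_addr_t demo_map_fifo(struct device *dma_dev,
                                    phys_addr_t fifo_phys, size_t fifo_len)
    {
            dma_addr_t addr;

            addr = dma_map_resource(dma_dev, fifo_phys, fifo_len,
                                    DMA_BIDIRECTIONAL, 0);
            if (dma_mapping_error(dma_dev, addr))
                    return 0;       /* caller treats 0 as failure in this sketch */
            return addr;
    }

    static void demo_unmap_fifo(struct device *dma_dev, dma_addr_t addr,
                                size_t fifo_len)
    {
            dma_unmap_resource(dma_dev, addr, fifo_len, DMA_BIDIRECTIONAL, 0);
    }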
380 size_t size, in dma_sync_single_for_cpu() argument
387 dma_direct_sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
389 ops->sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
390 debug_dma_sync_single_for_cpu(dev, addr, size, dir); in dma_sync_single_for_cpu()
394 dma_addr_t addr, size_t size, in dma_sync_single_for_device() argument
401 dma_direct_sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
403 ops->sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
404 debug_dma_sync_single_for_device(dev, addr, size, dir); in dma_sync_single_for_device()
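The sync_single helpers above transfer ownership of an existing streaming mapping between CPU and device without unmapping it, which is how long-lived RX buffers are usually recycled. Sketch (the demo_rx_buf descriptor is hypothetical):

    #include <linux/dma-mapping.h>

    /* Hypothetical descriptor for a reused RX buffer. */
    struct demo_rx_buf {
            void            *cpu_addr;
            dma_addr_t       dma_addr;
            size_t           len;
    };

    static void demo_rx_complete(struct device *dev, struct demo_rx_buf *buf)
    {
            /* Device finished writing: give the buffer back to the CPU... */
            dma_sync_single_for_cpu(dev, buf->dma_addr, buf->len,
                                    DMA_FROM_DEVICE);

            /* ... inspect buf->cpu_addr here ... */

            /* ... then hand the same mapping back to the device. */
            dma_sync_single_for_device(dev, buf->dma_addr, buf->len,
                                       DMA_FROM_DEVICE);
    }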
445 void *dma_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle,
447 void dma_free_attrs(struct device *dev, size_t size, void *cpu_addr,
449 void *dmam_alloc_attrs(struct device *dev, size_t size, dma_addr_t *dma_handle,
451 void dmam_free_coherent(struct device *dev, size_t size, void *vaddr,
453 void dma_cache_sync(struct device *dev, void *vaddr, size_t size,
456 void *cpu_addr, dma_addr_t dma_addr, size_t size,
459 void *cpu_addr, dma_addr_t dma_addr, size_t size,
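dma_alloc_attrs()/dma_free_attrs() are the attribute-taking forms behind dma_alloc_coherent(); DMA_ATTR_* flags adjust how the buffer is allocated and mapped. A sketch using the standard DMA_ATTR_NO_KERNEL_MAPPING attribute (the demo_* wrappers are hypothetical):

    #include <linux/dma-mapping.h>

    /*
     * Hypothetical: a frame buffer the kernel never dereferences, so no
     * kernel virtual mapping is needed (DMA_ATTR_NO_KERNEL_MAPPING).
     * The returned pointer is then only a cookie for dma_free_attrs()
     * and dma_mmap_attrs().
     */
    static void *demo_alloc_frame(struct device *dev, size_t size,
                                  dma_addr_t *dma_handle)
    {
            return dma_alloc_attrs(dev, size, dma_handle, GFP_KERNEL,
                                   DMA_ATTR_NO_KERNEL_MAPPING);
    }

    static void demo_free_frame(struct device *dev, size_t size,
                                void *cookie, dma_addr_t dma_handle)
    {
            dma_free_attrs(dev, size, cookie, dma_handle,
                           DMA_ATTR_NO_KERNEL_MAPPING);
    }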
470 struct page *page, size_t offset, size_t size, in dma_map_page_attrs() argument
476 size_t size, enum dma_data_direction dir, unsigned long attrs) in dma_unmap_page_attrs() argument
490 phys_addr_t phys_addr, size_t size, enum dma_data_direction dir, in dma_map_resource() argument
496 size_t size, enum dma_data_direction dir, unsigned long attrs) in dma_unmap_resource() argument
500 size_t size, enum dma_data_direction dir) in dma_sync_single_for_cpu() argument
504 dma_addr_t addr, size_t size, enum dma_data_direction dir) in dma_sync_single_for_device() argument
519 static inline void *dma_alloc_attrs(struct device *dev, size_t size, in dma_alloc_attrs() argument
524 static void dma_free_attrs(struct device *dev, size_t size, void *cpu_addr, in dma_free_attrs() argument
528 static inline void *dmam_alloc_attrs(struct device *dev, size_t size, in dmam_alloc_attrs() argument
533 static inline void dmam_free_coherent(struct device *dev, size_t size, in dmam_free_coherent() argument
537 static inline void dma_cache_sync(struct device *dev, void *vaddr, size_t size, in dma_cache_sync() argument
543 size_t size, unsigned long attrs) in dma_get_sgtable_attrs() argument
548 void *cpu_addr, dma_addr_t dma_addr, size_t size, in dma_mmap_attrs() argument
584 size_t size, enum dma_data_direction dir, unsigned long attrs) in dma_map_single_attrs() argument
586 debug_dma_map_single(dev, ptr, size); in dma_map_single_attrs()
588 size, dir, attrs); in dma_map_single_attrs()
592 size_t size, enum dma_data_direction dir, unsigned long attrs) in dma_unmap_single_attrs() argument
594 return dma_unmap_page_attrs(dev, addr, size, dir, attrs); in dma_unmap_single_attrs()
598 dma_addr_t addr, unsigned long offset, size_t size, in dma_sync_single_range_for_cpu() argument
601 return dma_sync_single_for_cpu(dev, addr + offset, size, dir); in dma_sync_single_range_for_cpu()
605 dma_addr_t addr, unsigned long offset, size_t size, in dma_sync_single_range_for_device() argument
608 return dma_sync_single_for_device(dev, addr + offset, size, dir); in dma_sync_single_range_for_device()
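dma_map_single_attrs() is the kmalloc-buffer convenience wrapper over the page-mapping path above, and dma_sync_single_range_for_cpu()/..._for_device() sync only a sub-range of such a mapping. Sketch (the command-buffer and descriptor-ring scenarios are hypothetical):

    #include <linux/dma-mapping.h>
    #include <linux/slab.h>

    /* Hypothetical: map a kmalloc'd command buffer for a one-shot TX. */
    static int demo_send_cmd(struct device *dev, void *cmd, size_t len)
    {
            dma_addr_t addr;

            addr = dma_map_single(dev, cmd, len, DMA_TO_DEVICE);
            if (dma_mapping_error(dev, addr))
                    return -ENOMEM;

            /* ... kick the hardware with 'addr', wait for completion ... */

            dma_unmap_single(dev, addr, len, DMA_TO_DEVICE);
            return 0;
    }

    /*
     * For a long-lived mapping only the touched part needs syncing,
     * e.g. one descriptor inside a ring (offset/desc_len are hypothetical).
     */
    static void demo_sync_one_desc(struct device *dev, dma_addr_t ring,
                                   unsigned long offset, size_t desc_len)
    {
            dma_sync_single_range_for_cpu(dev, ring, offset, desc_len,
                                          DMA_FROM_DEVICE);
    }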
621 void *cpu_addr, dma_addr_t dma_addr, size_t size,
625 void *dma_common_contiguous_remap(struct page *page, size_t size,
628 void *dma_common_pages_remap(struct page **pages, size_t size,
630 void dma_common_free_remap(void *cpu_addr, size_t size);
632 bool dma_in_atomic_pool(void *start, size_t size);
633 void *dma_alloc_from_pool(size_t size, struct page **ret_page, gfp_t flags);
634 bool dma_free_from_pool(void *start, size_t size);
638 dma_addr_t dma_addr, size_t size, unsigned long attrs);
640 static inline void *dma_alloc_coherent(struct device *dev, size_t size, in dma_alloc_coherent() argument
644 return dma_alloc_attrs(dev, size, dma_handle, gfp, in dma_alloc_coherent()
648 static inline void dma_free_coherent(struct device *dev, size_t size, in dma_free_coherent() argument
651 return dma_free_attrs(dev, size, cpu_addr, dma_handle, 0); in dma_free_coherent()
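dma_alloc_coherent()/dma_free_coherent() are the zero-attrs wrappers shown above: one call returns both a CPU pointer and a device address for a coherent buffer. Sketch of typical probe-time use (the demo_* helpers and the 32-bit mask choice are assumptions):

    #include <linux/dma-mapping.h>

    /* Hypothetical: allocate a coherent descriptor ring at probe time. */
    static void *demo_alloc_ring(struct device *dev, size_t ring_bytes,
                                 dma_addr_t *ring_dma)
    {
            /* The DMA mask is usually configured first (32-bit assumed here). */
            if (dma_set_mask_and_coherent(dev, DMA_BIT_MASK(32)))
                    return NULL;

            return dma_alloc_coherent(dev, ring_bytes, ring_dma, GFP_KERNEL);
    }

    static void demo_free_ring(struct device *dev, size_t ring_bytes,
                               void *ring, dma_addr_t ring_dma)
    {
            dma_free_coherent(dev, ring_bytes, ring, ring_dma);
    }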
701 void arch_setup_dma_ops(struct device *dev, u64 dma_base, u64 size,
705 u64 size, const struct iommu_ops *iommu, bool coherent) in arch_setup_dma_ops() argument
725 static inline int dma_set_max_seg_size(struct device *dev, unsigned int size) in dma_set_max_seg_size() argument
728 dev->dma_parms->max_segment_size = size; in dma_set_max_seg_size()
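As the excerpt shows, dma_set_max_seg_size() just stores the limit in dev->dma_parms->max_segment_size, so dma_parms must be allocated before it is called; many drivers do this in probe. Sketch (the 64 KiB cap and demo_ helper are assumptions):

    #include <linux/device.h>
    #include <linux/dma-mapping.h>
    #include <linux/sizes.h>

    /* Hypothetical probe fragment: cap DMA segments at 64 KiB. */
    static int demo_setup_dma_parms(struct device *dev)
    {
            if (!dev->dma_parms) {
                    dev->dma_parms = devm_kzalloc(dev, sizeof(*dev->dma_parms),
                                                  GFP_KERNEL);
                    if (!dev->dma_parms)
                            return -ENOMEM;
            }
            return dma_set_max_seg_size(dev, SZ_64K);
    }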
760 dma_addr_t device_addr, size_t size);
764 dma_addr_t device_addr, size_t size) in dma_declare_coherent_memory() argument
770 static inline void *dmam_alloc_coherent(struct device *dev, size_t size, in dmam_alloc_coherent() argument
773 return dmam_alloc_attrs(dev, size, dma_handle, gfp, in dmam_alloc_coherent()
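dma_declare_coherent_memory() registers a device-local memory region (e.g. on-chip SRAM) as the device's coherent pool, so later dma_alloc_coherent()/dmam_alloc_coherent() calls are satisfied from it via dma_alloc_from_dev_coherent() shown earlier. A sketch assuming the four-argument, error-code-returning form listed above; the SRAM addresses and demo_ helper are hypothetical:

    #include <linux/dma-mapping.h>
    #include <linux/sizes.h>

    /*
     * Hypothetical: a platform device with dedicated SRAM at sram_phys,
     * visible to the device itself at bus address sram_bus.
     */
    static int demo_declare_sram(struct device *dev, phys_addr_t sram_phys,
                                 dma_addr_t sram_bus, size_t sram_len)
    {
            dma_addr_t dma;
            void *buf;
            int ret;

            ret = dma_declare_coherent_memory(dev, sram_phys, sram_bus,
                                              sram_len);
            if (ret)
                    return ret;

            /* Managed allocation, now satisfied from the SRAM pool and
             * released automatically on driver detach. */
            buf = dmam_alloc_coherent(dev, SZ_4K, &dma, GFP_KERNEL);
            if (!buf)
                    return -ENOMEM;

            return 0;
    }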
777 static inline void *dma_alloc_wc(struct device *dev, size_t size, in dma_alloc_wc() argument
785 return dma_alloc_attrs(dev, size, dma_addr, gfp, attrs); in dma_alloc_wc()
788 static inline void dma_free_wc(struct device *dev, size_t size, in dma_free_wc() argument
791 return dma_free_attrs(dev, size, cpu_addr, dma_addr, in dma_free_wc()
798 size_t size) in dma_mmap_wc() argument
800 return dma_mmap_attrs(dev, vma, cpu_addr, dma_addr, size, in dma_mmap_wc()
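The *_wc helpers are thin wrappers that pass DMA_ATTR_WRITE_COMBINE, which suits framebuffer-style buffers the CPU only streams into and that userspace may want to mmap(). Sketch (the demo_fb structure and helpers are hypothetical):

    #include <linux/dma-mapping.h>
    #include <linux/mm.h>

    /* Hypothetical driver state holding one write-combined buffer. */
    struct demo_fb {
            struct device   *dev;
            void            *cpu_addr;
            dma_addr_t       dma_addr;
            size_t           size;
    };

    static int demo_fb_alloc(struct demo_fb *fb, size_t size)
    {
            fb->cpu_addr = dma_alloc_wc(fb->dev, size, &fb->dma_addr,
                                        GFP_KERNEL);
            if (!fb->cpu_addr)
                    return -ENOMEM;
            fb->size = size;
            return 0;
    }

    /* Map the same buffer into a userspace vma (e.g. from fops->mmap). */
    static int demo_fb_mmap(struct demo_fb *fb, struct vm_area_struct *vma)
    {
            return dma_mmap_wc(fb->dev, vma, fb->cpu_addr, fb->dma_addr,
                               fb->size);
    }

    static void demo_fb_free(struct demo_fb *fb)
    {
            dma_free_wc(fb->dev, fb->size, fb->cpu_addr, fb->dma_addr);
    }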