/Linux-v5.4/drivers/md/
D | dm-clone-metadata.c |
    115  struct dirty_map dmap[2];   member
    466  cmd->dmap[0].changed = 0;   in dirty_map_init()
    467  cmd->dmap[0].dirty_words = kvzalloc(bitmap_size(cmd->nr_words), GFP_KERNEL);   in dirty_map_init()
    469  if (!cmd->dmap[0].dirty_words) {   in dirty_map_init()
    474  cmd->dmap[1].changed = 0;   in dirty_map_init()
    475  cmd->dmap[1].dirty_words = kvzalloc(bitmap_size(cmd->nr_words), GFP_KERNEL);   in dirty_map_init()
    477  if (!cmd->dmap[1].dirty_words) {   in dirty_map_init()
    479  kvfree(cmd->dmap[0].dirty_words);   in dirty_map_init()
    483  cmd->current_dmap = &cmd->dmap[0];   in dirty_map_init()
    490  kvfree(cmd->dmap[0].dirty_words);   in dirty_map_exit()
    [all …]
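
The lines above show dm-clone allocating a pair of dirty-word bitmaps and pointing current_dmap at one of them, which reads like a double-buffering scheme: one map collects new changes while the other can be flushed. A minimal userspace sketch of that idea, assuming the double-buffer interpretation; struct dmap_pair and the helper names are invented, and calloc stands in for kvzalloc:

#include <stdlib.h>

/* Illustrative analogue of a double-buffered dirty bitmap (not kernel code). */
struct dirty_map {
    unsigned long *dirty_words;  /* one bit per tracked word */
    int changed;
};

struct dmap_pair {
    struct dirty_map dmap[2];
    struct dirty_map *current_dmap;
    size_t nr_words;
};

static int dmap_pair_init(struct dmap_pair *p, size_t nr_words)
{
    p->nr_words = nr_words;

    p->dmap[0].changed = 0;
    p->dmap[0].dirty_words = calloc(nr_words, sizeof(unsigned long));
    if (!p->dmap[0].dirty_words)
        return -1;

    p->dmap[1].changed = 0;
    p->dmap[1].dirty_words = calloc(nr_words, sizeof(unsigned long));
    if (!p->dmap[1].dirty_words) {
        free(p->dmap[0].dirty_words);
        return -1;
    }

    p->current_dmap = &p->dmap[0];  /* new changes land here first */
    return 0;
}

/* Flip buffers: the old map can be flushed while new changes go to the other. */
static void dmap_pair_swap(struct dmap_pair *p)
{
    p->current_dmap = (p->current_dmap == &p->dmap[0]) ? &p->dmap[1] : &p->dmap[0];
}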
|
D | dm-zoned-metadata.c |
    1342  struct dmz_map *dmap;   in dmz_load_mapping() local
    1361  dmap = (struct dmz_map *) dmap_mblk->data;   in dmz_load_mapping()
    1367  dzone_id = le32_to_cpu(dmap[e].dzone_id);   in dmz_load_mapping()
    1388  bzone_id = le32_to_cpu(dmap[e].bzone_id);   in dmz_load_mapping()
    1465  struct dmz_map *dmap = (struct dmz_map *) dmap_mblk->data;   in dmz_set_chunk_mapping() local
    1468  dmap[map_idx].dzone_id = cpu_to_le32(dzone_id);   in dmz_set_chunk_mapping()
    1469  dmap[map_idx].bzone_id = cpu_to_le32(bzone_id);   in dmz_set_chunk_mapping()
    1637  struct dmz_map *dmap = (struct dmz_map *) dmap_mblk->data;   in dmz_get_chunk_mapping() local
    1646  dzone_id = le32_to_cpu(dmap[dmap_idx].dzone_id);   in dmz_get_chunk_mapping()
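
Here dm-zoned reads and writes its chunk-to-zone mapping through le32_to_cpu()/cpu_to_le32(), i.e. the zone IDs live on disk in little-endian form and are converted on every access. A hedged sketch of the same pattern with hand-rolled byte-order helpers in place of the kernel macros (struct chunk_map and the function names are invented for illustration):

#include <stdint.h>

/* Illustrative on-disk mapping entry: both IDs are stored little-endian. */
struct chunk_map {
    uint32_t dzone_id;
    uint32_t bzone_id;
};

static uint32_t le32_to_host(uint32_t v)
{
    const uint8_t *b = (const uint8_t *)&v;

    return (uint32_t)b[0] | ((uint32_t)b[1] << 8) |
           ((uint32_t)b[2] << 16) | ((uint32_t)b[3] << 24);
}

static uint32_t host_to_le32(uint32_t v)
{
    uint32_t out;
    uint8_t *b = (uint8_t *)&out;

    b[0] = v & 0xff;
    b[1] = (v >> 8) & 0xff;
    b[2] = (v >> 16) & 0xff;
    b[3] = (v >> 24) & 0xff;
    return out;
}

/* Read one mapping entry, converting from the on-disk byte order. */
static void get_mapping(const struct chunk_map *map, unsigned int idx,
                        uint32_t *dzone_id, uint32_t *bzone_id)
{
    *dzone_id = le32_to_host(map[idx].dzone_id);
    *bzone_id = le32_to_host(map[idx].bzone_id);
}

/* Update one mapping entry, converting to the on-disk byte order. */
static void set_mapping(struct chunk_map *map, unsigned int idx,
                        uint32_t dzone_id, uint32_t bzone_id)
{
    map[idx].dzone_id = host_to_le32(dzone_id);
    map[idx].bzone_id = host_to_le32(bzone_id);
}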
|
/Linux-v5.4/fs/nilfs2/ |
D | page.c |
    241  int nilfs_copy_dirty_pages(struct address_space *dmap,   in nilfs_copy_dirty_pages() argument
    261  dpage = grab_cache_page(dmap, page->index);   in nilfs_copy_dirty_pages()
    295  void nilfs_copy_back_pages(struct address_space *dmap,   in nilfs_copy_back_pages() argument
    313  dpage = find_lock_page(dmap, offset);   in nilfs_copy_back_pages()
    331  xa_lock_irq(&dmap->i_pages);   in nilfs_copy_back_pages()
    332  p = __xa_store(&dmap->i_pages, offset, page, GFP_NOFS);   in nilfs_copy_back_pages()
    338  page->mapping = dmap;   in nilfs_copy_back_pages()
    339  dmap->nrpages++;   in nilfs_copy_back_pages()
    341  __xa_set_mark(&dmap->i_pages, offset,   in nilfs_copy_back_pages()
    344  xa_unlock_irq(&dmap->i_pages);   in nilfs_copy_back_pages()
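
In these nilfs2 helpers, dmap is the destination address_space: dirty pages are looked up or created there, spliced in under the xarray lock, and re-tagged dirty. A very loose userspace analogue of "copy every entry flagged dirty from one cache to another" is sketched below; the toy_cache type, the fixed sizes, and the single mutex are invented simplifications of what the kernel actually does:

#include <pthread.h>
#include <string.h>

#define CACHE_PAGES 64
#define PAGE_SZ     4096

/* Toy stand-in for an address_space: a fixed array of pages plus flags. */
struct toy_cache {
    pthread_mutex_t lock;
    unsigned char pages[CACHE_PAGES][PAGE_SZ];
    unsigned char present[CACHE_PAGES];
    unsigned char dirty[CACHE_PAGES];
};

/* Mirror every dirty page of src into dst, carrying the dirty flag along. */
static void copy_dirty_pages(struct toy_cache *dst, struct toy_cache *src)
{
    pthread_mutex_lock(&src->lock);
    pthread_mutex_lock(&dst->lock);

    for (unsigned int i = 0; i < CACHE_PAGES; i++) {
        if (!src->present[i] || !src->dirty[i])
            continue;
        memcpy(dst->pages[i], src->pages[i], PAGE_SZ);
        dst->present[i] = 1;
        dst->dirty[i] = 1;  /* keep the copy marked dirty in dst */
    }

    pthread_mutex_unlock(&dst->lock);
    pthread_mutex_unlock(&src->lock);
}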
|
/Linux-v5.4/fs/jfs/ |
D | jfs_dmap.c |
    64   static void dbAllocBits(struct bmap * bmp, struct dmap * dp, s64 blkno,
    73   static int dbAllocNext(struct bmap * bmp, struct dmap * dp, s64 blkno,
    75   static int dbAllocNear(struct bmap * bmp, struct dmap * dp, s64 blkno,
    78   static int dbAllocDmap(struct bmap * bmp, struct dmap * dp, s64 blkno,
    80   static int dbAllocDmapLev(struct bmap * bmp, struct dmap * dp, int nblocks,
    91   static int dbFreeBits(struct bmap * bmp, struct dmap * dp, s64 blkno,
    93   static int dbFreeDmap(struct bmap * bmp, struct dmap * dp, s64 blkno,
    101  static int dbAllocDmapBU(struct bmap * bmp, struct dmap * dp, s64 blkno,
    103  static int dbInitDmap(struct dmap * dp, s64 blkno, int nblocks);
    104  static int dbInitDmapTree(struct dmap * dp);
    [all …]
|
D | jfs_dmap.h |
    147  struct dmap {   struct
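
In JFS, struct dmap (defined at jfs_dmap.h:147) is the unit the dbAlloc*/dbFree* routines listed above operate on: a block-allocation bitmap page with an accompanying free-space summary. The sketch below is a heavily simplified, purely illustrative in-memory version with just a word bitmap and a free counter; the 8192-block size is assumed for illustration and none of the names are JFS's:

#include <stdint.h>
#include <string.h>

#define DMAP_BLOCKS 8192
#define DMAP_WORDS  (DMAP_BLOCKS / 32)

/* Simplified allocation-map page: bit set = block allocated. */
struct toy_dmap {
    uint32_t nfree;              /* number of free blocks */
    uint32_t wmap[DMAP_WORDS];   /* working allocation bitmap */
};

static void toy_dmap_init(struct toy_dmap *dp)
{
    memset(dp->wmap, 0, sizeof(dp->wmap));
    dp->nfree = DMAP_BLOCKS;
}

static int toy_bit_is_set(const struct toy_dmap *dp, uint32_t bno)
{
    return (dp->wmap[bno / 32] >> (bno % 32)) & 1u;
}

/* Allocate nblocks contiguous blocks; returns the first block or -1. */
static int toy_dmap_alloc(struct toy_dmap *dp, uint32_t nblocks)
{
    for (uint32_t start = 0; start + nblocks <= DMAP_BLOCKS; start++) {
        uint32_t n;

        for (n = 0; n < nblocks; n++)
            if (toy_bit_is_set(dp, start + n))
                break;
        if (n < nblocks) {
            start += n;  /* jump past the allocated block and retry */
            continue;
        }
        for (n = 0; n < nblocks; n++)
            dp->wmap[(start + n) / 32] |= 1u << ((start + n) % 32);
        dp->nfree -= nblocks;
        return (int)start;
    }
    return -1;  /* no contiguous run of nblocks free blocks */
}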
|
/Linux-v5.4/drivers/misc/sgi-gru/ |
D | grufault.c |
    143  struct gru_tlb_fault_map *dmap)   in get_clear_fault_map() argument
    158  dmap->fault_bits[i] = k;   in get_clear_fault_map()
    522  struct gru_tlb_fault_map imap, dmap;   in gru_intr() local
    536  get_clear_fault_map(gru, &imap, &dmap);   in gru_intr()
    541  dmap.fault_bits[0], dmap.fault_bits[1]);   in gru_intr()
    543  for_each_cbr_in_tfm(cbrnum, dmap.fault_bits) {   in gru_intr()
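
In gru_intr(), get_clear_fault_map() snapshots the pending TLB fault bitmap into dmap and the driver then visits every set bit with for_each_cbr_in_tfm(). A hedged sketch of that "fetch-and-clear, then iterate set bits" pattern using C11 atomics; the two-word map size, the names, and the printf handler are assumptions:

#include <stdatomic.h>
#include <stdint.h>
#include <stdio.h>

#define FAULT_WORDS 2  /* assumed size of the fault bitmap */

/* Snapshot and clear the pending-fault bitmap, one atomic swap per word. */
static void get_clear_faults(_Atomic uint64_t live[FAULT_WORDS],
                             uint64_t snap[FAULT_WORDS])
{
    for (int i = 0; i < FAULT_WORDS; i++)
        snap[i] = atomic_exchange(&live[i], 0);
}

/* Handle every set bit in the snapshot (one bit per faulting resource). */
static void handle_faults(const uint64_t snap[FAULT_WORDS])
{
    for (int i = 0; i < FAULT_WORDS; i++) {
        uint64_t w = snap[i];

        while (w) {
            int bit = __builtin_ctzll(w);  /* index of the lowest set bit */

            printf("fault on resource %d\n", i * 64 + bit);
            w &= w - 1;  /* clear that bit and keep scanning */
        }
    }
}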
|
/Linux-v5.4/drivers/staging/media/ipu3/ |
D | ipu3.c |
    76   &imgu_pipe->queues[i].dmap);   in imgu_dummybufs_cleanup()
    97   &imgu_pipe->queues[i].dmap, size)) {   in imgu_dummybufs_preallocate()
    137  &imgu_pipe->queues[i].dmap,   in imgu_dummybufs_init()
    145  imgu_pipe->queues[i].dmap.daddr);   in imgu_dummybufs_init()
    162  if (WARN_ON(!imgu_pipe->queues[queue].dmap.vaddr))   in imgu_dummybufs_get()
    175  imgu_pipe->queues[queue].dmap.daddr);   in imgu_dummybufs_get()
|
D | ipu3.h |
    110  struct imgu_css_map dmap;   member
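
The ipu3 excerpts show each pipe queue carrying a preallocated imgu_css_map dmap, a buffer with both a CPU-visible vaddr and a device-visible daddr, which the dummy-buffer helpers hand out when no real buffer is available. Below is an illustrative userspace stand-in for that shape only; the dummy_map type and helpers are invented, and calloc plus a faked device address replace the real DMA mapping API:

#include <stdint.h>
#include <stdlib.h>

/* Illustrative stand-in for a DMA mapping: CPU pointer plus device address. */
struct dummy_map {
    void     *vaddr;  /* CPU-visible address */
    uint64_t  daddr;  /* address as seen by the device (faked here) */
    size_t    size;
};

static int dummy_map_alloc(struct dummy_map *map, size_t size)
{
    map->vaddr = calloc(1, size);
    if (!map->vaddr)
        return -1;
    map->size  = size;
    map->daddr = (uint64_t)(uintptr_t)map->vaddr;  /* fake device address */
    return 0;
}

/* Hand out the preallocated buffer's device address, if there is one. */
static int dummy_map_get(const struct dummy_map *map, uint64_t *daddr)
{
    if (!map->vaddr)
        return -1;  /* nothing was preallocated for this queue */
    *daddr = map->daddr;
    return 0;
}

static void dummy_map_cleanup(struct dummy_map *map)
{
    free(map->vaddr);
    map->vaddr = NULL;
    map->daddr = 0;
    map->size  = 0;
}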
|
/Linux-v5.4/drivers/iommu/ |
D | tegra-smmu.c |
    549  dma_addr_t *dmap)   in tegra_smmu_pte_lookup() argument
    560  *dmap = smmu_pde_to_dma(pd[pd_index]);   in tegra_smmu_pte_lookup()
    566  dma_addr_t *dmap)   in as_get_pte() argument
    598  *dmap = dma;   in as_get_pte()
    602  *dmap = smmu_pde_to_dma(pd[pde]);   in as_get_pte()
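
In tegra-smmu, the dmap out-parameter of tegra_smmu_pte_lookup() and as_get_pte() returns the DMA address of the second-level page table that holds the PTE, with as_get_pte() apparently allocating that table on demand. A simplified sketch of a two-level lookup with the same out-parameter shape; the table geometry and names are illustrative and a plain index stands in for the real DMA address:

#include <stdint.h>
#include <stdlib.h>

#define PD_ENTRIES 1024  /* illustrative geometry */
#define PT_ENTRIES 1024
#define PG_SHIFT   12

struct toy_as {
    uint32_t *pd[PD_ENTRIES];      /* page directory: pointers to page tables */
    uint64_t  pt_dma[PD_ENTRIES];  /* fake "DMA address" of each page table */
};

/* Look up the PTE for iova; also report the containing table's DMA address. */
static uint32_t *toy_pte_lookup(struct toy_as *as, uint64_t iova, uint64_t *dmap)
{
    unsigned int pde = (iova >> (PG_SHIFT + 10)) % PD_ENTRIES;
    unsigned int pte = (iova >> PG_SHIFT) % PT_ENTRIES;

    if (!as->pd[pde])
        return NULL;

    *dmap = as->pt_dma[pde];
    return &as->pd[pde][pte];
}

/* Like the lookup, but allocate the second-level table if it is missing. */
static uint32_t *toy_get_pte(struct toy_as *as, uint64_t iova, uint64_t *dmap)
{
    unsigned int pde = (iova >> (PG_SHIFT + 10)) % PD_ENTRIES;

    if (!as->pd[pde]) {
        as->pd[pde] = calloc(PT_ENTRIES, sizeof(uint32_t));
        if (!as->pd[pde])
            return NULL;
        as->pt_dma[pde] = (uint64_t)pde;  /* fake handle for the new table */
    }

    return toy_pte_lookup(as, iova, dmap);
}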
|
/Linux-v5.4/drivers/edac/ |
D | pnd2_edac.c |
    439   static struct d_cr_dmap dmap[DNV_NUM_CHANNELS];   variable
    495   RD_REGP(&dmap[i], d_cr_dmap, dnv_dports[i]) ||   in dnv_get_registers()
    1000  daddr->rank = dnv_get_bit(pmiaddr, dmap[pmiidx].rs0 + 13, 0);   in dnv_pmi2mem()
    1002  daddr->rank |= dnv_get_bit(pmiaddr, dmap[pmiidx].rs1 + 13, 1);   in dnv_pmi2mem()
    1010  daddr->bank = dnv_get_bit(pmiaddr, dmap[pmiidx].ba0 + 6, 0);   in dnv_pmi2mem()
    1011  daddr->bank |= dnv_get_bit(pmiaddr, dmap[pmiidx].ba1 + 6, 1);   in dnv_pmi2mem()
    1012  daddr->bank |= dnv_get_bit(pmiaddr, dmap[pmiidx].bg0 + 6, 2);   in dnv_pmi2mem()
    1014  daddr->bank |= dnv_get_bit(pmiaddr, dmap[pmiidx].bg1 + 6, 3);   in dnv_pmi2mem()
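
dnv_pmi2mem() reassembles the rank and bank numbers by pulling single bits out of the PMI address at offsets taken from the per-channel dmap register fields (rs0/rs1, ba0/ba1, bg0/bg1). A small sketch of that bit-gathering pattern; the helper, the struct, and the +13/+6 base offsets are carried over only as illustration and say nothing about the real register layout:

#include <stdint.h>

/* Extract bit 'bit' from addr and place it at position 'pos' of the result. */
static unsigned int get_bit_at(uint64_t addr, unsigned int bit, unsigned int pos)
{
    return (unsigned int)((addr >> bit) & 1u) << pos;
}

/* Illustrative per-channel decode fields (values would come from hardware). */
struct chan_decode {
    unsigned int rs0, rs1;            /* rank-select bit offsets */
    unsigned int ba0, ba1, bg0, bg1;  /* bank-address / bank-group offsets */
};

struct decoded_addr {
    unsigned int rank;
    unsigned int bank;
};

/* Gather scattered address bits into rank and bank numbers. */
static void decode_addr(uint64_t pmiaddr, const struct chan_decode *d,
                        struct decoded_addr *out)
{
    out->rank  = get_bit_at(pmiaddr, d->rs0 + 13, 0);
    out->rank |= get_bit_at(pmiaddr, d->rs1 + 13, 1);

    out->bank  = get_bit_at(pmiaddr, d->ba0 + 6, 0);
    out->bank |= get_bit_at(pmiaddr, d->ba1 + 6, 1);
    out->bank |= get_bit_at(pmiaddr, d->bg0 + 6, 2);
    out->bank |= get_bit_at(pmiaddr, d->bg1 + 6, 3);
}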
|
/Linux-v5.4/drivers/net/ethernet/apple/ |
D | bmac.c |
    179  dbdma_continue(volatile struct dbdma_regs __iomem *dmap)   in dbdma_continue() argument
    181  dbdma_st32(&dmap->control,   in dbdma_continue()
    187  dbdma_reset(volatile struct dbdma_regs __iomem *dmap)   in dbdma_reset() argument
    189  dbdma_st32(&dmap->control,   in dbdma_reset()
    192  while (dbdma_ld32(&dmap->status) & RUN)   in dbdma_reset()
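
dbdma_continue() and dbdma_reset() are thin MMIO helpers: store a value to the channel's control register and, for reset, spin until the RUN bit drops out of the status register. A userspace-style sketch of that "write control, poll status" idiom; the register layout, the bit encodings, and the bounded retry count are assumptions, and real kernel code would go through readl/writel-style accessors with a proper timeout:

#include <stdint.h>

/* Illustrative DMA channel registers; a real driver would ioremap these. */
struct toy_dma_regs {
    volatile uint32_t control;
    volatile uint32_t status;
};

#define RUN_BIT   0x8000u
#define SET_RUN   ((RUN_BIT << 16) | RUN_BIT)  /* assumed "set RUN" encoding */
#define CLEAR_RUN (RUN_BIT << 16)              /* assumed "clear RUN" encoding */

static void toy_dma_continue(struct toy_dma_regs *regs)
{
    regs->control = SET_RUN;  /* let the channel keep running */
}

/* Stop the channel and wait (bounded) for the hardware to acknowledge. */
static int toy_dma_reset(struct toy_dma_regs *regs)
{
    regs->control = CLEAR_RUN;

    for (int retries = 100000; retries > 0; retries--) {
        if (!(regs->status & RUN_BIT))
            return 0;   /* channel has stopped */
    }
    return -1;          /* hardware never acknowledged the stop */
}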
|
/Linux-v5.4/drivers/media/platform/xilinx/ |
D | xilinx-vipp.c |
    496  struct xvip_dma *dmap;   in xvip_graph_cleanup() local
    502  list_for_each_entry_safe(dma, dmap, &xdev->dmas, list) {   in xvip_graph_cleanup()
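
In xvip_graph_cleanup(), dmap is simply the spare cursor that list_for_each_entry_safe() needs so the current node can be freed while the list is being walked. The same idea in plain C with a singly linked list, keeping a copy of the next pointer before releasing the current node (types and names below are illustrative):

#include <stdlib.h>

struct node {
    struct node *next;
    /* payload would go here */
};

/* Free every node; 'nextp' plays the role of the _safe iterator's cursor. */
static void cleanup_list(struct node **head)
{
    struct node *cur, *nextp;

    for (cur = *head; cur; cur = nextp) {
        nextp = cur->next;  /* save before cur is freed */
        free(cur);
    }
    *head = NULL;
}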
|
/Linux-v5.4/fs/ocfs2/dlm/ |
D | dlmdomain.c |
    51  static inline void byte_copymap(u8 dmap[], unsigned long smap[],   in byte_copymap() argument
    59  memset(dmap, 0, ((sz + 7) >> 3));   in byte_copymap()
    62  byte_set_bit(nn, dmap);   in byte_copymap()
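
byte_copymap() appears to re-encode a node bitmap held in unsigned longs into a plain byte array, so the map has a fixed layout regardless of word size. A self-contained sketch of that conversion, with the set-bit test open-coded instead of the kernel's test_bit()/byte_set_bit() helpers:

#include <limits.h>
#include <string.h>

#define BITS_PER_LONG_ (sizeof(unsigned long) * CHAR_BIT)

/* Copy an unsigned-long bitmap of 'sz' bits into a byte-addressed bitmap. */
static void byte_copymap_sketch(unsigned char dmap[], const unsigned long smap[],
                                unsigned int sz)
{
    memset(dmap, 0, (sz + 7) >> 3);  /* clear all destination bytes */

    for (unsigned int nn = 0; nn < sz; nn++) {
        unsigned long word = smap[nn / BITS_PER_LONG_];

        if (word & (1UL << (nn % BITS_PER_LONG_)))
            dmap[nn >> 3] |= (unsigned char)(1u << (nn & 7));
    }
}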
|
/Linux-v5.4/drivers/gpu/drm/vmwgfx/device_include/ |
D | svga3d_types.h |
    677  uint32 dmap : 1;   member
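
Here dmap is a single-bit flag declared as a C bitfield inside a larger capability word. A minimal illustration of reading such a flag; the surrounding struct is invented, and unsigned int is used as the bitfield base type to stay within portable C (the header above uses its own uint32 typedef):

/* Illustrative flags word: each member occupies the stated number of bits. */
struct toy_surface_caps {
    unsigned int texture  : 1;
    unsigned int dmap     : 1;   /* single-bit flag, like the field above */
    unsigned int reserved : 30;
};

static int surface_supports_dmap(const struct toy_surface_caps *caps)
{
    return caps->dmap;  /* evaluates to 0 or 1 */
}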
|
/Linux-v5.4/fs/f2fs/ |
D | segment.c |
    1841  unsigned long *dmap = SIT_I(sbi)->tmp_map;   in add_discard_addrs() local
    1860  dmap[i] = force ? ~ckpt_map[i] & ~discard_map[i] :   in add_discard_addrs()
    1865  start = __find_rev_next_bit(dmap, max_blocks, end + 1);   in add_discard_addrs()
    1869  end = __find_rev_next_zero_bit(dmap, max_blocks, start + 1);   in add_discard_addrs()
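
add_discard_addrs() fills a temporary bitmap dmap with blocks that are candidates for discard and then walks it with find-next-bit / find-next-zero-bit style helpers to turn runs of set bits into ranges. A sketch of that "derive a bitmap, then emit contiguous ranges" pattern; the bit-search helpers are open-coded, the combining rule is simplified, and all names and sizes are illustrative:

#include <limits.h>
#include <stdio.h>

#define NBITS  512
#define BPL    (sizeof(unsigned long) * CHAR_BIT)
#define NWORDS ((NBITS + BPL - 1) / BPL)

static int test_bit_at(const unsigned long *map, int bit)
{
    return (int)((map[bit / BPL] >> (bit % BPL)) & 1ul);
}

/* Return the first index >= start whose bit equals 'want', or 'size'. */
static int find_next(const unsigned long *map, int size, int start, int want)
{
    for (int i = start; i < size; i++)
        if (test_bit_at(map, i) == want)
            return i;
    return size;
}

/* Combine two maps into candidate bits, then report each contiguous run. */
static void emit_discard_ranges(const unsigned long *ckpt_map,
                                const unsigned long *discard_map)
{
    unsigned long dmap[NWORDS];
    int start, end = -1;

    for (unsigned int i = 0; i < NWORDS; i++)
        dmap[i] = ckpt_map[i] & ~discard_map[i];  /* simplified rule */

    for (;;) {
        start = find_next(dmap, NBITS, end + 1, 1);
        if (start >= NBITS)
            break;
        end = find_next(dmap, NBITS, start + 1, 0);
        printf("discard range [%d, %d)\n", start, end);
    }
}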
|