| /Linux-v5.15/fs/btrfs/ |
| D | extent-io-tree.h |
|     126  u32 bits, int filled, struct extent_state *cached_state);
|     171  struct extent_state **cached_state, gfp_t mask,
|     184  u64 end, struct extent_state **cached_state)   in clear_extent_uptodate() argument
|     187  cached_state, GFP_NOFS, NULL);   in clear_extent_uptodate()
|     207  struct extent_state **cached_state);
|     211  struct extent_state **cached_state)   in set_extent_delalloc() argument
|     215  0, NULL, cached_state, GFP_NOFS, NULL);   in set_extent_delalloc()
|     219  u64 end, struct extent_state **cached_state)   in set_extent_defrag() argument
|     223  0, NULL, cached_state, GFP_NOFS, NULL);   in set_extent_defrag()
|     234  u64 end, struct extent_state **cached_state, gfp_t mask)   in set_extent_uptodate() argument
|     [all …]
|
| D | extent_io.c |
|     707  struct extent_state **cached_state,   in __clear_extent_bit() argument
|     742  if (cached_state) {   in __clear_extent_bit()
|     743  cached = *cached_state;   in __clear_extent_bit()
|     746  *cached_state = NULL;   in __clear_extent_bit()
|     747  cached_state = NULL;   in __clear_extent_bit()
|     974  struct extent_state **cached_state, gfp_t mask,   in set_extent_bit() argument
|    1006  if (cached_state && *cached_state) {   in set_extent_bit()
|    1007  state = *cached_state;   in set_extent_bit()
|    1027  cache_state(prealloc, cached_state);   in set_extent_bit()
|    1050  cache_state(state, cached_state);   in set_extent_bit()
|     [all …]
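The extent_io.c hits above show both halves of the btrfs caching scheme: set_extent_bit() stashes the extent_state record it just touched through the caller's cached_state pointer (cache_state() at lines 1027/1050), while __clear_extent_bit() consumes that pointer and empties the slot (lines 746/747) so a stale record is never reused. A simplified sketch of that hand-off, with made-up sketch_* names and none of the tree locking of the real v5.15 code:

```c
#include <linux/refcount.h>
#include <linux/slab.h>
#include <linux/types.h>

/* Stand-in for the btrfs extent_state record; not the kernel layout. */
struct sketch_extent_state {
	u64 start;
	u64 end;
	refcount_t refs;
};

static void sketch_free_state(struct sketch_extent_state *state)
{
	if (state && refcount_dec_and_test(&state->refs))
		kfree(state);
}

/* Setting side: pin the record we just modified in the caller's cache slot. */
static void sketch_cache_state(struct sketch_extent_state *state,
			       struct sketch_extent_state **cached_ptr)
{
	if (cached_ptr && !*cached_ptr) {
		refcount_inc(&state->refs);
		*cached_ptr = state;
	}
}

/* Clearing side: consume the slot; reuse the record only if it still covers start. */
static struct sketch_extent_state *
sketch_take_cached(struct sketch_extent_state **cached_ptr, u64 start)
{
	struct sketch_extent_state *cached = *cached_ptr;

	*cached_ptr = NULL;
	if (cached && cached->start <= start && cached->end >= start)
		return cached;		/* reference is handed to the caller */
	sketch_free_state(cached);	/* stale or absent: drop the pin */
	return NULL;
}
```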
|
| D | file.c |
|    1435  struct extent_state **cached_state)   in lock_and_cleanup_extent_if_need() argument
|    1450  cached_state);   in lock_and_cleanup_extent_if_need()
|    1457  last_pos, cached_state);   in lock_and_cleanup_extent_if_need()
|    1694  struct extent_state *cached_state = NULL;   in btrfs_buffered_write() local
|    1773  &lockend, &cached_state);   in btrfs_buffered_write()
|    1830  &cached_state, only_release_metadata);   in btrfs_buffered_write()
|    1841  lockstart, lockend, &cached_state);   in btrfs_buffered_write()
|    1843  free_extent_state(cached_state);   in btrfs_buffered_write()
|    2497  struct extent_state **cached_state)   in btrfs_punch_hole_lock_range() argument
|    2517  cached_state);   in btrfs_punch_hole_lock_range()
|     [all …]
|
| D | inode.c |
|    2581  struct extent_state **cached_state)   in btrfs_find_new_delalloc_bytes() argument
|    2607  EXTENT_DELALLOC_NEW, 0, NULL, cached_state,   in btrfs_find_new_delalloc_bytes()
|    2620  struct extent_state **cached_state)   in btrfs_set_extent_delalloc() argument
|    2636  cached_state);   in btrfs_set_extent_delalloc()
|    2642  cached_state);   in btrfs_set_extent_delalloc()
|    2656  struct extent_state *cached_state = NULL;   in btrfs_writepage_fixup_worker() local
|    2720  lock_extent_bits(&inode->io_tree, page_start, page_end, &cached_state);   in btrfs_writepage_fixup_worker()
|    2729  &cached_state);   in btrfs_writepage_fixup_worker()
|    2737  &cached_state);   in btrfs_writepage_fixup_worker()
|    2756  &cached_state);   in btrfs_writepage_fixup_worker()
|     [all …]
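file.c and inode.c above (and several of the entries that follow, such as tree-log.c) use the same idiom around these hits: lock a byte range with lock_extent_bits() passing &cached_state, do the work, then unlock through the cached pointer so the io_tree does not have to be searched a second time. A hedged sketch of that idiom for a btrfs-internal caller (the helper name and the empty body are illustrative only):

```c
#include "btrfs_inode.h"
#include "extent-io-tree.h"

/* Illustrative helper: operate on a locked byte range of a btrfs inode. */
static void sketch_locked_range_op(struct btrfs_inode *inode, u64 start, u64 end)
{
	struct extent_state *cached_state = NULL;

	/* Lock [start, end]; the locked extent_state is remembered in cached_state. */
	lock_extent_bits(&inode->io_tree, start, end, &cached_state);

	/* ... operate on the locked range here ... */

	/*
	 * Unlock through the cached pointer: the clear path starts from the
	 * remembered record instead of searching the rbtree, drops its
	 * reference and resets cached_state to NULL on the way out.
	 */
	unlock_extent_cached(&inode->io_tree, start, end, &cached_state);
}
```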
|
| D | transaction.c |
|    1028  struct extent_state *cached_state = NULL;   in btrfs_write_marked_extents() local
|    1034  mark, &cached_state)) {   in btrfs_write_marked_extents()
|    1039  mark, &cached_state);   in btrfs_write_marked_extents()
|    1063  free_extent_state(cached_state);   in btrfs_write_marked_extents()
|    1064  cached_state = NULL;   in btrfs_write_marked_extents()
|    1084  struct extent_state *cached_state = NULL;   in __btrfs_wait_marked_extents() local
|    1089  EXTENT_NEED_WAIT, &cached_state)) {   in __btrfs_wait_marked_extents()
|    1099  EXTENT_NEED_WAIT, 0, 0, &cached_state);   in __btrfs_wait_marked_extents()
|    1106  free_extent_state(cached_state);   in __btrfs_wait_marked_extents()
|    1107  cached_state = NULL;   in __btrfs_wait_marked_extents()
|
| D | ordered-data.h | 212 struct extent_state **cached_state);
|
| D | ordered-data.c |
|    1019  struct extent_state **cached_state)   in btrfs_lock_and_flush_ordered_range() argument
|    1025  if (cached_state)   in btrfs_lock_and_flush_ordered_range()
|    1026  cachedp = cached_state;   in btrfs_lock_and_flush_ordered_range()
|    1038  if (!cached_state)   in btrfs_lock_and_flush_ordered_range()
|
| D | dev-replace.c |
|     826  struct extent_state *cached_state = NULL;   in btrfs_set_target_alloc_state() local
|     836  CHUNK_ALLOCATED, &cached_state)) {   in btrfs_set_target_alloc_state()
|     844  free_extent_state(cached_state);   in btrfs_set_target_alloc_state()
|
| D | free-space-cache.c |
|    1231  struct extent_state **cached_state)   in cleanup_write_cache_enospc() argument
|    1235  i_size_read(inode) - 1, cached_state);   in cleanup_write_cache_enospc()
|    1325  struct extent_state *cached_state = NULL;   in __btrfs_write_out_cache() local
|    1361  &cached_state);   in __btrfs_write_out_cache()
|    1403  &cached_state, false);   in __btrfs_write_out_cache()
|    1417  i_size_read(inode) - 1, &cached_state);   in __btrfs_write_out_cache()
|    1439  cleanup_write_cache_enospc(inode, io_ctl, &cached_state);   in __btrfs_write_out_cache()
|
| D | ioctl.c |
|    1227  struct extent_state *cached_state = NULL;   in cluster_pages_for_defrag() local
|    1265  &cached_state);   in cluster_pages_for_defrag()
|    1269  &cached_state);   in cluster_pages_for_defrag()
|    1325  page_start, page_end - 1, &cached_state);   in cluster_pages_for_defrag()
|    1362  EXTENT_DEFRAG, 0, 0, &cached_state);   in cluster_pages_for_defrag()
|    1374  &cached_state);   in cluster_pages_for_defrag()
|    1377  page_start, page_end - 1, &cached_state);   in cluster_pages_for_defrag()
|    1392  page_start, page_end - 1, &cached_state);   in cluster_pages_for_defrag()
|
| D | disk-io.c |
|     242  struct extent_state *cached_state = NULL;   in verify_parent_transid() local
|     252  &cached_state);   in verify_parent_transid()
|     266  &cached_state);   in verify_parent_transid()
|    4802  struct extent_state *cached_state = NULL;   in btrfs_destroy_pinned_extent() local
|    4812  EXTENT_DIRTY, &cached_state);   in btrfs_destroy_pinned_extent()
|    4818  clear_extent_dirty(unpin, start, end, &cached_state);   in btrfs_destroy_pinned_extent()
|    4819  free_extent_state(cached_state);   in btrfs_destroy_pinned_extent()
|
| D | extent-tree.c |
|    2801  struct extent_state *cached_state = NULL;   in btrfs_finish_extent_commit() local
|    2805  EXTENT_DIRTY, &cached_state);   in btrfs_finish_extent_commit()
|    2815  clear_extent_dirty(unpin, start, end, &cached_state);   in btrfs_finish_extent_commit()
|    2818  free_extent_state(cached_state);   in btrfs_finish_extent_commit()
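btrfs_finish_extent_commit() here and btrfs_destroy_pinned_extent() in disk-io.c above walk pinned ranges with the same caching trick: find_first_extent_bit() records the extent_state it found, clear_extent_dirty() reuses it, and the reference is dropped before the next iteration. A condensed sketch of that loop (simplified; the real functions also unpin the space and handle discard):

```c
#include <linux/types.h>

#include "extent-io-tree.h"

/* Condensed sketch of the walk over EXTENT_DIRTY ranges in a pinned io_tree. */
static void sketch_clear_pinned(struct extent_io_tree *unpin)
{
	struct extent_state *cached_state = NULL;
	u64 start, end;

	/* find_first_extent_bit() returns 0 while dirty ranges remain. */
	while (!find_first_extent_bit(unpin, 0, &start, &end,
				      EXTENT_DIRTY, &cached_state)) {
		/* clear_extent_dirty() reuses cached_state instead of searching again. */
		clear_extent_dirty(unpin, start, end, &cached_state);
		free_extent_state(cached_state);
		cached_state = NULL;

		/* ... unpin/discard the space in [start, end] here ... */
	}
}
```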
|
| D | tree-log.c |
|    4052  struct extent_state *cached_state = NULL;   in log_csums() local
|    4070  lock_end, &cached_state);   in log_csums()
|    4087  &cached_state);   in log_csums()
|
| D | ctree.h | 3164 struct extent_state **cached_state);
|
| /Linux-v5.15/drivers/gpu/drm/msm/dsi/phy/ |
| D | dsi_phy_28nm_8960.c |
|      65  struct pll_28nm_cached_state cached_state;   member
|     346  struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;   in dsi_28nm_pll_save_state() local
|     349  cached_state->postdiv3 =   in dsi_28nm_pll_save_state()
|     351  cached_state->postdiv2 =   in dsi_28nm_pll_save_state()
|     353  cached_state->postdiv1 =   in dsi_28nm_pll_save_state()
|     356  cached_state->vco_rate = clk_hw_get_rate(phy->vco_hw);   in dsi_28nm_pll_save_state()
|     362  struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;   in dsi_28nm_pll_restore_state() local
|     367  cached_state->vco_rate, 0);   in dsi_28nm_pll_restore_state()
|     375  cached_state->postdiv3);   in dsi_28nm_pll_restore_state()
|     377  cached_state->postdiv2);   in dsi_28nm_pll_restore_state()
|     [all …]
|
| D | dsi_phy_28nm.c |
|      73  struct pll_28nm_cached_state cached_state;   member
|     481  struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;   in dsi_28nm_pll_save_state() local
|     484  cached_state->postdiv3 =   in dsi_28nm_pll_save_state()
|     486  cached_state->postdiv1 =   in dsi_28nm_pll_save_state()
|     488  cached_state->byte_mux = dsi_phy_read(base + REG_DSI_28nm_PHY_PLL_VREG_CFG);   in dsi_28nm_pll_save_state()
|     490  cached_state->vco_rate = clk_hw_get_rate(phy->vco_hw);   in dsi_28nm_pll_save_state()
|     492  cached_state->vco_rate = 0;   in dsi_28nm_pll_save_state()
|     498  struct pll_28nm_cached_state *cached_state = &pll_28nm->cached_state;   in dsi_28nm_pll_restore_state() local
|     503  cached_state->vco_rate, 0);   in dsi_28nm_pll_restore_state()
|     511  cached_state->postdiv3);   in dsi_28nm_pll_restore_state()
|     [all …]
|
| D | dsi_phy_14nm.c |
|      79  struct pll_14nm_cached_state cached_state;   member
|     689  struct pll_14nm_cached_state *cached_state = &pll_14nm->cached_state;   in dsi_14nm_pll_save_state() local
|     695  cached_state->n1postdiv = data & 0xf;   in dsi_14nm_pll_save_state()
|     696  cached_state->n2postdiv = (data >> 4) & 0xf;   in dsi_14nm_pll_save_state()
|     699  cached_state->n1postdiv, cached_state->n2postdiv);   in dsi_14nm_pll_save_state()
|     701  cached_state->vco_rate = clk_hw_get_rate(phy->vco_hw);   in dsi_14nm_pll_save_state()
|     707  struct pll_14nm_cached_state *cached_state = &pll_14nm->cached_state;   in dsi_14nm_pll_restore_state() local
|     713  cached_state->vco_rate, 0);   in dsi_14nm_pll_restore_state()
|     720  data = cached_state->n1postdiv | (cached_state->n2postdiv << 4);   in dsi_14nm_pll_restore_state()
|     723  cached_state->n1postdiv, cached_state->n2postdiv);   in dsi_14nm_pll_restore_state()
|
| D | dsi_phy_10nm.c |
|      79  struct pll_10nm_cached_state cached_state;   member
|     472  struct pll_10nm_cached_state *cached = &pll_10nm->cached_state;   in dsi_10nm_pll_save_state()
|     495  struct pll_10nm_cached_state *cached = &pll_10nm->cached_state;   in dsi_10nm_pll_restore_state()
|
| D | dsi_phy_7nm.c |
|      78  struct pll_7nm_cached_state cached_state;   member
|     500  struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;   in dsi_7nm_pll_save_state()
|     523  struct pll_7nm_cached_state *cached = &pll_7nm->cached_state;   in dsi_7nm_pll_restore_state()
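The five PHY drivers above all pair a *_pll_save_state() that latches the post-divider register fields plus clk_hw_get_rate() of the VCO into a per-PLL cached_state struct with a *_pll_restore_state() that reprograms the VCO and writes the dividers back after a PHY power cycle. A generic, hedged sketch of that save/restore shape (the register offset, field width and struct are hypothetical, not taken from any of these PHYs):

```c
#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/types.h>

#define SKETCH_REG_POSTDIV	0x48	/* hypothetical divider register offset */

struct sketch_pll_cached_state {
	u8 postdiv;
	unsigned long vco_rate;
};

/* Save: remember the divider setting and the current VCO rate. */
static void sketch_pll_save_state(void __iomem *base, struct clk_hw *vco_hw,
				  struct sketch_pll_cached_state *cached)
{
	cached->postdiv = readl(base + SKETCH_REG_POSTDIV) & 0xf;
	cached->vco_rate = clk_hw_get_rate(vco_hw);
}

/* Restore: bring the VCO back to the cached rate, then rewrite the divider. */
static int sketch_pll_restore_state(void __iomem *base, struct clk_hw *vco_hw,
				    const struct sketch_pll_cached_state *cached)
{
	int ret;

	ret = clk_set_rate(vco_hw->clk, cached->vco_rate);
	if (ret)
		return ret;

	writel(cached->postdiv, base + SKETCH_REG_POSTDIV);
	return 0;
}
```

Note that the real drivers call their own vco set_rate implementation directly rather than going through clk_set_rate(); the sketch only shows the save-then-restore shape of the cached_state struct.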
|
| /Linux-v5.15/drivers/mux/ |
| D | core.c |
|     117  mux->cached_state = MUX_CACHE_UNKNOWN;   in mux_chip_alloc()
|     131  mux->cached_state = ret < 0 ? MUX_CACHE_UNKNOWN : state;   in mux_control_set()
|     155  if (mux->idle_state == mux->cached_state)   in mux_chip_register()
|     303  if (mux->cached_state == state)   in __mux_control_select()
|     399  mux->idle_state != mux->cached_state)   in mux_control_deselect()
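core.c keeps cached_state so that __mux_control_select() can return immediately when a consumer asks for the state that is already set (line 303) and mux_control_deselect() only reprograms the hardware when the idle state differs from the cached one (line 399). A hedged consumer-side sketch of where that cache pays off (the mux name "adc-mux" is made up):

```c
#include <linux/device.h>
#include <linux/err.h>
#include <linux/mux/consumer.h>

/* Illustrative consumer: back-to-back selects of the same state hit the cache. */
static int sketch_read_behind_mux(struct device *dev, unsigned int state)
{
	struct mux_control *mux;
	int ret;

	mux = mux_control_get(dev, "adc-mux");	/* made-up mux name */
	if (IS_ERR(mux))
		return PTR_ERR(mux);

	ret = mux_control_select(mux, state);	/* no hw write if cached_state == state */
	if (ret < 0)
		goto out_put;

	/* ... do the I/O that depends on the mux position ... */

	mux_control_deselect(mux);	/* may skip hw if the idle state is cached */
out_put:
	mux_control_put(mux);
	return ret;
}
```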
|
| /Linux-v5.15/drivers/iio/multiplexer/ |
| D | iio-mux.c |
|      29  int cached_state;   member
|      47  mux->cached_state = -1;   in iio_mux_select()
|      51  if (mux->cached_state == chan->channel)   in iio_mux_select()
|      70  mux->cached_state = -1;   in iio_mux_select()
|      75  mux->cached_state = chan->channel;   in iio_mux_select()
|     393  mux->cached_state = -1;   in mux_probe()
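iio-mux.c uses a plain int cached_state with -1 meaning "unknown": it is initialised to -1 in mux_probe() (line 393), compared in iio_mux_select() to skip redundant re-selection (line 51), reset to -1 on failure paths (lines 47 and 70), and only set to the channel once selection has fully succeeded (line 75). A generic sketch of that "cache only confirmed successes" idiom (the struct and hw_set hook are hypothetical, not the iio-mux code):

```c
/* Generic "cache only confirmed successes" idiom, illustrative only. */
struct sketch_cached_sel {
	int cached_state;		/* -1 means unknown/invalid */
	int (*hw_set)(int channel);	/* hypothetical hardware hook */
};

static int sketch_select(struct sketch_cached_sel *sel, int channel)
{
	int ret;

	if (sel->cached_state == channel)
		return 0;		/* already routed: skip the rework */

	ret = sel->hw_set(channel);
	if (ret < 0) {
		sel->cached_state = -1;	/* failure: state is now unknown */
		return ret;
	}

	sel->cached_state = channel;	/* remember only a confirmed success */
	return 0;
}
```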
|
| /Linux-v5.15/include/linux/mux/ |
| D | driver.h | 46 int cached_state; member
|
| /Linux-v5.15/drivers/gpu/drm/amd/display/amdgpu_dm/ |
| D | amdgpu_dm.h | 393 struct drm_atomic_state *cached_state; member
|
| D | amdgpu_dm.c |
|    2007  WARN_ON(adev->dm.cached_state);   in dm_suspend()
|    2008  adev->dm.cached_state = drm_atomic_helper_suspend(adev_to_drm(adev));   in dm_suspend()
|    2309  for_each_new_crtc_in_state(dm->cached_state, crtc, new_crtc_state, i)   in dm_resume()
|    2317  for_each_new_crtc_in_state(dm->cached_state, crtc, new_crtc_state, i) {   in dm_resume()
|    2326  for_each_new_plane_in_state(dm->cached_state, plane, new_plane_state, i) {   in dm_resume()
|    2335  drm_atomic_helper_resume(ddev, dm->cached_state);   in dm_resume()
|    2337  dm->cached_state = NULL;   in dm_resume()
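amdgpu_dm caches the whole atomic display state across suspend: dm_suspend() stores what drm_atomic_helper_suspend() duplicated (line 2008), and dm_resume() walks the cached CRTC and plane states before handing the state back to drm_atomic_helper_resume() and clearing the pointer (lines 2335/2337). A minimal sketch of that DRM helper pattern for a generic driver (the private struct is illustrative, not amdgpu's):

```c
#include <linux/err.h>

#include <drm/drm_atomic_helper.h>
#include <drm/drm_device.h>

/* Illustrative private data; amdgpu keeps this in struct amdgpu_display_manager. */
struct sketch_display_priv {
	struct drm_device *ddev;
	struct drm_atomic_state *cached_state;
};

static int sketch_display_suspend(struct sketch_display_priv *priv)
{
	/* Duplicates the committed state and disables all outputs. */
	priv->cached_state = drm_atomic_helper_suspend(priv->ddev);
	if (IS_ERR(priv->cached_state)) {
		int ret = PTR_ERR(priv->cached_state);

		priv->cached_state = NULL;
		return ret;
	}
	return 0;
}

static int sketch_display_resume(struct sketch_display_priv *priv)
{
	int ret;

	/* Recommits the cached state; the helper drops the state reference itself. */
	ret = drm_atomic_helper_resume(priv->ddev, priv->cached_state);
	priv->cached_state = NULL;
	return ret;
}
```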
|