Lines Matching refs:rcg
46 #define RCG_CFG_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG) argument
47 #define RCG_M_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG) argument
48 #define RCG_N_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG) argument
49 #define RCG_D_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG) argument
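
The four RCG_*_OFFSET macros above locate an RCG's CFG/M/N/D registers at the CMD_RCGR base plus an optional per-clock cfg_off shift plus a fixed per-register offset. A minimal standalone sketch of that address arithmetic, using a pared-down stand-in for struct clk_rcg2 and assumed register offsets (CFG_REG = 0x4, M_REG = 0x8, N_REG = 0xc, D_REG = 0x10):

/* Sketch only: rcg_stub stands in for struct clk_rcg2 and the *_REG
 * offsets are assumed values chosen to illustrate the macros above. */
#include <stdio.h>

#define CFG_REG 0x4
#define M_REG   0x8
#define N_REG   0xc
#define D_REG   0x10

struct rcg_stub {
	unsigned int cmd_rcgr;	/* base of the RCG register block */
	unsigned int cfg_off;	/* extra shift some RCGs apply to CFG/M/N/D */
};

#define RCG_CFG_OFFSET(rcg) ((rcg)->cmd_rcgr + (rcg)->cfg_off + CFG_REG)
#define RCG_M_OFFSET(rcg)   ((rcg)->cmd_rcgr + (rcg)->cfg_off + M_REG)
#define RCG_N_OFFSET(rcg)   ((rcg)->cmd_rcgr + (rcg)->cfg_off + N_REG)
#define RCG_D_OFFSET(rcg)   ((rcg)->cmd_rcgr + (rcg)->cfg_off + D_REG)

int main(void)
{
	struct rcg_stub rcg = { .cmd_rcgr = 0x1000, .cfg_off = 0 };

	printf("CFG 0x%x M 0x%x N 0x%x D 0x%x\n",
	       RCG_CFG_OFFSET(&rcg), RCG_M_OFFSET(&rcg),
	       RCG_N_OFFSET(&rcg), RCG_D_OFFSET(&rcg));
	return 0;
}
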
66 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_is_enabled() local
70 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd); in clk_rcg2_is_enabled()
79 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in __clk_rcg2_get_parent() local
87 if (cfg == rcg->parent_map[i].cfg) in __clk_rcg2_get_parent()
97 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_get_parent() local
101 ret = regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg); in clk_rcg2_get_parent()
111 static int update_config(struct clk_rcg2 *rcg) in update_config() argument
115 struct clk_hw *hw = &rcg->clkr.hw; in update_config()
118 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, in update_config()
125 ret = regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, &cmd); in update_config()
139 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_set_parent() local
141 u32 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT; in clk_rcg2_set_parent()
143 ret = regmap_update_bits(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), in clk_rcg2_set_parent()
148 return update_config(rcg); in clk_rcg2_set_parent()
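
clk_rcg2_set_parent() and update_config() above follow the usual RCG reprogramming handshake: write the new source-select field into CFG, set the update bit in CMD, then poll CMD until the hardware clears that bit. A self-contained sketch of that sequence against a fake register array; CMD_UPDATE and the source-select field layout here are illustrative assumptions, not the driver's macros:

/* Sketch of the CMD/CFG update handshake; regs[] stands in for regmap I/O
 * and the bit positions below are assumptions, not the driver's macros. */
#include <stdio.h>

#define CMD_REG           0x0
#define CFG_REG           0x4
#define CMD_UPDATE        (1u << 0)              /* assumed "apply new CFG" bit */
#define CFG_SRC_SEL_SHIFT 8
#define CFG_SRC_SEL_MASK  (0x7u << CFG_SRC_SEL_SHIFT)

static unsigned int regs[16];                    /* fake MMIO, indexed by offset / 4 */

static void reg_write(unsigned int off, unsigned int val) { regs[off / 4] = val; }
static unsigned int reg_read(unsigned int off) { return regs[off / 4]; }

/* Models the hardware eventually acking the update request. */
static void hw_tick(void) { regs[CMD_REG / 4] &= ~CMD_UPDATE; }

static int update_config(void)
{
	int tries;

	reg_write(CMD_REG, reg_read(CMD_REG) | CMD_UPDATE);

	for (tries = 0; tries < 5; tries++) {
		hw_tick();                       /* asynchronous in real hardware */
		if (!(reg_read(CMD_REG) & CMD_UPDATE))
			return 0;                /* new CFG has been latched */
	}
	return -1;                               /* update never completed */
}

static int set_parent(unsigned int src)
{
	unsigned int cfg = reg_read(CFG_REG);

	cfg &= ~CFG_SRC_SEL_MASK;
	cfg |= (src << CFG_SRC_SEL_SHIFT) & CFG_SRC_SEL_MASK;
	reg_write(CFG_REG, cfg);

	return update_config();
}

int main(void)
{
	printf("set_parent(2) -> %d, CFG = 0x%x\n", set_parent(2), reg_read(CFG_REG));
	return 0;
}
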
179 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in __clk_rcg2_recalc_rate() local
182 if (rcg->mnd_width) { in __clk_rcg2_recalc_rate()
183 mask = BIT(rcg->mnd_width) - 1; in __clk_rcg2_recalc_rate()
184 regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m); in __clk_rcg2_recalc_rate()
186 regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &n); in __clk_rcg2_recalc_rate()
194 mask = BIT(rcg->hid_width) - 1; in __clk_rcg2_recalc_rate()
204 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_recalc_rate() local
207 regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg); in clk_rcg2_recalc_rate()
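
__clk_rcg2_recalc_rate() above reads M, N and the CFG divider field and converts them back into a frequency; the output is essentially the parent rate scaled by 2 / (hid_div + 1) for the half-integer predivider and, when the MND counter is active, further scaled by m / n. A worked standalone sketch of that arithmetic under those assumptions (register decoding omitted):

/* Sketch of the assumed RCG2 rate math: half-integer predivider plus optional M/N. */
#include <stdio.h>

static unsigned long calc_rate(unsigned long parent, unsigned int m,
			       unsigned int n, int mnd_mode, unsigned int hid_div)
{
	unsigned long long rate = parent;

	if (hid_div)                    /* CFG divider field encodes 2*div - 1 */
		rate = rate * 2 / (hid_div + 1);

	if (mnd_mode && n)              /* MND counter scales the rate by m/n */
		rate = rate * m / n;

	return (unsigned long)rate;
}

int main(void)
{
	/* e.g. 19.2 MHz parent, divider 1 (field value 1), m/n = 1/2 -> 9.6 MHz */
	printf("%lu\n", calc_rate(19200000UL, 1, 2, 1, 1));
	return 0;
}
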
218 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in _freq_tbl_determine_rate() local
235 index = qcom_find_src_index(hw, rcg->parent_map, f->src); in _freq_tbl_determine_rate()
272 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_determine_rate() local
274 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, CEIL); in clk_rcg2_determine_rate()
280 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_determine_floor_rate() local
282 return _freq_tbl_determine_rate(hw, rcg->freq_tbl, req, FLOOR); in clk_rcg2_determine_floor_rate()
285 static int __clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f, in __clk_rcg2_configure() argument
289 struct clk_hw *hw = &rcg->clkr.hw; in __clk_rcg2_configure()
290 int ret, index = qcom_find_src_index(hw, rcg->parent_map, f->src); in __clk_rcg2_configure()
295 if (rcg->mnd_width && f->n) { in __clk_rcg2_configure()
296 mask = BIT(rcg->mnd_width) - 1; in __clk_rcg2_configure()
297 ret = regmap_update_bits(rcg->clkr.regmap, in __clk_rcg2_configure()
298 RCG_M_OFFSET(rcg), mask, f->m); in __clk_rcg2_configure()
302 ret = regmap_update_bits(rcg->clkr.regmap, in __clk_rcg2_configure()
303 RCG_N_OFFSET(rcg), mask, ~(f->n - f->m)); in __clk_rcg2_configure()
316 ret = regmap_update_bits(rcg->clkr.regmap, in __clk_rcg2_configure()
317 RCG_D_OFFSET(rcg), mask, not2d_val); in __clk_rcg2_configure()
322 mask = BIT(rcg->hid_width) - 1; in __clk_rcg2_configure()
325 cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT; in __clk_rcg2_configure()
326 if (rcg->mnd_width && f->n && (f->m != f->n)) in __clk_rcg2_configure()
335 static int clk_rcg2_configure(struct clk_rcg2 *rcg, const struct freq_tbl *f) in clk_rcg2_configure() argument
340 ret = regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg); in clk_rcg2_configure()
344 ret = __clk_rcg2_configure(rcg, f, &cfg); in clk_rcg2_configure()
348 ret = regmap_write(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), cfg); in clk_rcg2_configure()
352 return update_config(rcg); in clk_rcg2_configure()
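
The writes in __clk_rcg2_configure() above suggest the M/N/D encoding: the M register takes m, the N register takes the bitwise complement of (n - m), and the D register takes a "not2d" value, i.e. the complement of 2*d, all truncated to mnd_width bits. A small illustrative helper that packs values the same way (the helper and its example values are hypothetical):

/* Sketch of the M/N/D register encoding implied by the writes above:
 * M = m, N = ~(n - m), D = ~(2 * d), all masked to mnd_width bits. */
#include <stdio.h>

struct mnd_regs {
	unsigned int m;		/* M counter value */
	unsigned int n;		/* "not_n_minus_m": ~(n - m) */
	unsigned int d;		/* "not2d": ~(2 * d) */
};

static struct mnd_regs encode_mnd(unsigned int m, unsigned int n,
				  unsigned int d, unsigned int mnd_width)
{
	unsigned int mask = (1u << mnd_width) - 1;
	struct mnd_regs r = {
		.m = m & mask,
		.n = ~(n - m) & mask,
		.d = ~(2 * d) & mask,
	};
	return r;
}

int main(void)
{
	/* hypothetical example: m/n = 1/2 with d = 1 (so 2d == n), 8-bit MND */
	struct mnd_regs r = encode_mnd(1, 2, 1, 8);

	printf("M=0x%02x N=0x%02x D=0x%02x\n", r.m, r.n, r.d);
	return 0;
}
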
358 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in __clk_rcg2_set_rate() local
363 f = qcom_find_freq_floor(rcg->freq_tbl, rate); in __clk_rcg2_set_rate()
366 f = qcom_find_freq(rcg->freq_tbl, rate); in __clk_rcg2_set_rate()
375 return clk_rcg2_configure(rcg, f); in __clk_rcg2_set_rate()
404 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_get_duty_cycle() local
407 if (!rcg->mnd_width) { in clk_rcg2_get_duty_cycle()
414 regmap_read(rcg->clkr.regmap, RCG_D_OFFSET(rcg), &not2d); in clk_rcg2_get_duty_cycle()
415 regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m); in clk_rcg2_get_duty_cycle()
416 regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &notn_m); in clk_rcg2_get_duty_cycle()
425 mask = BIT(rcg->mnd_width) - 1; in clk_rcg2_get_duty_cycle()
440 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_set_duty_cycle() local
445 if (!rcg->mnd_width) in clk_rcg2_set_duty_cycle()
448 mask = BIT(rcg->mnd_width) - 1; in clk_rcg2_set_duty_cycle()
450 regmap_read(rcg->clkr.regmap, RCG_N_OFFSET(rcg), &notn_m); in clk_rcg2_set_duty_cycle()
451 regmap_read(rcg->clkr.regmap, RCG_M_OFFSET(rcg), &m); in clk_rcg2_set_duty_cycle()
452 regmap_read(rcg->clkr.regmap, RCG_CFG_OFFSET(rcg), &cfg); in clk_rcg2_set_duty_cycle()
478 ret = regmap_update_bits(rcg->clkr.regmap, RCG_D_OFFSET(rcg), mask, in clk_rcg2_set_duty_cycle()
483 return update_config(rcg); in clk_rcg2_set_duty_cycle()
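
clk_rcg2_get_duty_cycle() and clk_rcg2_set_duty_cycle() above appear to work with the same complement encoding, recovering n from the ~(n - m) value and d from the ~(2d) value before treating the duty cycle as d/n. A standalone decode sketch under those assumptions, fed with the values produced by the encoding example earlier:

/* Sketch: recover the duty cycle d/n from the complement-encoded M/N/D
 * values read above, using the same encoding assumptions as the previous
 * example (N holds ~(n - m), D holds ~(2 * d)). */
#include <stdio.h>

static void decode_duty(unsigned int m, unsigned int notn_m, unsigned int not2d,
			unsigned int mnd_width,
			unsigned int *num, unsigned int *den)
{
	unsigned int mask = (1u << mnd_width) - 1;
	unsigned int n = (~notn_m + m) & mask;        /* undo ~(n - m) */
	unsigned int d = ((~not2d & mask) + 1) / 2;   /* undo ~(2d), round to nearest */

	*num = d;
	*den = n;
}

int main(void)
{
	unsigned int num, den;

	/* values produced by the encoding sketch: m = 1, n = 2, d = 1, 8-bit MND */
	decode_duty(0x01, 0xfe, 0xfd, 8, &num, &den);
	printf("duty = %u/%u\n", num, den);           /* expect 1/2, i.e. 50% */
	return 0;
}
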
549 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_edp_pixel_set_rate() local
550 struct freq_tbl f = *rcg->freq_tbl; in clk_edp_pixel_set_rate()
555 u32 mask = BIT(rcg->hid_width) - 1; in clk_edp_pixel_set_rate()
571 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, in clk_edp_pixel_set_rate()
579 return clk_rcg2_configure(rcg, &f); in clk_edp_pixel_set_rate()
595 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_edp_pixel_determine_rate() local
596 const struct freq_tbl *f = rcg->freq_tbl; in clk_edp_pixel_determine_rate()
600 u32 mask = BIT(rcg->hid_width) - 1; in clk_edp_pixel_determine_rate()
602 int index = qcom_find_src_index(hw, rcg->parent_map, f->src); in clk_edp_pixel_determine_rate()
621 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, in clk_edp_pixel_determine_rate()
649 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_byte_determine_rate() local
650 const struct freq_tbl *f = rcg->freq_tbl; in clk_byte_determine_rate()
651 int index = qcom_find_src_index(hw, rcg->parent_map, f->src); in clk_byte_determine_rate()
653 u32 mask = BIT(rcg->hid_width) - 1; in clk_byte_determine_rate()
673 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_byte_set_rate() local
674 struct freq_tbl f = *rcg->freq_tbl; in clk_byte_set_rate()
676 u32 mask = BIT(rcg->hid_width) - 1; in clk_byte_set_rate()
683 return clk_rcg2_configure(rcg, &f); in clk_byte_set_rate()
707 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_byte2_determine_rate() local
709 u32 mask = BIT(rcg->hid_width) - 1; in clk_byte2_determine_rate()
730 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_byte2_set_rate() local
734 u32 mask = BIT(rcg->hid_width) - 1; in clk_byte2_set_rate()
742 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg); in clk_byte2_set_rate()
747 if (cfg == rcg->parent_map[i].cfg) { in clk_byte2_set_rate()
748 f.src = rcg->parent_map[i].src; in clk_byte2_set_rate()
749 return clk_rcg2_configure(rcg, &f); in clk_byte2_set_rate()
809 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_pixel_set_rate() local
814 u32 mask = BIT(rcg->hid_width) - 1; in clk_pixel_set_rate()
818 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg); in clk_pixel_set_rate()
823 if (cfg == rcg->parent_map[i].cfg) { in clk_pixel_set_rate()
824 f.src = rcg->parent_map[i].src; in clk_pixel_set_rate()
835 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, in clk_pixel_set_rate()
843 return clk_rcg2_configure(rcg, &f); in clk_pixel_set_rate()
941 struct clk_rcg2 *rcg = &cgfx->rcg; in clk_gfx3d_set_rate_and_parent() local
945 cfg = rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT; in clk_gfx3d_set_rate_and_parent()
950 ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, cfg); in clk_gfx3d_set_rate_and_parent()
954 return update_config(rcg); in clk_gfx3d_set_rate_and_parent()
981 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_set_force_enable() local
985 ret = regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, in clk_rcg2_set_force_enable()
1004 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_clear_force_enable() local
1006 return regmap_update_bits(rcg->clkr.regmap, rcg->cmd_rcgr + CMD_REG, in clk_rcg2_clear_force_enable()
1013 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_shared_force_enable_clear() local
1020 ret = clk_rcg2_configure(rcg, f); in clk_rcg2_shared_force_enable_clear()
1030 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_shared_set_rate() local
1033 f = qcom_find_freq(rcg->freq_tbl, rate); in clk_rcg2_shared_set_rate()
1043 return __clk_rcg2_configure(rcg, f, &rcg->parked_cfg); in clk_rcg2_shared_set_rate()
1056 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_shared_enable() local
1068 ret = regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, rcg->parked_cfg); in clk_rcg2_shared_enable()
1072 ret = update_config(rcg); in clk_rcg2_shared_enable()
1081 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_shared_disable() local
1087 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &rcg->parked_cfg); in clk_rcg2_shared_disable()
1099 regmap_write(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, in clk_rcg2_shared_disable()
1100 rcg->safe_src_index << CFG_SRC_SEL_SHIFT); in clk_rcg2_shared_disable()
1102 update_config(rcg); in clk_rcg2_shared_disable()
1109 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_shared_get_parent() local
1113 return __clk_rcg2_get_parent(hw, rcg->parked_cfg); in clk_rcg2_shared_get_parent()
1120 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_shared_set_parent() local
1124 rcg->parked_cfg &= ~CFG_SRC_SEL_MASK; in clk_rcg2_shared_set_parent()
1125 rcg->parked_cfg |= rcg->parent_map[index].cfg << CFG_SRC_SEL_SHIFT; in clk_rcg2_shared_set_parent()
1136 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_shared_recalc_rate() local
1140 return __clk_rcg2_recalc_rate(hw, parent_rate, rcg->parked_cfg); in clk_rcg2_shared_recalc_rate()
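
The clk_rcg2_shared_* callbacks above park a shared RCG while it is disabled: disable caches CFG into rcg->parked_cfg and muxes to safe_src_index, set_parent and set_rate only update the cached copy while parked, and enable writes parked_cfg back before issuing the update. A compact standalone model of that cache-and-restore flow (the register, field names and bit layout are illustrative stand-ins):

/* Sketch of the parked_cfg flow for shared RCGs: CFG is cached while the
 * clock is parked on a safe source and written back on enable. */
#include <stdio.h>

#define CFG_SRC_SEL_SHIFT 8
#define CFG_SRC_SEL_MASK  (0x7u << CFG_SRC_SEL_SHIFT)

struct shared_rcg {
	unsigned int hw_cfg;       /* stands in for the CFG register */
	unsigned int parked_cfg;   /* cached CFG while the RCG is parked */
	unsigned int safe_src;     /* mux selector of the always-on safe source */
	int enabled;
};

static void shared_disable(struct shared_rcg *r)
{
	r->parked_cfg = r->hw_cfg;                      /* remember current config */
	r->hw_cfg = r->safe_src << CFG_SRC_SEL_SHIFT;   /* park on the safe source */
	r->enabled = 0;
}

static void shared_set_parent(struct shared_rcg *r, unsigned int src)
{
	/* While parked, only the cached copy changes; hardware is untouched. */
	r->parked_cfg &= ~CFG_SRC_SEL_MASK;
	r->parked_cfg |= (src << CFG_SRC_SEL_SHIFT) & CFG_SRC_SEL_MASK;
}

static void shared_enable(struct shared_rcg *r)
{
	r->hw_cfg = r->parked_cfg;                      /* restore the cached config */
	r->enabled = 1;
}

int main(void)
{
	struct shared_rcg r = { .hw_cfg = 2u << CFG_SRC_SEL_SHIFT, .safe_src = 0 };

	shared_disable(&r);
	shared_set_parent(&r, 3);     /* takes effect at the next enable */
	shared_enable(&r);
	printf("CFG src = %u\n", (r.hw_cfg & CFG_SRC_SEL_MASK) >> CFG_SRC_SEL_SHIFT);
	return 0;
}
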
1161 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_dfs_populate_freq() local
1167 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(l), &cfg); in clk_rcg2_dfs_populate_freq()
1169 mask = BIT(rcg->hid_width) - 1; in clk_rcg2_dfs_populate_freq()
1179 if (src == rcg->parent_map[i].cfg) { in clk_rcg2_dfs_populate_freq()
1180 f->src = rcg->parent_map[i].src; in clk_rcg2_dfs_populate_freq()
1181 p = clk_hw_get_parent_by_index(&rcg->clkr.hw, i); in clk_rcg2_dfs_populate_freq()
1189 mask = BIT(rcg->mnd_width) - 1; in clk_rcg2_dfs_populate_freq()
1190 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_M_DFSR(l), in clk_rcg2_dfs_populate_freq()
1195 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_N_DFSR(l), in clk_rcg2_dfs_populate_freq()
1206 static int clk_rcg2_dfs_populate_freq_table(struct clk_rcg2 *rcg) in clk_rcg2_dfs_populate_freq_table() argument
1215 rcg->freq_tbl = freq_tbl; in clk_rcg2_dfs_populate_freq_table()
1218 clk_rcg2_dfs_populate_freq(&rcg->clkr.hw, i, freq_tbl + i); in clk_rcg2_dfs_populate_freq_table()
1226 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_dfs_determine_rate() local
1229 if (!rcg->freq_tbl) { in clk_rcg2_dfs_determine_rate()
1230 ret = clk_rcg2_dfs_populate_freq_table(rcg); in clk_rcg2_dfs_determine_rate()
1244 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_dfs_recalc_rate() local
1247 regmap_read(rcg->clkr.regmap, in clk_rcg2_dfs_recalc_rate()
1248 rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &level); in clk_rcg2_dfs_recalc_rate()
1252 if (rcg->freq_tbl) in clk_rcg2_dfs_recalc_rate()
1253 return rcg->freq_tbl[level].freq; in clk_rcg2_dfs_recalc_rate()
1262 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + SE_PERF_DFSR(level), in clk_rcg2_dfs_recalc_rate()
1265 mask = BIT(rcg->hid_width) - 1; in clk_rcg2_dfs_recalc_rate()
1273 mask = BIT(rcg->mnd_width) - 1; in clk_rcg2_dfs_recalc_rate()
1274 regmap_read(rcg->clkr.regmap, in clk_rcg2_dfs_recalc_rate()
1275 rcg->cmd_rcgr + SE_PERF_M_DFSR(level), &m); in clk_rcg2_dfs_recalc_rate()
1278 regmap_read(rcg->clkr.regmap, in clk_rcg2_dfs_recalc_rate()
1279 rcg->cmd_rcgr + SE_PERF_N_DFSR(level), &n); in clk_rcg2_dfs_recalc_rate()
1298 struct clk_rcg2 *rcg = data->rcg; in clk_rcg2_enable_dfs() local
1303 ret = regmap_read(regmap, rcg->cmd_rcgr + SE_CMD_DFSR_OFFSET, &val); in clk_rcg2_enable_dfs()
1317 rcg->freq_tbl = NULL; in clk_rcg2_enable_dfs()
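
The DFS helpers above read one CFG/M/N tuple per hardware performance level, so each SE_PERF_*_DFSR(level) access is a fixed base plus a per-level stride from cmd_rcgr. A sketch of that addressing; the base offsets and the 4-byte stride below are assumptions used only for illustration:

/* Sketch of per-level DFS register addressing; base offsets and stride
 * are assumed values, not the driver's definitions. */
#include <stdio.h>

#define SE_CMD_DFSR_OFFSET    0x14
#define SE_PERF_DFSR(level)   (0x1c + 0x4 * (level))   /* per-level CFG copy */
#define SE_PERF_M_DFSR(level) (0x5c + 0x4 * (level))   /* per-level M value */
#define SE_PERF_N_DFSR(level) (0x9c + 0x4 * (level))   /* per-level N value */

int main(void)
{
	unsigned int cmd_rcgr = 0x1000;
	int level;

	for (level = 0; level < 3; level++)
		printf("level %d: cfg 0x%x m 0x%x n 0x%x\n", level,
		       cmd_rcgr + SE_PERF_DFSR(level),
		       cmd_rcgr + SE_PERF_M_DFSR(level),
		       cmd_rcgr + SE_PERF_N_DFSR(level));
	return 0;
}
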
1340 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_dp_set_rate() local
1342 u32 mask = BIT(rcg->hid_width) - 1; in clk_rcg2_dp_set_rate()
1348 GENMASK(rcg->mnd_width - 1, 0), in clk_rcg2_dp_set_rate()
1349 GENMASK(rcg->mnd_width - 1, 0), &den, &num); in clk_rcg2_dp_set_rate()
1354 regmap_read(rcg->clkr.regmap, rcg->cmd_rcgr + CFG_REG, &cfg); in clk_rcg2_dp_set_rate()
1360 if (cfg == rcg->parent_map[i].cfg) { in clk_rcg2_dp_set_rate()
1361 f.src = rcg->parent_map[i].src; in clk_rcg2_dp_set_rate()
1378 return clk_rcg2_configure(rcg, &f); in clk_rcg2_dp_set_rate()
1390 struct clk_rcg2 *rcg = to_clk_rcg2(hw); in clk_rcg2_dp_determine_rate() local
1396 GENMASK(rcg->mnd_width - 1, 0), in clk_rcg2_dp_determine_rate()
1397 GENMASK(rcg->mnd_width - 1, 0), &den, &num); in clk_rcg2_dp_determine_rate()
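
clk_rcg2_dp_set_rate() and clk_rcg2_dp_determine_rate() above call rational_best_approximation() with limits of GENMASK(rcg->mnd_width - 1, 0) to pick an M/N pair approximating the requested rate as a fraction of the parent. The sketch below is a much simpler brute-force stand-in for that search (the kernel helper uses a continued-fraction walk); it is for illustration only and the example rates are hypothetical:

/* Brute-force stand-in for rational_best_approximation(): pick num/den no
 * larger than the given limits that best matches target_num/target_den. */
#include <stdio.h>

static void best_ratio(unsigned long target_num, unsigned long target_den,
		       unsigned int max_num, unsigned int max_den,
		       unsigned int *num, unsigned int *den)
{
	double target = (double)target_num / (double)target_den;
	double best_err = -1.0;
	unsigned int d, n;

	for (d = 1; d <= max_den; d++) {
		n = (unsigned int)(target * d + 0.5);   /* nearest numerator for this d */
		if (n > max_num)
			n = max_num;

		double err = target - (double)n / d;
		if (err < 0)
			err = -err;
		if (best_err < 0 || err < best_err) {
			best_err = err;
			*num = n;
			*den = d;
		}
	}
}

int main(void)
{
	unsigned int num = 0, den = 1;

	/* hypothetical request: 148.5 MHz stream from an 810 MHz source,
	 * with 16-bit M/N limits (GENMASK(15, 0)) */
	best_ratio(148500000UL, 810000000UL, 0xffff, 0xffff, &num, &den);
	printf("num/den = %u/%u\n", num, den);          /* expect 11/60 */
	return 0;
}
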