Lines matching full:sw

14 static int tb_switch_set_tmu_mode_params(struct tb_switch *sw,  in tb_switch_set_tmu_mode_params()  argument
32 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, in tb_switch_set_tmu_mode_params()
33 sw->tmu.cap + TMU_RTR_CS_0, 1); in tb_switch_set_tmu_mode_params()
40 ret = tb_sw_write(sw, &val, TB_CFG_SWITCH, in tb_switch_set_tmu_mode_params()
41 sw->tmu.cap + TMU_RTR_CS_0, 1); in tb_switch_set_tmu_mode_params()
45 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, in tb_switch_set_tmu_mode_params()
46 sw->tmu.cap + TMU_RTR_CS_15, 1); in tb_switch_set_tmu_mode_params()
59 return tb_sw_write(sw, &val, TB_CFG_SWITCH, in tb_switch_set_tmu_mode_params()
60 sw->tmu.cap + TMU_RTR_CS_15, 1); in tb_switch_set_tmu_mode_params()
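
The tb_switch_set_tmu_mode_params() hits above follow the driver's usual read-modify-write access to router config space through tb_sw_read() and tb_sw_write(). A minimal sketch of that pattern, assuming the drivers/thunderbolt build context these hits come from; the function name and the set_bits parameter are illustrative only, not the in-tree body:

        /* Hedged sketch, not the in-tree body of the function above. */
        static int example_tmu_rtr_cs0_set(struct tb_switch *sw, u32 set_bits)
        {
                u32 val;
                int ret;

                /* Read one dword at the router TMU capability, register TMU_RTR_CS_0 */
                ret = tb_sw_read(sw, &val, TB_CFG_SWITCH,
                                 sw->tmu.cap + TMU_RTR_CS_0, 1);
                if (ret)
                        return ret;

                val |= set_bits;

                /* Write the modified dword back to the same offset */
                return tb_sw_write(sw, &val, TB_CFG_SWITCH,
                                   sw->tmu.cap + TMU_RTR_CS_0, 1);
        }
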
63 static const char *tb_switch_tmu_mode_name(const struct tb_switch *sw) in tb_switch_tmu_mode_name() argument
65 bool root_switch = !tb_route(sw); in tb_switch_tmu_mode_name()
67 switch (sw->tmu.rate) { in tb_switch_tmu_mode_name()
75 if (sw->tmu.unidirectional) in tb_switch_tmu_mode_name()
89 static bool tb_switch_tmu_ucap_supported(struct tb_switch *sw) in tb_switch_tmu_ucap_supported() argument
94 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, in tb_switch_tmu_ucap_supported()
95 sw->tmu.cap + TMU_RTR_CS_0, 1); in tb_switch_tmu_ucap_supported()
102 static int tb_switch_tmu_rate_read(struct tb_switch *sw) in tb_switch_tmu_rate_read() argument
107 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, in tb_switch_tmu_rate_read()
108 sw->tmu.cap + TMU_RTR_CS_3, 1); in tb_switch_tmu_rate_read()
116 static int tb_switch_tmu_rate_write(struct tb_switch *sw, int rate) in tb_switch_tmu_rate_write() argument
121 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, in tb_switch_tmu_rate_write()
122 sw->tmu.cap + TMU_RTR_CS_3, 1); in tb_switch_tmu_rate_write()
129 return tb_sw_write(sw, &val, TB_CFG_SWITCH, in tb_switch_tmu_rate_write()
130 sw->tmu.cap + TMU_RTR_CS_3, 1); in tb_switch_tmu_rate_write()
155 if (!port->sw->tmu.has_ucap) in tb_port_tmu_set_unidirectional()
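
The single tb_port_tmu_set_unidirectional() hit above only shows the port->sw->tmu.has_ucap guard; the adapter-side register update itself does not reference "sw" and so does not appear in this listing. A hedged sketch of what such a per-port update can look like, assuming tb_port_read()/tb_port_write(), port->cap_tmu and the TMU_ADP_CS_3 / TMU_ADP_CS_3_UDM names:

        /* Hedged sketch; register and bit names are assumptions, not quoted code. */
        static int example_port_tmu_set_udm(struct tb_port *port, bool unidirectional)
        {
                u32 val;
                int ret;

                /* Nothing to do if the router has no unidirectional capability */
                if (!port->sw->tmu.has_ucap)
                        return 0;

                ret = tb_port_read(port, &val, TB_CFG_PORT,
                                   port->cap_tmu + TMU_ADP_CS_3, 1);
                if (ret)
                        return ret;

                if (unidirectional)
                        val |= TMU_ADP_CS_3_UDM;
                else
                        val &= ~TMU_ADP_CS_3_UDM;

                return tb_port_write(port, &val, TB_CFG_PORT,
                                     port->cap_tmu + TMU_ADP_CS_3, 1);
        }
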
202 static int tb_switch_tmu_set_time_disruption(struct tb_switch *sw, bool set) in tb_switch_tmu_set_time_disruption() argument
207 if (tb_switch_is_usb4(sw)) { in tb_switch_tmu_set_time_disruption()
208 offset = sw->tmu.cap + TMU_RTR_CS_0; in tb_switch_tmu_set_time_disruption()
211 offset = sw->cap_vsec_tmu + TB_TIME_VSEC_3_CS_26; in tb_switch_tmu_set_time_disruption()
215 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, offset, 1); in tb_switch_tmu_set_time_disruption()
224 return tb_sw_write(sw, &val, TB_CFG_SWITCH, offset, 1); in tb_switch_tmu_set_time_disruption()
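
tb_switch_tmu_set_time_disruption() above selects its register offset based on tb_switch_is_usb4(): USB4 routers use the standard TMU capability, pre-USB4 hardware a vendor specific one. A hedged reconstruction of that branch and the read-modify-write it feeds, assuming the TMU_RTR_CS_0_TD and TB_TIME_VSEC_3_CS_26_TD bit names (not a quote of the in-tree body):

        static int example_set_time_disruption(struct tb_switch *sw, bool set)
        {
                u32 val, offset, bit;
                int ret;

                if (tb_switch_is_usb4(sw)) {
                        offset = sw->tmu.cap + TMU_RTR_CS_0;
                        bit = TMU_RTR_CS_0_TD;          /* assumed bit name */
                } else {
                        offset = sw->cap_vsec_tmu + TB_TIME_VSEC_3_CS_26;
                        bit = TB_TIME_VSEC_3_CS_26_TD;  /* assumed bit name */
                }

                ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, offset, 1);
                if (ret)
                        return ret;

                if (set)
                        val |= bit;
                else
                        val &= ~bit;

                return tb_sw_write(sw, &val, TB_CFG_SWITCH, offset, 1);
        }
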
229 * @sw: Switch to initialize
235 int tb_switch_tmu_init(struct tb_switch *sw) in tb_switch_tmu_init() argument
240 if (tb_switch_is_icm(sw)) in tb_switch_tmu_init()
243 ret = tb_switch_find_cap(sw, TB_SWITCH_CAP_TMU); in tb_switch_tmu_init()
245 sw->tmu.cap = ret; in tb_switch_tmu_init()
247 tb_switch_for_each_port(sw, port) { in tb_switch_tmu_init()
255 ret = tb_switch_tmu_rate_read(sw); in tb_switch_tmu_init()
259 sw->tmu.rate = ret; in tb_switch_tmu_init()
261 sw->tmu.has_ucap = tb_switch_tmu_ucap_supported(sw); in tb_switch_tmu_init()
262 if (sw->tmu.has_ucap) { in tb_switch_tmu_init()
263 tb_sw_dbg(sw, "TMU: supports uni-directional mode\n"); in tb_switch_tmu_init()
265 if (tb_route(sw)) { in tb_switch_tmu_init()
266 struct tb_port *up = tb_upstream_port(sw); in tb_switch_tmu_init()
268 sw->tmu.unidirectional = in tb_switch_tmu_init()
272 sw->tmu.unidirectional = false; in tb_switch_tmu_init()
275 tb_sw_dbg(sw, "TMU: current mode: %s\n", tb_switch_tmu_mode_name(sw)); in tb_switch_tmu_init()
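
tb_switch_tmu_init() above walks every port of the router with tb_switch_for_each_port(). A sketch of what such a per-port pass can do, assuming tb_port_find_cap(), TB_PORT_CAP_TIME1 and port->cap_tmu; the loop body is illustrative, not the in-tree one:

        static void example_init_port_tmu_caps(struct tb_switch *sw)
        {
                struct tb_port *port;

                tb_switch_for_each_port(sw, port) {
                        int cap = tb_port_find_cap(port, TB_PORT_CAP_TIME1);

                        /* Remember the per-adapter TMU capability offset if present */
                        if (cap > 0)
                                port->cap_tmu = cap;
                }
        }
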
281 * @sw: Switch whose time to update
285 int tb_switch_tmu_post_time(struct tb_switch *sw) in tb_switch_tmu_post_time() argument
289 struct tb_switch *root_switch = sw->tb->root_switch; in tb_switch_tmu_post_time()
294 if (!tb_route(sw)) in tb_switch_tmu_post_time()
297 if (!tb_switch_is_usb4(sw)) in tb_switch_tmu_post_time()
322 ret = tb_switch_tmu_set_time_disruption(sw, true); in tb_switch_tmu_post_time()
326 post_local_time_offset = sw->tmu.cap + TMU_RTR_CS_22; in tb_switch_tmu_post_time()
327 post_time_offset = sw->tmu.cap + TMU_RTR_CS_24; in tb_switch_tmu_post_time()
328 post_time_high_offset = sw->tmu.cap + TMU_RTR_CS_25; in tb_switch_tmu_post_time()
334 ret = tb_sw_write(sw, &local_time, TB_CFG_SWITCH, in tb_switch_tmu_post_time()
349 ret = tb_sw_write(sw, &post_time, TB_CFG_SWITCH, post_time_offset, 2); in tb_switch_tmu_post_time()
353 ret = tb_sw_write(sw, &post_time_high, TB_CFG_SWITCH, in tb_switch_tmu_post_time()
360 ret = tb_sw_read(sw, &post_time, TB_CFG_SWITCH, in tb_switch_tmu_post_time()
371 tb_sw_dbg(sw, "TMU: updated local time to %#llx\n", local_time); in tb_switch_tmu_post_time()
374 tb_switch_tmu_set_time_disruption(sw, false); in tb_switch_tmu_post_time()
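
The tb_switch_tmu_post_time() hits show 64-bit time values being posted as two consecutive config dwords (the trailing length argument of 2). A minimal sketch of such a multi-dword write; the function name and value are illustrative:

        static int example_post_time(struct tb_switch *sw, u64 post_time)
        {
                /* Writes two dwords starting at TMU_RTR_CS_24, i.e. the full 64 bits */
                return tb_sw_write(sw, &post_time, TB_CFG_SWITCH,
                                   sw->tmu.cap + TMU_RTR_CS_24, 2);
        }
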
380 * @sw: Switch whose TMU to disable
382 * Turns off TMU of @sw if it is enabled. If not enabled, does nothing.
384 int tb_switch_tmu_disable(struct tb_switch *sw) in tb_switch_tmu_disable() argument
391 if (!tb_switch_is_clx_supported(sw)) in tb_switch_tmu_disable()
395 if (sw->tmu.rate == TB_SWITCH_TMU_RATE_OFF) in tb_switch_tmu_disable()
399 if (tb_route(sw)) { in tb_switch_tmu_disable()
400 bool unidirectional = sw->tmu.unidirectional; in tb_switch_tmu_disable()
401 struct tb_switch *parent = tb_switch_parent(sw); in tb_switch_tmu_disable()
405 down = tb_port_at(tb_route(sw), parent); in tb_switch_tmu_disable()
406 up = tb_upstream_port(sw); in tb_switch_tmu_disable()
418 tb_switch_tmu_rate_write(sw, TB_SWITCH_TMU_RATE_OFF); in tb_switch_tmu_disable()
433 tb_switch_tmu_rate_write(sw, TB_SWITCH_TMU_RATE_OFF); in tb_switch_tmu_disable()
436 sw->tmu.unidirectional = false; in tb_switch_tmu_disable()
437 sw->tmu.rate = TB_SWITCH_TMU_RATE_OFF; in tb_switch_tmu_disable()
439 tb_sw_dbg(sw, "TMU: disabled\n"); in tb_switch_tmu_disable()
443 static void __tb_switch_tmu_off(struct tb_switch *sw, bool unidirectional) in __tb_switch_tmu_off() argument
445 struct tb_switch *parent = tb_switch_parent(sw); in __tb_switch_tmu_off()
448 down = tb_port_at(tb_route(sw), parent); in __tb_switch_tmu_off()
449 up = tb_upstream_port(sw); in __tb_switch_tmu_off()
462 tb_switch_tmu_rate_write(sw, TB_SWITCH_TMU_RATE_OFF); in __tb_switch_tmu_off()
464 tb_switch_set_tmu_mode_params(sw, sw->tmu.rate); in __tb_switch_tmu_off()
473 static int __tb_switch_tmu_enable_bidirectional(struct tb_switch *sw) in __tb_switch_tmu_enable_bidirectional() argument
475 struct tb_switch *parent = tb_switch_parent(sw); in __tb_switch_tmu_enable_bidirectional()
479 up = tb_upstream_port(sw); in __tb_switch_tmu_enable_bidirectional()
480 down = tb_port_at(tb_route(sw), parent); in __tb_switch_tmu_enable_bidirectional()
490 ret = tb_switch_tmu_rate_write(sw, TB_SWITCH_TMU_RATE_HIFI); in __tb_switch_tmu_enable_bidirectional()
505 __tb_switch_tmu_off(sw, false); in __tb_switch_tmu_enable_bidirectional()
509 static int tb_switch_tmu_objection_mask(struct tb_switch *sw) in tb_switch_tmu_objection_mask() argument
514 ret = tb_sw_read(sw, &val, TB_CFG_SWITCH, in tb_switch_tmu_objection_mask()
515 sw->cap_vsec_tmu + TB_TIME_VSEC_3_CS_9, 1); in tb_switch_tmu_objection_mask()
521 return tb_sw_write(sw, &val, TB_CFG_SWITCH, in tb_switch_tmu_objection_mask()
522 sw->cap_vsec_tmu + TB_TIME_VSEC_3_CS_9, 1); in tb_switch_tmu_objection_mask()
525 static int tb_switch_tmu_unidirectional_enable(struct tb_switch *sw) in tb_switch_tmu_unidirectional_enable() argument
527 struct tb_port *up = tb_upstream_port(sw); in tb_switch_tmu_unidirectional_enable()
538 static int __tb_switch_tmu_enable_unidirectional(struct tb_switch *sw) in __tb_switch_tmu_enable_unidirectional() argument
540 struct tb_switch *parent = tb_switch_parent(sw); in __tb_switch_tmu_enable_unidirectional()
544 up = tb_upstream_port(sw); in __tb_switch_tmu_enable_unidirectional()
545 down = tb_port_at(tb_route(sw), parent); in __tb_switch_tmu_enable_unidirectional()
546 ret = tb_switch_tmu_rate_write(parent, sw->tmu.rate_request); in __tb_switch_tmu_enable_unidirectional()
550 ret = tb_switch_set_tmu_mode_params(sw, sw->tmu.rate_request); in __tb_switch_tmu_enable_unidirectional()
573 __tb_switch_tmu_off(sw, true); in __tb_switch_tmu_enable_unidirectional()
577 static void __tb_switch_tmu_change_mode_prev(struct tb_switch *sw) in __tb_switch_tmu_change_mode_prev() argument
579 struct tb_switch *parent = tb_switch_parent(sw); in __tb_switch_tmu_change_mode_prev()
582 down = tb_port_at(tb_route(sw), parent); in __tb_switch_tmu_change_mode_prev()
583 up = tb_upstream_port(sw); in __tb_switch_tmu_change_mode_prev()
590 tb_port_tmu_set_unidirectional(down, sw->tmu.unidirectional); in __tb_switch_tmu_change_mode_prev()
591 if (sw->tmu.unidirectional_request) in __tb_switch_tmu_change_mode_prev()
592 tb_switch_tmu_rate_write(parent, sw->tmu.rate); in __tb_switch_tmu_change_mode_prev()
594 tb_switch_tmu_rate_write(sw, sw->tmu.rate); in __tb_switch_tmu_change_mode_prev()
596 tb_switch_set_tmu_mode_params(sw, sw->tmu.rate); in __tb_switch_tmu_change_mode_prev()
597 tb_port_tmu_set_unidirectional(up, sw->tmu.unidirectional); in __tb_switch_tmu_change_mode_prev()
600 static int __tb_switch_tmu_change_mode(struct tb_switch *sw) in __tb_switch_tmu_change_mode() argument
602 struct tb_switch *parent = tb_switch_parent(sw); in __tb_switch_tmu_change_mode()
606 up = tb_upstream_port(sw); in __tb_switch_tmu_change_mode()
607 down = tb_port_at(tb_route(sw), parent); in __tb_switch_tmu_change_mode()
608 ret = tb_port_tmu_set_unidirectional(down, sw->tmu.unidirectional_request); in __tb_switch_tmu_change_mode()
612 if (sw->tmu.unidirectional_request) in __tb_switch_tmu_change_mode()
613 ret = tb_switch_tmu_rate_write(parent, sw->tmu.rate_request); in __tb_switch_tmu_change_mode()
615 ret = tb_switch_tmu_rate_write(sw, sw->tmu.rate_request); in __tb_switch_tmu_change_mode()
619 ret = tb_switch_set_tmu_mode_params(sw, sw->tmu.rate_request); in __tb_switch_tmu_change_mode()
623 ret = tb_port_tmu_set_unidirectional(up, sw->tmu.unidirectional_request); in __tb_switch_tmu_change_mode()
638 __tb_switch_tmu_change_mode_prev(sw); in __tb_switch_tmu_change_mode()
644 * @sw: Router whose TMU to enable
653 int tb_switch_tmu_enable(struct tb_switch *sw) in tb_switch_tmu_enable() argument
655 bool unidirectional = sw->tmu.unidirectional_request; in tb_switch_tmu_enable()
658 if (unidirectional && !sw->tmu.has_ucap) in tb_switch_tmu_enable()
666 if (!tb_switch_is_clx_supported(sw)) in tb_switch_tmu_enable()
669 if (tb_switch_tmu_is_enabled(sw, sw->tmu.unidirectional_request)) in tb_switch_tmu_enable()
672 if (tb_switch_is_titan_ridge(sw) && unidirectional) { in tb_switch_tmu_enable()
677 if (!tb_switch_is_clx_enabled(sw, TB_CL1)) in tb_switch_tmu_enable()
680 ret = tb_switch_tmu_objection_mask(sw); in tb_switch_tmu_enable()
684 ret = tb_switch_tmu_unidirectional_enable(sw); in tb_switch_tmu_enable()
689 ret = tb_switch_tmu_set_time_disruption(sw, true); in tb_switch_tmu_enable()
693 if (tb_route(sw)) { in tb_switch_tmu_enable()
699 if (sw->tmu.rate == TB_SWITCH_TMU_RATE_OFF) { in tb_switch_tmu_enable()
701 ret = __tb_switch_tmu_enable_unidirectional(sw); in tb_switch_tmu_enable()
703 ret = __tb_switch_tmu_enable_bidirectional(sw); in tb_switch_tmu_enable()
706 } else if (sw->tmu.rate == TB_SWITCH_TMU_RATE_NORMAL) { in tb_switch_tmu_enable()
707 ret = __tb_switch_tmu_change_mode(sw); in tb_switch_tmu_enable()
711 sw->tmu.unidirectional = unidirectional; in tb_switch_tmu_enable()
719 ret = tb_switch_tmu_rate_write(sw, sw->tmu.rate_request); in tb_switch_tmu_enable()
724 sw->tmu.rate = sw->tmu.rate_request; in tb_switch_tmu_enable()
726 tb_sw_dbg(sw, "TMU: mode set to: %s\n", tb_switch_tmu_mode_name(sw)); in tb_switch_tmu_enable()
727 return tb_switch_tmu_set_time_disruption(sw, false); in tb_switch_tmu_enable()
732 * @sw: Router whose mode to change
739 void tb_switch_tmu_configure(struct tb_switch *sw, in tb_switch_tmu_configure() argument
742 sw->tmu.unidirectional_request = unidirectional; in tb_switch_tmu_configure()
743 sw->tmu.rate_request = rate; in tb_switch_tmu_configure()
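
tb_switch_tmu_configure() only records the requested mode in rate_request and unidirectional_request; tb_switch_tmu_enable() is what commits it. A hedged usage sketch of that two-step flow (the HiFi/unidirectional choice is only an example), which the tb_switch_tmu_config_enable() hits below also illustrate:

        static int example_request_hifi_uni(struct tb_switch *sw)
        {
                /* Record the request, then let tb_switch_tmu_enable() apply it */
                tb_switch_tmu_configure(sw, TB_SWITCH_TMU_RATE_HIFI, true);
                return tb_switch_tmu_enable(sw);
        }
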
749 struct tb_switch *sw = tb_to_switch(dev); in tb_switch_tmu_config_enable() local
751 tb_switch_tmu_configure(sw, *(enum tb_switch_tmu_rate *)rate, in tb_switch_tmu_config_enable()
752 tb_switch_is_clx_enabled(sw, TB_CL1)); in tb_switch_tmu_config_enable()
753 if (tb_switch_tmu_enable(sw)) in tb_switch_tmu_config_enable()
754 tb_sw_dbg(sw, "fail switching TMU mode for 1st depth router\n"); in tb_switch_tmu_config_enable()
762 * @sw: The router to configure and enable its children's TMU
768 void tb_switch_enable_tmu_1st_child(struct tb_switch *sw, in tb_switch_enable_tmu_1st_child() argument
771 device_for_each_child(&sw->dev, &rate, in tb_switch_enable_tmu_1st_child()
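
tb_switch_enable_tmu_1st_child() closes the listing by handing tb_switch_tmu_config_enable() to device_for_each_child(), so the callback runs once per child device of the router. A hedged sketch of that callback pattern; the body and names below are illustrative:

        static int example_child_router_cb(struct device *dev, void *data)
        {
                struct tb_switch *sw = tb_to_switch(dev);

                /* Children of a router are not all routers; skip anything else */
                if (!sw)
                        return 0;

                /* ... act on the child router here, e.g. using the rate in *data ... */
                return 0;
        }

        /*
         * Typical invocation from the parent router:
         *      device_for_each_child(&parent_sw->dev, &rate, example_child_router_cb);
         */
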