
Searched defs:rq (Results 1 – 25 of 434) sorted by relevance


/Linux-v5.4/kernel/sched/
stop_task.c 20 balance_stop(struct rq *rq, struct task_struct *prev, struct rq_flags *rf) in balance_stop()
27 check_preempt_curr_stop(struct rq *rq, struct task_struct *p, int flags) in check_preempt_curr_stop()
32 static void set_next_task_stop(struct rq *rq, struct task_struct *stop) in set_next_task_stop()
38 pick_next_task_stop(struct rq *rq, struct task_struct *prev, struct rq_flags *rf) in pick_next_task_stop()
50 enqueue_task_stop(struct rq *rq, struct task_struct *p, int flags) in enqueue_task_stop()
56 dequeue_task_stop(struct rq *rq, struct task_struct *p, int flags) in dequeue_task_stop()
61 static void yield_task_stop(struct rq *rq) in yield_task_stop()
66 static void put_prev_task_stop(struct rq *rq, struct task_struct *prev) in put_prev_task_stop()
93 static void task_tick_stop(struct rq *rq, struct task_struct *curr, int queued) in task_tick_stop()
97 static void switched_to_stop(struct rq *rq, struct task_struct *p) in switched_to_stop()
[all …]
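The stop_task.c entries above are the hook set every scheduling class fills in: enqueue, dequeue, pick_next, put_prev, task_tick and so on. The sketch below models that ops-table pattern in standalone C; the struct and function names are illustrative stand-ins, not the kernel's struct sched_class.

```c
/* Toy model of a scheduling-class ops table (hypothetical names,
 * not the kernel's struct sched_class).  Build: cc -o demo demo.c */
#include <stdio.h>

struct toy_rq   { int nr_running; };
struct toy_task { const char *name; };

struct toy_sched_class {
    void (*enqueue_task)(struct toy_rq *rq, struct toy_task *p);
    void (*dequeue_task)(struct toy_rq *rq, struct toy_task *p);
    struct toy_task *(*pick_next_task)(struct toy_rq *rq);
};

static struct toy_task stop_task = { "migration/0" };

/* The "stop" class is trivial: at most one, always-highest-priority task. */
static void stop_enqueue(struct toy_rq *rq, struct toy_task *p)
{
    (void)p;
    rq->nr_running++;
}

static void stop_dequeue(struct toy_rq *rq, struct toy_task *p)
{
    (void)p;
    rq->nr_running--;
}

static struct toy_task *stop_pick_next(struct toy_rq *rq)
{
    return rq->nr_running ? &stop_task : NULL;
}

static const struct toy_sched_class stop_class = {
    .enqueue_task   = stop_enqueue,
    .dequeue_task   = stop_dequeue,
    .pick_next_task = stop_pick_next,
};

int main(void)
{
    struct toy_rq rq = { 0 };
    stop_class.enqueue_task(&rq, &stop_task);
    struct toy_task *next = stop_class.pick_next_task(&rq);
    printf("picked: %s\n", next ? next->name : "(idle)");
    return 0;
}
```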
sched.h 552 struct rq *rq; /* CPU runqueue to which this cfs_rq is attached */ member
621 struct rq *rq; member
847 struct rq { struct
849 raw_spinlock_t lock;
876 struct uclamp_rq uclamp[UCLAMP_CNT] ____cacheline_aligned; argument
881 struct cfs_rq cfs; argument
882 struct rt_rq rt; argument
883 struct dl_rq dl; argument
887 struct list_head leaf_cfs_rq_list; argument
888 struct list_head *tmp_alone_branch;
[all …]
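The sched.h hits show struct rq as the per-CPU runqueue: a lock plus one sub-runqueue per scheduling class (cfs, rt, dl). Below is a simplified userspace model of that layout; the field names are illustrative and a pthread mutex stands in for the raw spinlock. Build with cc -pthread.

```c
/* Simplified per-CPU runqueue model loosely mirroring the sched.h entries
 * above; not the kernel layout. */
#include <pthread.h>
#include <stdio.h>

struct toy_cfs_rq { unsigned int nr_running; };
struct toy_rt_rq  { unsigned int rt_nr_running; };
struct toy_dl_rq  { unsigned int dl_nr_running; };

struct toy_rq {
    pthread_mutex_t   lock;    /* stand-in for raw_spinlock_t lock */
    unsigned int      nr_running;
    struct toy_cfs_rq cfs;     /* fair tasks     */
    struct toy_rt_rq  rt;      /* realtime tasks */
    struct toy_dl_rq  dl;      /* deadline tasks */
    int cpu;
};

static void toy_enqueue_fair(struct toy_rq *rq)
{
    pthread_mutex_lock(&rq->lock);
    rq->cfs.nr_running++;
    rq->nr_running++;
    pthread_mutex_unlock(&rq->lock);
}

int main(void)
{
    struct toy_rq rq = { .lock = PTHREAD_MUTEX_INITIALIZER, .cpu = 0 };
    toy_enqueue_fair(&rq);
    printf("cpu%d: %u runnable (%u fair)\n",
           rq.cpu, rq.nr_running, rq.cfs.nr_running);
    return 0;
}
```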
deadline.c 36 struct rq *rq = task_rq(p); in dl_rq_of_se() local
158 struct rq *rq; in dl_change_utilization() local
242 struct rq *rq = rq_of_dl_rq(dl_rq); in task_non_contending() local
378 static inline int dl_overloaded(struct rq *rq) in dl_overloaded()
383 static inline void dl_set_overload(struct rq *rq) in dl_set_overload()
399 static inline void dl_clear_overload(struct rq *rq) in dl_clear_overload()
445 static void enqueue_pushable_dl_task(struct rq *rq, struct task_struct *p) in enqueue_pushable_dl_task()
475 static void dequeue_pushable_dl_task(struct rq *rq, struct task_struct *p) in dequeue_pushable_dl_task()
496 static inline int has_pushable_dl_tasks(struct rq *rq) in has_pushable_dl_tasks()
503 static inline bool need_pull_dl_task(struct rq *rq, struct task_struct *prev) in need_pull_dl_task()
[all …]
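deadline.c implements SCHED_DEADLINE, where the core pick rule is earliest absolute deadline first. The kernel keeps deadline entities in a red-black tree; a linear scan over a small array is enough to show the same selection rule.

```c
/* Toy earliest-deadline-first pick; illustrative only. */
#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

struct dl_task { const char *name; uint64_t deadline_ns; };

static const struct dl_task *pick_earliest(const struct dl_task *t, size_t n)
{
    const struct dl_task *best = NULL;
    for (size_t i = 0; i < n; i++)
        if (!best || t[i].deadline_ns < best->deadline_ns)
            best = &t[i];
    return best;
}

int main(void)
{
    struct dl_task tasks[] = {
        { "audio",  2000000 },
        { "video", 16000000 },
        { "ctl",    5000000 },
    };
    printf("run: %s\n", pick_earliest(tasks, 3)->name); /* -> audio */
    return 0;
}
```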
pelt.h 14 update_irq_load_avg(struct rq *rq, u64 running) in update_irq_load_avg()
58 static inline void update_rq_clock_pelt(struct rq *rq, s64 delta) in update_rq_clock_pelt()
97 static inline void update_idle_rq_clock_pelt(struct rq *rq) in update_idle_rq_clock_pelt()
117 static inline u64 rq_clock_pelt(struct rq *rq) in rq_clock_pelt()
150 update_rt_rq_load_avg(u64 now, struct rq *rq, int running) in update_rt_rq_load_avg()
156 update_dl_rq_load_avg(u64 now, struct rq *rq, int running) in update_dl_rq_load_avg()
162 update_irq_load_avg(struct rq *rq, u64 running) in update_irq_load_avg()
167 static inline u64 rq_clock_pelt(struct rq *rq) in rq_clock_pelt()
173 update_rq_clock_pelt(struct rq *rq, s64 delta) { } in update_rq_clock_pelt()
176 update_idle_rq_clock_pelt(struct rq *rq) { } in update_idle_rq_clock_pelt()
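The pelt.h helpers update the Per-Entity Load Tracking clocks. The tracked signal decays geometrically so that a contribution halves roughly every 32 periods of about 1024 us. Below is a floating-point sketch of that decay; the kernel uses fixed-point arithmetic and precomputed tables. Build with cc demo.c -lm.

```c
/* Sketch of PELT-style decay: load halves every 32 periods (y^32 = 0.5). */
#include <math.h>
#include <stdio.h>

#define HALFLIFE_PERIODS 32.0

/* Decay an accumulated signal by 'periods' elapsed ~1024us windows. */
static double decay_load(double val, double periods)
{
    return val * pow(0.5, periods / HALFLIFE_PERIODS);
}

int main(void)
{
    double load = 1024.0;           /* fully busy contribution */
    load = decay_load(load, 32.0);  /* one half-life later     */
    printf("after 32 periods: %.1f\n", load);   /* ~512 */
    load = decay_load(load, 64.0);
    printf("after 96 periods: %.1f\n", load);   /* ~128 */
    return 0;
}
```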
stats.h 9 rq_sched_info_arrive(struct rq *rq, unsigned long long delta) in rq_sched_info_arrive()
21 rq_sched_info_depart(struct rq *rq, unsigned long long delta) in rq_sched_info_depart()
28 rq_sched_info_dequeued(struct rq *rq, unsigned long long delta) in rq_sched_info_dequeued()
44 static inline void rq_sched_info_arrive (struct rq *rq, unsigned long long delta) { } in rq_sched_info_arrive()
45 static inline void rq_sched_info_dequeued(struct rq *rq, unsigned long long delta) { } in rq_sched_info_dequeued()
46 static inline void rq_sched_info_depart (struct rq *rq, unsigned long long delta) { } in rq_sched_info_depart()
114 struct rq *rq; in psi_ttwu_dequeue() local
129 static inline void psi_task_tick(struct rq *rq) in psi_task_tick()
141 static inline void psi_task_tick(struct rq *rq) {} in psi_task_tick()
156 static inline void sched_info_dequeued(struct rq *rq, struct task_struct *t) in sched_info_dequeued()
[all …]
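rq_sched_info_arrive()/rq_sched_info_depart() feed schedstats run-delay accounting: how long a task sat runnable on the runqueue before it actually got the CPU. A toy version with explicit timestamps follows; the field and helper names are illustrative, not the kernel's.

```c
/* Toy run-delay accounting: time between becoming runnable and running. */
#include <stdio.h>

struct toy_sched_info {
    unsigned long long last_queued;   /* when the task became runnable */
    unsigned long long run_delay;     /* total time spent waiting      */
};

static void sched_info_queued(struct toy_sched_info *si, unsigned long long now)
{
    si->last_queued = now;
}

static void sched_info_arrive(struct toy_sched_info *si, unsigned long long now)
{
    si->run_delay += now - si->last_queued;   /* waited this long to run */
}

int main(void)
{
    struct toy_sched_info si = { 0 };
    sched_info_queued(&si, 1000);
    sched_info_arrive(&si, 1750);
    printf("run delay so far: %llu units\n", si.run_delay);
    return 0;
}
```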
rt.c 160 struct rq *rq = cpu_rq(cpu); in init_tg_rt_entry() local
246 struct rq *rq = rq_of_rt_se(rt_se); in rt_rq_of_se() local
263 static inline bool need_pull_rt_task(struct rq *rq, struct task_struct *prev) in need_pull_rt_task()
269 static inline int rt_overloaded(struct rq *rq) in rt_overloaded()
274 static inline void rt_set_overload(struct rq *rq) in rt_set_overload()
293 static inline void rt_clear_overload(struct rq *rq) in rt_clear_overload()
350 static inline int has_pushable_tasks(struct rq *rq) in has_pushable_tasks()
361 static inline void rt_queue_push_tasks(struct rq *rq) in rt_queue_push_tasks()
369 static inline void rt_queue_pull_task(struct rq *rq) in rt_queue_pull_task()
374 static void enqueue_pushable_task(struct rq *rq, struct task_struct *p) in enqueue_pushable_task()
[all …]
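rt_set_overload()/rt_clear_overload() maintain a mask of CPUs that have more runnable realtime tasks than they can serve, so other CPUs know where pulling is worthwhile. A simplified model with a plain 8-CPU bitmask follows; the kernel versions take a struct rq * and update a per-root-domain cpumask and counter.

```c
/* Toy RT-overload tracking with one bit per CPU. */
#include <stdio.h>
#include <stdint.h>

static uint8_t rto_mask;   /* bit set for each RT-overloaded CPU */

static void rt_set_overload(int cpu)   { rto_mask |=  (uint8_t)(1u << cpu); }
static void rt_clear_overload(int cpu) { rto_mask &= (uint8_t)~(1u << cpu); }
static int  rt_overloaded(void)        { return rto_mask != 0; }

int main(void)
{
    rt_set_overload(2);          /* CPU 2 has more than one runnable RT task */
    if (rt_overloaded())
        printf("pull candidates mask: 0x%02x\n", rto_mask);
    rt_clear_overload(2);
    printf("overloaded now? %d\n", rt_overloaded());
    return 0;
}
```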
idle.c 370 balance_idle(struct rq *rq, struct task_struct *prev, struct rq_flags *rf) in balance_idle()
379 static void check_preempt_curr_idle(struct rq *rq, struct task_struct *p, int flags) in check_preempt_curr_idle()
384 static void put_prev_task_idle(struct rq *rq, struct task_struct *prev) in put_prev_task_idle()
388 static void set_next_task_idle(struct rq *rq, struct task_struct *next) in set_next_task_idle()
395 pick_next_task_idle(struct rq *rq, struct task_struct *prev, struct rq_flags *rf) in pick_next_task_idle()
412 dequeue_task_idle(struct rq *rq, struct task_struct *p, int flags) in dequeue_task_idle()
428 static void task_tick_idle(struct rq *rq, struct task_struct *curr, int queued) in task_tick_idle()
432 static void switched_to_idle(struct rq *rq, struct task_struct *p) in switched_to_idle()
438 prio_changed_idle(struct rq *rq, struct task_struct *p, int oldprio) in prio_changed_idle()
443 static unsigned int get_rr_interval_idle(struct rq *rq, struct task_struct *task) in get_rr_interval_idle()
[all …]
/Linux-v5.4/block/
blk-pm.h 16 static inline void blk_pm_mark_last_busy(struct request *rq) in blk_pm_mark_last_busy()
22 static inline void blk_pm_requeue_request(struct request *rq) in blk_pm_requeue_request()
31 struct request *rq) in blk_pm_add_request()
39 static inline void blk_pm_put_request(struct request *rq) in blk_pm_put_request()
51 static inline void blk_pm_mark_last_busy(struct request *rq) in blk_pm_mark_last_busy()
55 static inline void blk_pm_requeue_request(struct request *rq) in blk_pm_requeue_request()
60 struct request *rq) in blk_pm_add_request()
64 static inline void blk_pm_put_request(struct request *rq) in blk_pm_put_request()
elevator.c 54 #define rq_hash_key(rq) (blk_rq_pos(rq) + blk_rq_sectors(rq)) argument
60 static int elv_iosched_allow_bio_merge(struct request *rq, struct bio *bio) in elv_iosched_allow_bio_merge()
74 bool elv_bio_merge_ok(struct request *rq, struct bio *bio) in elv_bio_merge_ok()
201 static inline void __elv_rqhash_del(struct request *rq) in __elv_rqhash_del()
207 void elv_rqhash_del(struct request_queue *q, struct request *rq) in elv_rqhash_del()
214 void elv_rqhash_add(struct request_queue *q, struct request *rq) in elv_rqhash_add()
224 void elv_rqhash_reposition(struct request_queue *q, struct request *rq) in elv_rqhash_reposition()
234 struct request *rq; in elv_rqhash_find() local
255 void elv_rb_add(struct rb_root *root, struct request *rq) in elv_rb_add()
276 void elv_rb_del(struct rb_root *root, struct request *rq) in elv_rb_del()
[all …]
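The rq_hash_key() definition visible above keys each request by its end sector (blk_rq_pos + blk_rq_sectors), so a new bio that starts exactly there can be back-merged quickly. The toy lookup below uses a flat array in place of the elevator's hash list; names other than the key formula are illustrative.

```c
/* Toy back-merge lookup keyed by a request's end sector. */
#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

struct toy_rq { uint64_t pos; uint32_t nr_sectors; };

static uint64_t rq_hash_key(const struct toy_rq *rq)
{
    return rq->pos + rq->nr_sectors;     /* sector just past the request */
}

/* Find a request that a bio starting at 'sector' could be appended to. */
static struct toy_rq *find_back_merge(struct toy_rq *rqs, size_t n,
                                      uint64_t sector)
{
    for (size_t i = 0; i < n; i++)
        if (rq_hash_key(&rqs[i]) == sector)
            return &rqs[i];
    return NULL;
}

int main(void)
{
    struct toy_rq pending[] = { { 0, 8 }, { 64, 16 } };
    struct toy_rq *rq = find_back_merge(pending, 2, 80);
    if (rq)
        printf("back-merge into request at sector %llu\n",
               (unsigned long long)rq->pos);   /* -> 64 */
    return 0;
}
```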
mq-deadline.c 68 deadline_rb_root(struct deadline_data *dd, struct request *rq) in deadline_rb_root()
77 deadline_latter_request(struct request *rq) in deadline_latter_request()
88 deadline_add_rq_rb(struct deadline_data *dd, struct request *rq) in deadline_add_rq_rb()
96 deadline_del_rq_rb(struct deadline_data *dd, struct request *rq) in deadline_del_rq_rb()
109 static void deadline_remove_request(struct request_queue *q, struct request *rq) in deadline_remove_request()
165 deadline_move_request(struct deadline_data *dd, struct request *rq) in deadline_move_request()
185 struct request *rq = rq_entry_fifo(dd->fifo_list[ddir].next); in deadline_check_fifo() local
203 struct request *rq; in deadline_fifo_request() local
239 struct request *rq; in deadline_next_request() local
273 struct request *rq, *next_rq; in __dd_dispatch_request() local
[all …]
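mq-deadline keeps per-direction FIFO lists with an expiry time alongside a sector-sorted tree; an expired request at a FIFO head is dispatched before sector-ordered batching continues. The sketch below shows only the expiry check; times are plain integers and names are illustrative.

```c
/* Toy mq-deadline expiry check: dispatch the oldest expired request first. */
#include <stdio.h>
#include <stdbool.h>

#define READ  0
#define WRITE 1

struct toy_rq { int dir; long expires; long sector; };

static bool fifo_expired(const struct toy_rq *head, long now)
{
    return head && now >= head->expires;
}

int main(void)
{
    struct toy_rq read_head  = { READ,  100, 4096 };
    struct toy_rq write_head = { WRITE, 300,  512 };
    long now = 150;

    if (fifo_expired(&read_head, now))
        printf("dispatch expired READ at sector %ld\n", read_head.sector);
    else if (fifo_expired(&write_head, now))
        printf("dispatch expired WRITE at sector %ld\n", write_head.sector);
    else
        printf("no deadline pressure: continue sector-ordered batch\n");
    return 0;
}
```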
blk-mq.c 46 static int blk_mq_poll_stats_bkt(const struct request *rq) in blk_mq_poll_stats_bkt()
100 struct request *rq, void *priv, in blk_mq_check_inflight()
126 struct request *rq, void *priv, in blk_mq_check_inflight_rw()
289 static inline bool blk_mq_need_time_stamp(struct request *rq) in blk_mq_need_time_stamp()
298 struct request *rq = tags->static_rqs[tag]; in blk_mq_rq_ctx_init() local
361 struct request *rq; in blk_mq_get_request() local
426 struct request *rq; in blk_mq_alloc_request() local
450 struct request *rq; in blk_mq_alloc_request_hctx() local
492 static void __blk_mq_free_request(struct request *rq) in __blk_mq_free_request()
509 void blk_mq_free_request(struct request *rq) in blk_mq_free_request()
[all …]
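The blk_mq_rq_ctx_init() hit shows blk-mq's tag-to-request mapping: rq = tags->static_rqs[tag], i.e. an allocated tag indexes straight into a preallocated request array. Below is a toy tag set with a bitmap allocator; the names are illustrative, not the kernel API.

```c
/* Toy tag set: a bitmap allocator mapping tags to preallocated requests. */
#include <stdio.h>
#include <stdint.h>

#define NR_TAGS 8

struct toy_request { unsigned int tag; int in_flight; };

struct toy_tags {
    uint8_t bitmap;                         /* one bit per tag         */
    struct toy_request static_rqs[NR_TAGS]; /* preallocated requests   */
};

static int toy_get_tag(struct toy_tags *tags)
{
    for (int tag = 0; tag < NR_TAGS; tag++) {
        if (!(tags->bitmap & (1u << tag))) {
            tags->bitmap |= (uint8_t)(1u << tag);
            return tag;
        }
    }
    return -1;                              /* no tag free: caller must wait */
}

static struct toy_request *toy_get_request(struct toy_tags *tags)
{
    int tag = toy_get_tag(tags);
    if (tag < 0)
        return NULL;
    struct toy_request *rq = &tags->static_rqs[tag];
    rq->tag = (unsigned int)tag;
    rq->in_flight = 1;
    return rq;
}

int main(void)
{
    struct toy_tags tags = { 0 };
    struct toy_request *rq = toy_get_request(&tags);
    if (rq)
        printf("allocated request with tag %u\n", rq->tag);
    return 0;
}
```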
blk-flush.c 98 static unsigned int blk_flush_policy(unsigned long fflags, struct request *rq) in blk_flush_policy()
115 static unsigned int blk_flush_cur_seq(struct request *rq) in blk_flush_cur_seq()
120 static void blk_flush_restore_request(struct request *rq) in blk_flush_restore_request()
134 static void blk_flush_queue_rq(struct request *rq, bool add_front) in blk_flush_queue_rq()
155 static void blk_flush_complete_seq(struct request *rq, in blk_flush_complete_seq()
210 struct request *rq, *n; in flush_end_io() local
326 static void mq_flush_data_end_io(struct request *rq, blk_status_t error) in mq_flush_data_end_io()
359 void blk_insert_flush(struct request *rq) in blk_insert_flush()
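blk_flush_policy() decides which steps a request needs (preflush, data, postflush) from the request's flush/FUA flags and the device's cache features. Below is a hedged sketch of that decision using toy flag names rather than the kernel's REQ_* constants; the real policy has additional cases.

```c
/* Sketch of flush-policy selection for a write request. */
#include <stdio.h>

#define TOY_SEQ_PREFLUSH  (1u << 0)
#define TOY_SEQ_DATA      (1u << 1)
#define TOY_SEQ_POSTFLUSH (1u << 2)

struct toy_dev { int has_wc; int has_fua; };   /* write cache, hardware FUA */
struct toy_req { int wants_flush; int wants_fua; unsigned int data_len; };

static unsigned int toy_flush_policy(const struct toy_dev *d,
                                     const struct toy_req *r)
{
    unsigned int policy = 0;

    if (r->data_len)
        policy |= TOY_SEQ_DATA;
    if (d->has_wc && r->wants_flush)
        policy |= TOY_SEQ_PREFLUSH;
    if (d->has_wc && r->wants_fua && !d->has_fua)
        policy |= TOY_SEQ_POSTFLUSH;   /* emulate FUA with a trailing flush */
    return policy;
}

int main(void)
{
    struct toy_dev dev = { .has_wc = 1, .has_fua = 0 };
    struct toy_req req = { .wants_flush = 1, .wants_fua = 1, .data_len = 4096 };
    unsigned int p = toy_flush_policy(&dev, &req);
    printf("preflush=%d data=%d postflush=%d\n",
           !!(p & TOY_SEQ_PREFLUSH), !!(p & TOY_SEQ_DATA),
           !!(p & TOY_SEQ_POSTFLUSH));
    return 0;
}
```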
/Linux-v5.4/drivers/scsi/esas2r/
esas2r_disc.c 160 struct esas2r_request *rq = &a->general_req; in esas2r_disc_check_for_work() local
313 struct esas2r_request *rq = &a->general_req; in esas2r_disc_start_port() local
387 struct esas2r_request *rq) in esas2r_disc_continue()
460 struct esas2r_request *rq) in esas2r_disc_start_request()
489 struct esas2r_request *rq) in esas2r_disc_local_start_request()
503 struct esas2r_request *rq) in esas2r_disc_abort()
518 struct esas2r_request *rq) in esas2r_disc_block_dev_scan()
549 struct esas2r_request *rq) in esas2r_disc_block_dev_scan_cb()
578 struct esas2r_request *rq) in esas2r_disc_raid_grp_info()
625 struct esas2r_request *rq) in esas2r_disc_raid_grp_info_cb()
[all …]
esas2r_vda.c 67 struct esas2r_request *rq, in esas2r_process_vda_ioctl()
270 struct esas2r_request *rq) in esas2r_complete_vda_ioctl()
347 struct esas2r_request *rq, in esas2r_build_flash_req()
373 struct esas2r_request *rq, in esas2r_build_mgt_req()
420 void esas2r_build_ae_req(struct esas2r_adapter *a, struct esas2r_request *rq) in esas2r_build_ae_req()
449 struct esas2r_request *rq, in esas2r_build_cli_req()
466 struct esas2r_request *rq, in esas2r_build_ioctl_req()
483 struct esas2r_request *rq, in esas2r_build_cfg_req()
504 static void clear_vda_request(struct esas2r_request *rq) in clear_vda_request()
esas2r_ioctl.c 83 struct esas2r_request *rq) in complete_fm_api_req()
111 struct esas2r_request *rq; in do_fm_api() local
182 struct esas2r_request *rq) in complete_nvr_req()
199 struct esas2r_request *rq) in complete_buffered_ioctl_req()
208 struct esas2r_request *rq; in handle_buffered_ioctl() local
294 struct esas2r_request *rq, in smp_ioctl_callback()
331 struct esas2r_request *rq) in esas2r_csmi_ioctl_tunnel_comp_cb()
343 struct esas2r_request *rq, in csmi_ioctl_tunnel()
391 struct esas2r_request *rq, in csmi_ioctl_callback()
607 struct esas2r_request *rq, void *context) in csmi_ioctl_done_callback()
[all …]
esas2r_io.c 46 void esas2r_start_request(struct esas2r_adapter *a, struct esas2r_request *rq) in esas2r_start_request()
120 struct esas2r_request *rq) in esas2r_local_start_request()
138 struct esas2r_request *rq) in esas2r_start_vda_request()
190 struct esas2r_request *rq = sgc->first_req; in esas2r_build_sg_list_sge() local
373 struct esas2r_request *rq = sgc->first_req; in esas2r_build_prd_iblk() local
527 struct esas2r_request *rq = sgc->first_req; in esas2r_build_sg_list_prd() local
770 struct esas2r_request *rq; in esas2r_send_task_mgmt() local
858 bool esas2r_ioreq_aborted(struct esas2r_adapter *a, struct esas2r_request *rq, in esas2r_ioreq_aborted()
/Linux-v5.4/drivers/scsi/fnic/
vnic_rq.c 27 static int vnic_rq_alloc_bufs(struct vnic_rq *rq) in vnic_rq_alloc_bufs()
65 void vnic_rq_free(struct vnic_rq *rq) in vnic_rq_free()
82 int vnic_rq_alloc(struct vnic_dev *vdev, struct vnic_rq *rq, unsigned int index, in vnic_rq_alloc()
111 void vnic_rq_init(struct vnic_rq *rq, unsigned int cq_index, in vnic_rq_init()
137 unsigned int vnic_rq_error_status(struct vnic_rq *rq) in vnic_rq_error_status()
142 void vnic_rq_enable(struct vnic_rq *rq) in vnic_rq_enable()
147 int vnic_rq_disable(struct vnic_rq *rq) in vnic_rq_disable()
165 void vnic_rq_clean(struct vnic_rq *rq, in vnic_rq_clean()
166 void (*buf_clean)(struct vnic_rq *rq, struct vnic_rq_buf *buf)) in vnic_rq_clean()
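These vnic_rq_* helpers manage a receive descriptor ring: the driver posts buffers at a "to use" index and reclaims completed ones at a "to clean" index, both wrapping modulo the ring size. A generic standalone model follows; names and sizes are illustrative, not the fnic/enic structures.

```c
/* Toy receive-queue ring with post/clean indices. */
#include <stdio.h>

#define RING_SIZE 4

struct toy_rq {
    void *bufs[RING_SIZE];
    unsigned int to_use;    /* next slot to post a buffer into  */
    unsigned int to_clean;  /* next slot hardware will complete */
    unsigned int posted;
};

static int toy_rq_post(struct toy_rq *rq, void *buf)
{
    if (rq->posted == RING_SIZE)
        return -1;                         /* ring full */
    rq->bufs[rq->to_use] = buf;
    rq->to_use = (rq->to_use + 1) % RING_SIZE;
    rq->posted++;
    return 0;
}

static void *toy_rq_clean(struct toy_rq *rq)
{
    if (!rq->posted)
        return NULL;                       /* nothing outstanding */
    void *buf = rq->bufs[rq->to_clean];
    rq->to_clean = (rq->to_clean + 1) % RING_SIZE;
    rq->posted--;
    return buf;
}

int main(void)
{
    static char a[64], b[64];
    struct toy_rq rq = { { 0 } };
    toy_rq_post(&rq, a);
    toy_rq_post(&rq, b);
    printf("completed buffer: %p\n", toy_rq_clean(&rq));
    return 0;
}
```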
/Linux-v5.4/drivers/net/ethernet/cisco/enic/
vnic_rq.c 31 static int vnic_rq_alloc_bufs(struct vnic_rq *rq) in vnic_rq_alloc_bufs()
66 void vnic_rq_free(struct vnic_rq *rq) in vnic_rq_free()
85 int vnic_rq_alloc(struct vnic_dev *vdev, struct vnic_rq *rq, unsigned int index, in vnic_rq_alloc()
114 static void vnic_rq_init_start(struct vnic_rq *rq, unsigned int cq_index, in vnic_rq_init_start()
138 void vnic_rq_init(struct vnic_rq *rq, unsigned int cq_index, in vnic_rq_init()
146 unsigned int vnic_rq_error_status(struct vnic_rq *rq) in vnic_rq_error_status()
151 void vnic_rq_enable(struct vnic_rq *rq) in vnic_rq_enable()
156 int vnic_rq_disable(struct vnic_rq *rq) in vnic_rq_disable()
184 void vnic_rq_clean(struct vnic_rq *rq, in vnic_rq_clean()
185 void (*buf_clean)(struct vnic_rq *rq, struct vnic_rq_buf *buf)) in vnic_rq_clean()
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/
en_rx.c 66 static inline void mlx5e_read_title_slot(struct mlx5e_rq *rq, in mlx5e_read_title_slot()
111 static inline void mlx5e_decompress_cqe(struct mlx5e_rq *rq, in mlx5e_decompress_cqe()
132 static inline void mlx5e_decompress_cqe_no_hash(struct mlx5e_rq *rq, in mlx5e_decompress_cqe_no_hash()
143 static inline u32 mlx5e_decompress_cqes_cont(struct mlx5e_rq *rq, in mlx5e_decompress_cqes_cont()
171 static inline u32 mlx5e_decompress_cqes_start(struct mlx5e_rq *rq, in mlx5e_decompress_cqes_start()
192 static inline bool mlx5e_rx_cache_put(struct mlx5e_rq *rq, in mlx5e_rx_cache_put()
214 static inline bool mlx5e_rx_cache_get(struct mlx5e_rq *rq, in mlx5e_rx_cache_get()
240 static inline int mlx5e_page_alloc_pool(struct mlx5e_rq *rq, in mlx5e_page_alloc_pool()
261 static inline int mlx5e_page_alloc(struct mlx5e_rq *rq, in mlx5e_page_alloc()
270 void mlx5e_page_dma_unmap(struct mlx5e_rq *rq, struct mlx5e_dma_info *dma_info) in mlx5e_page_dma_unmap()
[all …]
/Linux-v5.4/drivers/gpu/drm/i915/
i915_request.c 99 struct i915_request *rq = to_request(fence); in i915_fence_release() local
142 static void __notify_execute_cb(struct i915_request *rq) in __notify_execute_cb()
197 static void remove_from_engine(struct i915_request *rq) in remove_from_engine()
218 static bool i915_request_retire(struct i915_request *rq) in i915_request_retire()
317 void i915_request_retire_upto(struct i915_request *rq) in i915_request_retire_upto()
336 __i915_request_await_execution(struct i915_request *rq, in __i915_request_await_execution()
338 void (*hook)(struct i915_request *rq, in __i915_request_await_execution()
584 struct i915_request *rq, *rn; in retire_requests() local
594 struct i915_request *rq; in request_alloc_slow() local
626 struct i915_request *rq; in __i915_request_create() local
[all …]
i915_request.h 266 i915_request_get(struct i915_request *rq) in i915_request_get()
272 i915_request_get_rcu(struct i915_request *rq) in i915_request_get_rcu()
278 i915_request_put(struct i915_request *rq) in i915_request_put()
317 static inline bool i915_request_signaled(const struct i915_request *rq) in i915_request_signaled()
323 static inline bool i915_request_is_active(const struct i915_request *rq) in i915_request_is_active()
336 static inline u32 __hwsp_seqno(const struct i915_request *rq) in __hwsp_seqno()
354 static inline u32 hwsp_seqno(const struct i915_request *rq) in hwsp_seqno()
365 static inline bool __i915_request_has_started(const struct i915_request *rq) in __i915_request_has_started()
396 static inline bool i915_request_started(const struct i915_request *rq) in i915_request_started()
413 static inline bool i915_request_is_running(const struct i915_request *rq) in i915_request_is_running()
[all …]
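i915_request_get()/i915_request_put() follow the usual get/put reference pattern: each get takes a reference and the last put releases the request. In the driver this is built on dma_fence refcounting; the minimal userspace sketch below shows only the pattern, not that machinery.

```c
/* Minimal get/put refcounting sketch. */
#include <stdio.h>
#include <stdlib.h>

struct toy_request {
    int refcount;
    unsigned int seqno;
};

static struct toy_request *toy_request_get(struct toy_request *rq)
{
    rq->refcount++;
    return rq;
}

static void toy_request_put(struct toy_request *rq)
{
    if (--rq->refcount == 0) {
        printf("releasing request %u\n", rq->seqno);
        free(rq);
    }
}

int main(void)
{
    struct toy_request *rq = calloc(1, sizeof(*rq));
    rq->refcount = 1;                               /* creator's reference */
    rq->seqno = 42;

    struct toy_request *ref = toy_request_get(rq);  /* e.g. for a waiter  */
    toy_request_put(rq);                            /* creator done       */
    toy_request_put(ref);                           /* waiter done: freed */
    return 0;
}
```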
i915_trace.h 800 trace_i915_request_submit(struct i915_request *rq) in trace_i915_request_submit()
805 trace_i915_request_execute(struct i915_request *rq) in trace_i915_request_execute()
810 trace_i915_request_in(struct i915_request *rq, unsigned int port) in trace_i915_request_in()
815 trace_i915_request_out(struct i915_request *rq) in trace_i915_request_out()
/Linux-v5.4/drivers/net/ethernet/mellanox/mlx5/core/en/xsk/
rx.c 10 bool mlx5e_xsk_pages_enough_umem(struct mlx5e_rq *rq, int count) in mlx5e_xsk_pages_enough_umem()
18 int mlx5e_xsk_page_alloc_umem(struct mlx5e_rq *rq, in mlx5e_xsk_page_alloc_umem()
46 static inline void mlx5e_xsk_recycle_frame(struct mlx5e_rq *rq, u64 handle) in mlx5e_xsk_recycle_frame()
55 void mlx5e_xsk_page_release(struct mlx5e_rq *rq, in mlx5e_xsk_page_release()
66 struct mlx5e_rq *rq = container_of(zca, struct mlx5e_rq, zca); in mlx5e_xsk_zca_free() local
71 static struct sk_buff *mlx5e_xsk_construct_skb(struct mlx5e_rq *rq, void *data, in mlx5e_xsk_construct_skb()
87 struct sk_buff *mlx5e_xsk_skb_from_cqe_mpwrq_linear(struct mlx5e_rq *rq, in mlx5e_xsk_skb_from_cqe_mpwrq_linear()
151 struct sk_buff *mlx5e_xsk_skb_from_cqe_linear(struct mlx5e_rq *rq, in mlx5e_xsk_skb_from_cqe_linear()
/Linux-v5.4/drivers/gpu/drm/i915/gt/
intel_ringbuffer.c 61 gen2_render_ring_flush(struct i915_request *rq, u32 mode) in gen2_render_ring_flush()
92 gen4_render_ring_flush(struct i915_request *rq, u32 mode) in gen4_render_ring_flush()
216 gen6_emit_post_sync_nonzero_flush(struct i915_request *rq) in gen6_emit_post_sync_nonzero_flush()
251 gen6_render_ring_flush(struct i915_request *rq, u32 mode) in gen6_render_ring_flush()
303 static u32 *gen6_rcs_emit_breadcrumb(struct i915_request *rq, u32 *cs) in gen6_rcs_emit_breadcrumb()
338 gen7_render_ring_cs_stall_wa(struct i915_request *rq) in gen7_render_ring_cs_stall_wa()
356 gen7_render_ring_flush(struct i915_request *rq, u32 mode) in gen7_render_ring_flush()
418 static u32 *gen7_rcs_emit_breadcrumb(struct i915_request *rq, u32 *cs) in gen7_rcs_emit_breadcrumb()
440 static u32 *gen6_xcs_emit_breadcrumb(struct i915_request *rq, u32 *cs) in gen6_xcs_emit_breadcrumb()
458 static u32 *gen7_xcs_emit_breadcrumb(struct i915_request *rq, u32 *cs) in gen7_xcs_emit_breadcrumb()
[all …]
/Linux-v5.4/drivers/ide/
ide-pm.c 11 struct request *rq; in generic_ide_suspend() local
43 static int ide_pm_execute_rq(struct request *rq) in ide_pm_execute_rq()
63 struct request *rq; in generic_ide_resume() local
99 void ide_complete_power_step(ide_drive_t *drive, struct request *rq) in ide_complete_power_step()
129 ide_startstop_t ide_start_power_step(ide_drive_t *drive, struct request *rq) in ide_start_power_step()
200 void ide_complete_pm_rq(ide_drive_t *drive, struct request *rq) in ide_complete_pm_rq()
223 void ide_check_pm_state(ide_drive_t *drive, struct request *rq) in ide_check_pm_state()
