Lines matching refs:rqd (uses of the struct nvm_rq descriptor, rqd, across the pblk read path, pblk-read.c):
40 static int pblk_read_ppalist_rq(struct pblk *pblk, struct nvm_rq *rqd, in pblk_read_ppalist_rq() argument
44 void *meta_list = rqd->meta_list; in pblk_read_ppalist_rq()
48 nr_secs = pblk_lookup_l2p_seq(pblk, rqd->ppa_list, blba, rqd->nr_ppas, in pblk_read_ppalist_rq()
58 if (pblk_ppa_empty(rqd->ppa_list[i])) { in pblk_read_ppalist_rq()
62 } else if (pblk_addr_in_cache(rqd->ppa_list[i])) { in pblk_read_ppalist_rq()
69 rqd->ppa_list[i])) { in pblk_read_ppalist_rq()
99 rqd->is_seq = 1; in pblk_read_ppalist_rq()
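
The references above (lines 40-99) are the multi-sector lookup path: pblk_read_ppalist_rq() resolves the whole logical range starting at blba into rqd->ppa_list in one L2P pass and classifies each sector as unmapped, cache-resident, or on-media. The sketch below condenses that loop; the from_cache parameter, the bio handling for cached/empty sectors, and the exact condition guarding rqd->is_seq are simplifications or recollections, not taken from the listing.

/* Condensed sketch, not the verbatim kernel function: bio fill-in for
 * cached/empty sectors and the alignment check before is_seq are elided. */
static int pblk_read_ppalist_rq(struct pblk *pblk, struct nvm_rq *rqd,
                                struct bio *bio, sector_t blba,
                                bool *from_cache)
{
    void *meta_list = rqd->meta_list;   /* per-sector OOB metadata (line 44) */
    int nr_secs, i;

    /* One pass over the L2P table for the whole range (line 48). */
    nr_secs = pblk_lookup_l2p_seq(pblk, rqd->ppa_list, blba, rqd->nr_ppas,
                                  from_cache);

    for (i = 0; i < nr_secs; i++) {
        if (pblk_ppa_empty(rqd->ppa_list[i])) {
            /* Unmapped LBA: satisfied as zeroes, no device access. */
            continue;
        }
        if (pblk_addr_in_cache(rqd->ppa_list[i])) {
            /* Still in the write buffer: served from cache. */
            continue;
        }
        /* Otherwise the PPA stays in rqd->ppa_list for the device read. */
    }

    /* Device-backed, aligned requests use sequential OOB metadata (line 99). */
    rqd->is_seq = 1;

    return nr_secs;
}
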
109 static void pblk_read_check_seq(struct pblk *pblk, struct nvm_rq *rqd, in pblk_read_check_seq() argument
112 void *meta_list = rqd->meta_list; in pblk_read_check_seq()
113 int nr_lbas = rqd->nr_ppas; in pblk_read_check_seq()
128 struct ppa_addr *ppa_list = nvm_rq_to_ppa_list(rqd); in pblk_read_check_seq()
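
Lines 109-128 are the post-read sanity check for sequential (user) reads: each sector carries its LBA in the OOB metadata, and it must match the LBA requested at the same offset. The real code also fetches the PPA list via nvm_rq_to_ppa_list() (line 128) to report the offending address; in this sketch error reporting is reduced to a WARN_ONCE(), and meta->lba and ADDR_EMPTY are recollections from pblk.h, not shown in the listing.

static void pblk_read_check_seq(struct pblk *pblk, struct nvm_rq *rqd,
                                sector_t blba)
{
    void *meta_list = rqd->meta_list;
    int nr_lbas = rqd->nr_ppas;
    int i;

    for (i = 0; i < nr_lbas; i++) {
        struct pblk_sec_meta *meta = pblk_get_meta(pblk, meta_list, i);
        u64 lba = le64_to_cpu(meta->lba);

        /* Unmapped sectors carry an empty LBA in the OOB area. */
        if (lba == ADDR_EMPTY)
            continue;

        /* The stored LBA must match the sequential LBA we asked for. */
        WARN_ONCE(lba != blba + i, "pblk: corrupted read LBA\n");
    }
}
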
142 static void pblk_read_check_rand(struct pblk *pblk, struct nvm_rq *rqd, in pblk_read_check_rand() argument
145 void *meta_lba_list = rqd->meta_list; in pblk_read_check_rand()
164 struct ppa_addr *ppa_list = nvm_rq_to_ppa_list(rqd); in pblk_read_check_rand()
176 WARN_ONCE(j != rqd->nr_ppas, "pblk: corrupted random request\n"); in pblk_read_check_rand()
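
Lines 142-176 are the same check for GC (random) reads, where the expected LBAs come from a caller-supplied list rather than a contiguous range. Holes in that list were never read, so a separate index j walks only the sectors actually present in the request; line 176 warns if that count disagrees with rqd->nr_ppas. As above, meta->lba and ADDR_EMPTY are recollections, and the PPA-based error reporting via nvm_rq_to_ppa_list() (line 164) is reduced to a WARN_ONCE().

static void pblk_read_check_rand(struct pblk *pblk, struct nvm_rq *rqd,
                                 u64 *lba_list, int nr_lbas)
{
    void *meta_lba_list = rqd->meta_list;
    int i, j = 0;

    for (i = 0; i < nr_lbas; i++) {
        struct pblk_sec_meta *meta;

        /* Holes in the GC LBA list were never submitted to the device. */
        if (lba_list[i] == ADDR_EMPTY)
            continue;

        meta = pblk_get_meta(pblk, meta_lba_list, j);
        WARN_ONCE(le64_to_cpu(meta->lba) != lba_list[i],
                  "pblk: corrupted read LBA\n");
        j++;
    }

    /* Every valid LBA must correspond to exactly one sector read. */
    WARN_ONCE(j != rqd->nr_ppas, "pblk: corrupted random request\n");
}
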
187 static void __pblk_end_io_read(struct pblk *pblk, struct nvm_rq *rqd, in __pblk_end_io_read() argument
190 struct pblk_g_ctx *r_ctx = nvm_rq_to_pdu(rqd); in __pblk_end_io_read()
191 struct bio *int_bio = rqd->bio; in __pblk_end_io_read()
196 if (rqd->error) in __pblk_end_io_read()
197 pblk_log_read_err(pblk, rqd); in __pblk_end_io_read()
199 pblk_read_check_seq(pblk, rqd, r_ctx->lba); in __pblk_end_io_read()
203 pblk_rq_to_line_put(pblk, rqd); in __pblk_end_io_read()
206 atomic_long_add(rqd->nr_ppas, &pblk->sync_reads); in __pblk_end_io_read()
207 atomic_long_sub(rqd->nr_ppas, &pblk->inflight_reads); in __pblk_end_io_read()
210 pblk_free_rqd(pblk, rqd, PBLK_READ); in __pblk_end_io_read()
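
Lines 187-210 form the common completion path shared by user reads and buffer-hit reads: log a device error if any, verify the OOB LBAs, drop the internal bio, optionally release the per-line references taken at submission, update the counters and return the rqd to the PBLK_READ pool. The put_line parameter name and the bio_put() call are recollections; everything else appears in the listing.

static void __pblk_end_io_read(struct pblk *pblk, struct nvm_rq *rqd,
                               bool put_line)
{
    struct pblk_g_ctx *r_ctx = nvm_rq_to_pdu(rqd);
    struct bio *int_bio = rqd->bio;

    if (rqd->error)
        pblk_log_read_err(pblk, rqd);

    /* Verify the OOB LBAs against the range recorded at submission. */
    pblk_read_check_seq(pblk, rqd, r_ctx->lba);
    bio_put(int_bio);

    if (put_line)
        pblk_rq_to_line_put(pblk, rqd);

    /* Debug accounting (CONFIG_NVM_PBLK_DEBUG in the real driver). */
    atomic_long_add(rqd->nr_ppas, &pblk->sync_reads);
    atomic_long_sub(rqd->nr_ppas, &pblk->inflight_reads);

    pblk_free_rqd(pblk, rqd, PBLK_READ);
}
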
214 static void pblk_end_io_read(struct nvm_rq *rqd) in pblk_end_io_read() argument
216 struct pblk *pblk = rqd->private; in pblk_end_io_read()
217 struct pblk_g_ctx *r_ctx = nvm_rq_to_pdu(rqd); in pblk_end_io_read()
220 pblk_end_user_read(bio, rqd->error); in pblk_end_io_read()
221 __pblk_end_io_read(pblk, rqd, true); in pblk_end_io_read()
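
The asynchronous end_io hook (lines 214-221) only has to finish the user's bio before handing the rqd to the common teardown above. As recalled, the user bio is stashed in the per-request context (r_ctx->private) at submission time; that field is an assumption not visible in the listing.

static void pblk_end_io_read(struct nvm_rq *rqd)
{
    struct pblk *pblk = rqd->private;
    struct pblk_g_ctx *r_ctx = nvm_rq_to_pdu(rqd);
    struct bio *bio = (struct bio *)r_ctx->private;   /* user bio (assumed) */

    /* Complete the user's request first, then tear down the internal one. */
    pblk_end_user_read(bio, rqd->error);
    __pblk_end_io_read(pblk, rqd, true);
}
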
224 static void pblk_read_rq(struct pblk *pblk, struct nvm_rq *rqd, struct bio *bio, in pblk_read_rq() argument
227 struct pblk_sec_meta *meta = pblk_get_meta(pblk, rqd->meta_list, 0); in pblk_read_rq()
259 rqd->ppa_addr = ppa; in pblk_read_rq()
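
Lines 224-259 are the single-sector counterpart of the list lookup: one L2P query, with the result stored inline in rqd->ppa_addr instead of ppa_list, and slot 0 of the metadata buffer fetched via pblk_get_meta() (line 227) to stamp the expected LBA. Reusing pblk_lookup_l2p_seq() with a count of one is a recollection; the cache and empty cases are elided as in the first sketch.

static void pblk_read_rq(struct pblk *pblk, struct nvm_rq *rqd, struct bio *bio,
                         sector_t lba, bool *from_cache)
{
    struct ppa_addr ppa;

    /* Single-sector lookup (count of 1), same helper as the list path. */
    pblk_lookup_l2p_seq(pblk, &ppa, lba, 1, from_cache);

    if (pblk_ppa_empty(ppa) || pblk_addr_in_cache(ppa))
        return;     /* served as zeroes or straight from the write buffer */

    /* Device read: a lone PPA lives in ppa_addr, not ppa_list (line 259). */
    rqd->ppa_addr = ppa;
}
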
269 struct nvm_rq *rqd; in pblk_submit_read() local
275 rqd = pblk_alloc_rqd(pblk, PBLK_READ); in pblk_submit_read()
277 rqd->opcode = NVM_OP_PREAD; in pblk_submit_read()
278 rqd->nr_ppas = nr_secs; in pblk_submit_read()
279 rqd->private = pblk; in pblk_submit_read()
280 rqd->end_io = pblk_end_io_read; in pblk_submit_read()
282 r_ctx = nvm_rq_to_pdu(rqd); in pblk_submit_read()
286 if (pblk_alloc_rqd_meta(pblk, rqd)) { in pblk_submit_read()
288 pblk_free_rqd(pblk, rqd, PBLK_READ); in pblk_submit_read()
299 nr_secs = pblk_read_ppalist_rq(pblk, rqd, int_bio, blba, in pblk_submit_read()
302 pblk_read_rq(pblk, rqd, int_bio, blba, &from_cache); in pblk_submit_read()
306 rqd->bio = int_bio; /* internal bio */ in pblk_submit_read()
308 if (from_cache && nr_secs == rqd->nr_ppas) { in pblk_submit_read()
312 __pblk_end_io_read(pblk, rqd, false); in pblk_submit_read()
313 } else if (nr_secs != rqd->nr_ppas) { in pblk_submit_read()
332 rqd->nr_ppas = nr_secs; in pblk_submit_read()
333 if (rqd->nr_ppas == 1) in pblk_submit_read()
334 rqd->ppa_addr = rqd->ppa_list[0]; in pblk_submit_read()
342 } else if (pblk_submit_io(pblk, rqd, NULL)) { in pblk_submit_read()
344 rqd->error = -ENODEV; in pblk_submit_read()
345 pblk_end_io_read(rqd); in pblk_submit_read()
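
Lines 269-345 show the entry point tying everything together: allocate an rqd from the PBLK_READ pool, fill the fixed fields, attach the per-request metadata, resolve the PPAs, and then take one of three exits: complete immediately when everything was found in the write buffer, shrink the request when only a prefix could be resolved, or hand it to the device and fake an -ENODEV completion if submission fails. The sketch keeps that structure; bio cloning and splitting, pblk_get_lba()/pblk_get_secs() and r_ctx->lba are recollections or simplifications not present in the listing.

static void pblk_submit_read(struct pblk *pblk, struct bio *bio)
{
    sector_t blba = pblk_get_lba(bio);          /* assumed helper */
    unsigned int nr_secs = pblk_get_secs(bio);  /* assumed helper */
    struct bio *int_bio = bio;   /* the real code clones the user bio */
    struct pblk_g_ctx *r_ctx;
    struct nvm_rq *rqd;
    bool from_cache;

    rqd = pblk_alloc_rqd(pblk, PBLK_READ);

    rqd->opcode = NVM_OP_PREAD;
    rqd->nr_ppas = nr_secs;
    rqd->private = pblk;
    rqd->end_io = pblk_end_io_read;

    r_ctx = nvm_rq_to_pdu(rqd);
    r_ctx->lba = blba;          /* remembered for the completion check */

    if (pblk_alloc_rqd_meta(pblk, rqd)) {
        pblk_free_rqd(pblk, rqd, PBLK_READ);
        return;                 /* the real code also fails the user bio */
    }

    if (nr_secs > 1)
        nr_secs = pblk_read_ppalist_rq(pblk, rqd, int_bio, blba,
                                       &from_cache);
    else
        pblk_read_rq(pblk, rqd, int_bio, blba, &from_cache);

    rqd->bio = int_bio;         /* internal bio (line 306) */

    if (from_cache && nr_secs == rqd->nr_ppas) {
        /* Everything came from the write buffer: complete right away. */
        __pblk_end_io_read(pblk, rqd, false);
    } else if (nr_secs != rqd->nr_ppas) {
        /* Only a prefix resolved cleanly: shrink the request; the real
         * code then splits the bio and retries the remainder (elided). */
        rqd->nr_ppas = nr_secs;
        if (rqd->nr_ppas == 1)
            rqd->ppa_addr = rqd->ppa_list[0];
    } else if (pblk_submit_io(pblk, rqd, NULL)) {
        /* Submission failed: complete with a synthetic -ENODEV. */
        rqd->error = -ENODEV;
        pblk_end_io_read(rqd);
    }
}
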
349 static int read_ppalist_rq_gc(struct pblk *pblk, struct nvm_rq *rqd, in read_ppalist_rq_gc() argument
370 rqd->ppa_list[valid_secs++] = ppa_list_l2p[i]; in read_ppalist_rq_gc()
380 static int read_rq_gc(struct pblk *pblk, struct nvm_rq *rqd, in read_rq_gc() argument
404 rqd->ppa_addr = ppa_l2p; in read_rq_gc()
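
Lines 349-404 are the GC-side lookups. read_ppalist_rq_gc() re-resolves the victim line's LBAs through the L2P table and packs into rqd->ppa_list only the sectors that still need moving (line 370); read_rq_gc() does the same for a single LBA and stores it in rqd->ppa_addr (line 404). The sketch keeps only the multi-sector variant; pblk_lookup_l2p_rand(), NVM_MAX_VLBA and the trailing parameters are recollections, and the staleness check against the PPA recorded when GC selected the line is elided.

static int read_ppalist_rq_gc(struct pblk *pblk, struct nvm_rq *rqd,
                              struct pblk_line *line, u64 *lba_list,
                              unsigned int nr_secs)
{
    struct ppa_addr ppa_list_l2p[NVM_MAX_VLBA];
    int valid_secs = 0;
    unsigned int i;

    /* Current L2P mappings for the LBAs GC wants to move. */
    pblk_lookup_l2p_rand(pblk, ppa_list_l2p, lba_list, nr_secs);

    for (i = 0; i < nr_secs; i++) {
        /* Holes in the GC list have already been invalidated. */
        if (lba_list[i] == ADDR_EMPTY)
            continue;

        /* The real code also drops sectors whose current mapping no longer
         * points at this line (the host rewrote them since the GC snapshot);
         * that comparison is elided here. */
        rqd->ppa_list[valid_secs++] = ppa_list_l2p[i];
    }

    return valid_secs;
}
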
417 struct nvm_rq rqd; in pblk_submit_read_gc() local
420 memset(&rqd, 0, sizeof(struct nvm_rq)); in pblk_submit_read_gc()
422 ret = pblk_alloc_rqd_meta(pblk, &rqd); in pblk_submit_read_gc()
427 gc_rq->secs_to_gc = read_ppalist_rq_gc(pblk, &rqd, gc_rq->line, in pblk_submit_read_gc()
432 rqd.ppa_addr = rqd.ppa_list[0]; in pblk_submit_read_gc()
434 gc_rq->secs_to_gc = read_rq_gc(pblk, &rqd, gc_rq->line, in pblk_submit_read_gc()
442 rqd.opcode = NVM_OP_PREAD; in pblk_submit_read_gc()
443 rqd.nr_ppas = gc_rq->secs_to_gc; in pblk_submit_read_gc()
445 if (pblk_submit_io_sync(pblk, &rqd, gc_rq->data)) { in pblk_submit_read_gc()
450 pblk_read_check_rand(pblk, &rqd, gc_rq->lba_list, gc_rq->nr_secs); in pblk_submit_read_gc()
454 if (rqd.error) { in pblk_submit_read_gc()
457 pblk_print_failed_rqd(pblk, &rqd, rqd.error); in pblk_submit_read_gc()
468 pblk_free_rqd_meta(pblk, &rqd); in pblk_submit_read_gc()
472 pblk_free_rqd_meta(pblk, &rqd); in pblk_submit_read_gc()
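
Lines 417-472 show the GC read itself: the rqd lives on the stack (line 417) and is zeroed, only its metadata buffers are allocated, the read is submitted synchronously into gc_rq->data (line 445), the OOB LBAs are cross-checked with pblk_read_check_rand() and the metadata is freed on every exit path (lines 468/472). The sketch simplifies the return codes to 0/-EIO, uses the struct pblk_gc_rq name from memory and drops the single-sector read_rq_gc() branch (line 434).

static int pblk_submit_read_gc(struct pblk *pblk, struct pblk_gc_rq *gc_rq)
{
    struct nvm_rq rqd;
    int ret = 0;

    /* On-stack request: only the metadata buffers need allocating. */
    memset(&rqd, 0, sizeof(struct nvm_rq));

    ret = pblk_alloc_rqd_meta(pblk, &rqd);
    if (ret)
        return ret;

    gc_rq->secs_to_gc = read_ppalist_rq_gc(pblk, &rqd, gc_rq->line,
                                           gc_rq->lba_list, gc_rq->nr_secs);
    if (!gc_rq->secs_to_gc)
        goto out;

    if (gc_rq->secs_to_gc == 1)
        rqd.ppa_addr = rqd.ppa_list[0];   /* single PPA goes inline (line 432) */

    rqd.opcode = NVM_OP_PREAD;
    rqd.nr_ppas = gc_rq->secs_to_gc;

    /* Synchronous read: the payload lands directly in gc_rq->data. */
    if (pblk_submit_io_sync(pblk, &rqd, gc_rq->data)) {
        ret = -EIO;
        goto out;
    }

    /* Cross-check the OOB LBAs against the LBAs GC asked to move. */
    pblk_read_check_rand(pblk, &rqd, gc_rq->lba_list, gc_rq->nr_secs);

    if (rqd.error)
        pblk_print_failed_rqd(pblk, &rqd, rqd.error);

out:
    /* The metadata buffers are released on both success and error. */
    pblk_free_rqd_meta(pblk, &rqd);
    return ret;
}
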