Lines Matching refs:nlb. Each hit below comes from QEMU's emulated NVMe controller and lists the source line number, the matching code, the enclosing function, and whether nlb is a function argument or a local variable at that site.
1234 static uint16_t nvme_map_data(NvmeCtrl *n, uint32_t nlb, NvmeRequest *req) in nvme_map_data() argument
1240 size_t len = nvme_l2b(ns, nlb); in nvme_map_data()
1247 len += nvme_m2b(ns, nlb); in nvme_map_data()
1264 static uint16_t nvme_map_mdata(NvmeCtrl *n, uint32_t nlb, NvmeRequest *req) in nvme_map_mdata() argument
1267 size_t len = nvme_m2b(ns, nlb); in nvme_map_mdata()
1273 len += nvme_l2b(ns, nlb); in nvme_map_mdata()
1695 uint32_t nlb) in nvme_check_bounds() argument
1699 if (unlikely(UINT64_MAX - slba < nlb || slba + nlb > nsze)) { in nvme_check_bounds()
1700 trace_pci_nvme_err_invalid_lba_range(slba, nlb, nsze); in nvme_check_bounds()
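nvme_check_bounds() at line 1695 rejects a range that either wraps the 64-bit LBA space or runs past the namespace size (nsze); the UINT64_MAX - slba < nlb test catches the overflow before slba + nlb is compared. A standalone sketch of the same pattern, with names chosen here for illustration:

    #include <stdbool.h>
    #include <stdint.h>

    /*
     * Return true when [slba, slba + nlb) fits inside a namespace of
     * nsze logical blocks, guarding against 64-bit wrap-around first.
     */
    static bool lba_range_in_bounds(uint64_t slba, uint32_t nlb, uint64_t nsze)
    {
        if (UINT64_MAX - slba < nlb) {
            return false;              /* slba + nlb would overflow */
        }
        return slba + nlb <= nsze;     /* end must not exceed namespace size */
    }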
1708 uint32_t nlb, int flags) in nvme_block_status_all() argument
1712 int64_t pnum = 0, bytes = nvme_l2b(ns, nlb); in nvme_block_status_all()
1745 uint32_t nlb) in nvme_check_dulbe() argument
1750 ret = nvme_block_status_all(ns, slba, nlb, BDRV_BLOCK_DATA); in nvme_check_dulbe()
1809 uint64_t slba, uint32_t nlb) in nvme_check_zone_write() argument
1822 if (slba < zone->w_ptr || slba + nlb > ezrwa) { in nvme_check_zone_write()
1834 if (unlikely((slba + nlb) > zcap)) { in nvme_check_zone_write()
1835 trace_pci_nvme_err_zone_boundary(slba, nlb, zcap); in nvme_check_zone_write()
1863 uint32_t nlb) in nvme_check_zone_read() argument
1873 end = slba + nlb; in nvme_check_zone_read()
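The zoned checks at lines 1809 and 1863 keep reads and writes inside a zone: a write has to land at the zone's write pointer (or inside the ZRWA window, line 1822) and must not cross the zone's writable boundary (line 1834). A simplified sketch of the non-ZRWA write check, with an invented zone struct; zcap here is the raw capacity in blocks rather than the precomputed boundary the original compares against:

    #include <stdbool.h>
    #include <stdint.h>

    typedef struct {
        uint64_t zslba;   /* zone start LBA */
        uint64_t w_ptr;   /* current write pointer */
        uint64_t zcap;    /* zone capacity in logical blocks */
    } ZoneSketch;

    /*
     * A non-ZRWA zoned write must start exactly at the write pointer and
     * must not extend past the zone's writable area (zslba + zcap).
     */
    static bool zone_write_ok(const ZoneSketch *z, uint64_t slba, uint32_t nlb)
    {
        if (slba != z->w_ptr) {
            return false;
        }
        return slba + nlb <= z->zslba + z->zcap;
    }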
2062 uint32_t nlb) in nvme_advance_zone_wp() argument
2064 zone->d.wp += nlb; in nvme_advance_zone_wp()
2090 uint32_t nlb; in nvme_finalize_zoned_write() local
2093 nlb = le16_to_cpu(rw->nlb) + 1; in nvme_finalize_zoned_write()
2099 uint64_t elba = slba + nlb - 1; in nvme_finalize_zoned_write()
2108 nvme_advance_zone_wp(ns, zone, nlb); in nvme_finalize_zoned_write()
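Many of the hits from here on read the command field as le16_to_cpu(rw->nlb) + 1. NVMe encodes the NLB of an I/O command as a zero-based count (0 means one logical block), so the model widens and increments it before doing any arithmetic. A hedged sketch of that decode step, with the command struct trimmed to the single field it needs and a stand-in for le16_to_cpu():

    #include <stdint.h>

    /* Illustration only: the NLB field of an NVMe read/write command,
     * stored little-endian and zero-based on the wire. */
    typedef struct {
        uint16_t nlb;
    } NvmeRwCmdSketch;

    /* Stand-in for le16_to_cpu(); a real build byte-swaps on big-endian hosts. */
    static inline uint16_t sketch_le16_to_cpu(uint16_t v)
    {
        return v;
    }

    static inline uint32_t rw_block_count(const NvmeRwCmdSketch *rw)
    {
        /* 0 on the wire means "one logical block". */
        return (uint32_t)sketch_le16_to_cpu(rw->nlb) + 1;
    }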
2200 uint32_t nlb = (uint32_t)le16_to_cpu(rw->nlb) + 1; in nvme_rw_cb() local
2204 size_t mlen = nvme_m2b(ns, nlb); in nvme_rw_cb()
2216 status = nvme_map_mdata(nvme_ctrl(req), nlb, req); in nvme_rw_cb()
2299 uint32_t nlb = le16_to_cpu(rw->nlb) + 1; in nvme_verify_mdata_in_cb() local
2300 size_t mlen = nvme_m2b(ns, nlb); in nvme_verify_mdata_in_cb()
2468 uint32_t nlb = le16_to_cpu(rw->nlb) + 1; in nvme_compare_data_cb() local
2469 size_t mlen = nvme_m2b(ns, nlb); in nvme_compare_data_cb()
2537 uint32_t nlb; in nvme_dsm_md_cb() local
2545 nlb = le32_to_cpu(range->nlb); in nvme_dsm_md_cb()
2552 ret = nvme_block_status_all(ns, slba, nlb, BDRV_BLOCK_ZERO); in nvme_dsm_md_cb()
2563 nvme_m2b(ns, nlb), BDRV_REQ_MAY_UNMAP, in nvme_dsm_md_cb()
2579 uint32_t nlb; in nvme_dsm_cb() local
2595 nlb = le32_to_cpu(range->nlb); in nvme_dsm_cb()
2597 trace_pci_nvme_dsm_deallocate(slba, nlb); in nvme_dsm_cb()
2599 if (nlb > n->dmrsl) { in nvme_dsm_cb()
2600 trace_pci_nvme_dsm_single_range_limit_exceeded(nlb, n->dmrsl); in nvme_dsm_cb()
2604 if (nvme_check_bounds(ns, slba, nlb)) { in nvme_dsm_cb()
2605 trace_pci_nvme_err_invalid_lba_range(slba, nlb, in nvme_dsm_cb()
2611 nvme_l2b(ns, nlb), in nvme_dsm_cb()
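The Dataset Management path is different: the per-range length is read with le32_to_cpu(range->nlb) and used as-is, then checked against the controller's single-range deallocation limit (dmrsl, line 2599) and the namespace bounds (line 2604) before the discard is issued. A sketch of that validation, with the range struct and limit names invented for the example:

    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical view of one Dataset Management range entry. */
    typedef struct {
        uint32_t nlb;     /* length in logical blocks, used without +1 */
        uint64_t slba;    /* starting LBA */
    } DsmRangeSketch;

    /*
     * Accept a deallocate range only if it respects the controller's
     * single-range limit (dmrsl) and fits inside the namespace.
     */
    static bool dsm_range_ok(const DsmRangeSketch *r, uint32_t dmrsl,
                             uint64_t nsze)
    {
        if (r->nlb > dmrsl) {
            return false;
        }
        if (UINT64_MAX - r->slba < r->nlb || r->slba + r->nlb > nsze) {
            return false;
        }
        return true;
    }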
2666 uint32_t nlb = le16_to_cpu(rw->nlb) + 1; in nvme_verify() local
2667 size_t len = nvme_l2b(ns, nlb); in nvme_verify()
2675 trace_pci_nvme_verify(nvme_cid(req), nvme_nsid(ns), slba, nlb); in nvme_verify()
2689 data_len += nvme_m2b(ns, nlb); in nvme_verify()
2696 status = nvme_check_bounds(ns, slba, nlb); in nvme_verify()
2702 status = nvme_check_dulbe(ns, slba, nlb); in nvme_verify()
2797 uint32_t *nlb, in nvme_copy_source_range_parse_format0_2() argument
2813 if (nlb) { in nvme_copy_source_range_parse_format0_2()
2814 *nlb = le16_to_cpu(_ranges[idx].nlb) + 1; in nvme_copy_source_range_parse_format0_2()
2832 uint32_t *nlb, in nvme_copy_source_range_parse_format1_3() argument
2848 if (nlb) { in nvme_copy_source_range_parse_format1_3()
2849 *nlb = le16_to_cpu(_ranges[idx].nlb) + 1; in nvme_copy_source_range_parse_format1_3()
2873 uint64_t *slba, uint32_t *nlb, in nvme_copy_source_range_parse() argument
2880 nvme_copy_source_range_parse_format0_2(ranges, idx, slba, nlb, snsid, in nvme_copy_source_range_parse()
2886 nvme_copy_source_range_parse_format1_3(ranges, idx, slba, nlb, snsid, in nvme_copy_source_range_parse()
2901 uint32_t nlb; in nvme_check_copy_mcl() local
2903 &nlb, NULL, NULL, NULL, NULL); in nvme_check_copy_mcl()
2904 copy_len += nlb; in nvme_check_copy_mcl()
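nvme_check_copy_mcl() accumulates the decoded, one-based nlb of every source range and compares the total against the namespace's Maximum Copy Length; the per-range cap against mssrl is applied separately in nvme_do_copy() (line 3276). A sketch of the aggregate check, operating on counts that are assumed to be decoded already:

    #include <stdbool.h>
    #include <stdint.h>

    /*
     * Sum the per-range block counts of a Copy command and refuse it when
     * the total exceeds the Maximum Copy Length (mcl). Per-range limits
     * are checked elsewhere.
     */
    static bool copy_total_within_mcl(const uint32_t *range_nlb, int nr,
                                      uint32_t mcl)
    {
        uint64_t copy_len = 0;

        for (int i = 0; i < nr; i++) {
            copy_len += range_nlb[i];
        }

        return copy_len <= mcl;
    }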
2919 uint32_t nlb; in nvme_copy_out_completed_cb() local
2922 &nlb, NULL, NULL, NULL, NULL); in nvme_copy_out_completed_cb()
2933 nvme_advance_zone_wp(dns, iocb->zone, nlb); in nvme_copy_out_completed_cb()
2937 iocb->slba += nlb; in nvme_copy_out_completed_cb()
2947 uint32_t nlb; in nvme_copy_out_cb() local
2956 &nlb, NULL, NULL, NULL, NULL); in nvme_copy_out_cb()
2958 mlen = nvme_m2b(dns, nlb); in nvme_copy_out_cb()
2959 mbounce = iocb->bounce + nvme_l2b(dns, nlb); in nvme_copy_out_cb()
2982 uint32_t nlb; in nvme_copy_in_completed_cb() local
2998 &nlb, NULL, &apptag, &appmask, &reftag); in nvme_copy_in_completed_cb()
3000 trace_pci_nvme_copy_out(iocb->slba, nlb); in nvme_copy_in_completed_cb()
3002 len = nvme_l2b(sns, nlb); in nvme_copy_in_completed_cb()
3009 mlen = nvme_m2b(sns, nlb); in nvme_copy_in_completed_cb()
3010 mbounce = iocb->bounce + nvme_l2b(sns, nlb); in nvme_copy_in_completed_cb()
3027 mlen = nvme_m2b(dns, nlb); in nvme_copy_in_completed_cb()
3028 mbounce = iocb->bounce + nvme_l2b(dns, nlb); in nvme_copy_in_completed_cb()
3051 status = nvme_check_bounds(dns, iocb->slba, nlb); in nvme_copy_in_completed_cb()
3057 status = nvme_check_zone_write(dns, iocb->zone, iocb->slba, nlb); in nvme_copy_in_completed_cb()
3063 iocb->zone->w_ptr += nlb; in nvme_copy_in_completed_cb()
3090 uint32_t nlb; in nvme_copy_in_cb() local
3097 &nlb, NULL, NULL, NULL, NULL); in nvme_copy_in_cb()
3100 qemu_iovec_add(&iocb->iov, iocb->bounce + nvme_l2b(sns, nlb), in nvme_copy_in_cb()
3101 nvme_m2b(sns, nlb)); in nvme_copy_in_cb()
3179 uint32_t nlb; in nvme_do_copy() local
3195 &slba, &nlb, &snsid, NULL, NULL, NULL); in nvme_do_copy()
3208 if (((slba + nlb) > iocb->slba) && in nvme_do_copy()
3209 ((slba + nlb) < (iocb->slba + iocb->tcl))) { in nvme_do_copy()
3216 &slba, &nlb, NULL, NULL, NULL, NULL); in nvme_do_copy()
3272 len = nvme_l2b(sns, nlb); in nvme_do_copy()
3274 trace_pci_nvme_copy_source_range(slba, nlb); in nvme_do_copy()
3276 if (nlb > le16_to_cpu(sns->id_ns.mssrl)) { in nvme_do_copy()
3281 status = nvme_check_bounds(sns, slba, nlb); in nvme_do_copy()
3287 status = nvme_check_dulbe(sns, slba, nlb); in nvme_do_copy()
3294 status = nvme_check_zone_read(sns, slba, nlb); in nvme_do_copy()
3417 uint32_t nlb = le16_to_cpu(rw->nlb) + 1; in nvme_compare() local
3419 size_t data_len = nvme_l2b(ns, nlb); in nvme_compare()
3425 trace_pci_nvme_compare(nvme_cid(req), nvme_nsid(ns), slba, nlb); in nvme_compare()
3432 len += nvme_m2b(ns, nlb); in nvme_compare()
3444 status = nvme_check_bounds(ns, slba, nlb); in nvme_compare()
3450 status = nvme_check_dulbe(ns, slba, nlb); in nvme_compare()
3609 uint32_t nlb = (uint32_t)le16_to_cpu(rw->nlb) + 1; in nvme_read() local
3611 uint64_t data_size = nvme_l2b(ns, nlb); in nvme_read()
3618 mapped_size += nvme_m2b(ns, nlb); in nvme_read()
3629 trace_pci_nvme_read(nvme_cid(req), nvme_nsid(ns), nlb, mapped_size, slba); in nvme_read()
3636 status = nvme_check_bounds(ns, slba, nlb); in nvme_read()
3642 status = nvme_check_zone_read(ns, slba, nlb); in nvme_read()
3644 trace_pci_nvme_err_zone_read_not_ok(slba, nlb, status); in nvme_read()
3650 status = nvme_check_dulbe(ns, slba, nlb); in nvme_read()
3660 status = nvme_map_data(n, nlb, req); in nvme_read()
3678 uint32_t nlb) in nvme_do_write_fdp() argument
3682 uint64_t data_size = nvme_l2b(ns, nlb); in nvme_do_write_fdp()
3701 while (nlb) { in nvme_do_write_fdp()
3702 if (nlb < ru->ruamw) { in nvme_do_write_fdp()
3703 ru->ruamw -= nlb; in nvme_do_write_fdp()
3707 nlb -= ru->ruamw; in nvme_do_write_fdp()
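nvme_do_write_fdp() charges the write against the current reclaim unit's remaining writable blocks (ruamw) and rolls over to a fresh unit whenever the write does not fit (lines 3701-3707). A self-contained sketch of that accounting, with the reclaim-unit update reduced to refilling a counter:

    #include <stdint.h>

    typedef struct {
        uint64_t ruamw;   /* logical blocks still writable in this reclaim unit */
    } ReclaimUnitSketch;

    /*
     * Charge an nlb-block write against the reclaim unit, opening a fresh
     * unit (here: just refilling the counter) each time one is exhausted.
     */
    static void fdp_account_write(ReclaimUnitSketch *ru, uint32_t nlb,
                                  uint64_t ru_size_blocks)
    {
        while (nlb) {
            if (nlb < ru->ruamw) {
                ru->ruamw -= nlb;           /* write fits in the current unit */
                break;
            }
            nlb -= ru->ruamw;               /* consume what is left ...      */
            ru->ruamw = ru_size_blocks;     /* ... then move to a new unit   */
        }
    }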
3718 uint32_t nlb = (uint32_t)le16_to_cpu(rw->nlb) + 1; in nvme_do_write() local
3721 uint64_t data_size = nvme_l2b(ns, nlb); in nvme_do_write()
3730 mapped_size += nvme_m2b(ns, nlb); in nvme_do_write()
3736 mapped_size -= nvme_m2b(ns, nlb); in nvme_do_write()
3742 nvme_nsid(ns), nlb, mapped_size, slba); in nvme_do_write()
3751 status = nvme_check_bounds(ns, slba, nlb); in nvme_do_write()
3808 status = nvme_check_zone_write(ns, zone, slba, nlb); in nvme_do_write()
3819 zone->w_ptr += nlb; in nvme_do_write()
3822 nvme_do_write_fdp(n, req, slba, nlb); in nvme_do_write()
3832 status = nvme_map_data(n, nlb, req); in nvme_do_write()
4191 uint32_t nlb = elba - wp + 1; in nvme_zone_mgmt_send_zrwa_flush() local
4207 if (nlb % ns->zns.zrwafg) { in nvme_zone_mgmt_send_zrwa_flush()
4216 zone->w_ptr += nlb; in nvme_zone_mgmt_send_zrwa_flush()
4218 nvme_advance_zone_wp(ns, zone, nlb); in nvme_zone_mgmt_send_zrwa_flush()
7420 uint32_t nlb = (uint32_t)le16_to_cpu(rw->nlb); in nvme_atomic_write_check() local
7421 uint64_t elba = slba + nlb; in nvme_atomic_write_check()
7426 ((rw->nlb + 1) > atomic->atomic_max_write_size))) { in nvme_atomic_write_check()
7456 req_nlb = (uint32_t)le16_to_cpu(req_rw->nlb); in nvme_atomic_write_check()
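In nvme_atomic_write_check() the field is deliberately left zero-based: slba + nlb is then the last LBA the command touches (line 7421), which turns the conflict test between in-flight commands into a closed-interval overlap check. A sketch of that comparison, assuming both ranges were bounds-checked beforehand so the additions cannot overflow:

    #include <stdbool.h>
    #include <stdint.h>

    /*
     * With nlb kept zero-based, [slba, slba + nlb] is the inclusive LBA
     * range of a command; two commands conflict when the ranges intersect.
     */
    static bool lba_ranges_overlap(uint64_t slba_a, uint32_t nlb_a,
                                   uint64_t slba_b, uint32_t nlb_b)
    {
        uint64_t elba_a = slba_a + nlb_a;
        uint64_t elba_b = slba_b + nlb_b;

        return slba_a <= elba_b && slba_b <= elba_a;
    }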