/qemu/hw/nvme/ |
H A D | dif.c |
    65   size_t mlen, uint16_t apptag,   in nvme_dif_pract_generate_dif_crc16() argument
    99   size_t mlen, uint16_t apptag,   in nvme_dif_pract_generate_dif_crc64() argument
   138   uint8_t *mbuf, size_t mlen, uint16_t apptag,   in nvme_dif_pract_generate_dif() argument
   143   return nvme_dif_pract_generate_dif_crc16(ns, buf, len, mbuf, mlen,   in nvme_dif_pract_generate_dif()
   146   return nvme_dif_pract_generate_dif_crc64(ns, buf, len, mbuf, mlen,   in nvme_dif_pract_generate_dif()
   297   uint8_t *mbuf, size_t mlen, uint8_t prinfo,   in nvme_dif_check() argument
   349   uint16_t nvme_dif_mangle_mdata(NvmeNamespace *ns, uint8_t *mbuf, size_t mlen,   in nvme_dif_mangle_mdata() argument
   359   int64_t bytes = (mlen / ns->lbaf.ms) << ns->lbaf.ds;   in nvme_dif_mangle_mdata()
   388   mlen = (pnum >> ns->lbaf.ds) * ns->lbaf.ms;   in nvme_dif_mangle_mdata()
   389   end = mbufp + mlen;   in nvme_dif_mangle_mdata()
   [all …]
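The matches at lines 138-146 above show a wrapper that picks a 16-bit or 64-bit guard routine depending on the namespace protection-information format. Below is a minimal standalone sketch of that dispatch pattern; every type and function name here is a simplified stand-in, not QEMU's actual dif.c code.

    /* Sketch of the CRC16/CRC64 guard dispatch suggested by the dif.c
     * excerpt. Names and fields are simplified stand-ins. */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    enum pif { PIF_GUARD16, PIF_GUARD64 };

    struct ns { enum pif pif; };

    static void generate_dif_crc16(const uint8_t *buf, size_t len, uint16_t apptag)
    {
        printf("16-bit guard over %zu bytes, apptag=0x%04x\n", len, apptag);
    }

    static void generate_dif_crc64(const uint8_t *buf, size_t len, uint16_t apptag)
    {
        printf("64-bit guard over %zu bytes, apptag=0x%04x\n", len, apptag);
    }

    /* Dispatch on the namespace format, mirroring lines 138-146 above. */
    static void generate_dif(struct ns *ns, const uint8_t *buf, size_t len,
                             uint16_t apptag)
    {
        if (ns->pif == PIF_GUARD16) {
            generate_dif_crc16(buf, len, apptag);
        } else {
            generate_dif_crc64(buf, len, apptag);
        }
    }

    int main(void)
    {
        uint8_t data[512] = { 0 };
        struct ns ns16 = { PIF_GUARD16 }, ns64 = { PIF_GUARD64 };

        generate_dif(&ns16, data, sizeof(data), 0x1234);
        generate_dif(&ns64, data, sizeof(data), 0x1234);
        return 0;
    }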
H A D | dif.h |
   180   uint16_t nvme_dif_mangle_mdata(NvmeNamespace *ns, uint8_t *mbuf, size_t mlen,
   183   uint8_t *mbuf, size_t mlen, uint16_t apptag,
   186   uint8_t *mbuf, size_t mlen, uint8_t prinfo,
H A D | ctrl.c |
  2204   size_t mlen = nvme_m2b(ns, nlb);   in nvme_rw_cb() local
  2206   req->aiocb = blk_aio_pwrite_zeroes(blk, offset, mlen,   in nvme_rw_cb()
  2300   size_t mlen = nvme_m2b(ns, nlb);   in nvme_verify_mdata_in_cb() local
  2310   ctx->mdata.bounce = g_malloc(mlen);   in nvme_verify_mdata_in_cb()
  2313   qemu_iovec_add(&ctx->mdata.iov, ctx->mdata.bounce, mlen);   in nvme_verify_mdata_in_cb()
  2469   size_t mlen = nvme_m2b(ns, nlb);   in nvme_compare_data_cb() local
  2472   ctx->mdata.bounce = g_malloc(mlen);   in nvme_compare_data_cb()
  2475   qemu_iovec_add(&ctx->mdata.iov, ctx->mdata.bounce, mlen);   in nvme_compare_data_cb()
  2948   size_t mlen;   in nvme_copy_out_cb() local
  2958   mlen = nvme_m2b(dns, nlb);   in nvme_copy_out_cb()
   [all …]
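In the ctrl.c excerpts, mlen is a metadata byte count derived from a logical-block count via nvme_m2b() and then used to size a bounce buffer. Here is a standalone sketch of that arithmetic under an assumed LBA format, with plain malloc standing in for g_malloc and a made-up m2b() helper in place of QEMU's macro.

    /* Sketch of the metadata-bytes calculation and bounce-buffer
     * allocation pattern seen in the ctrl.c excerpt. The lbaf values
     * are illustrative assumptions. */
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    struct lba_format {
        uint8_t  ds;  /* data size as a power of two (9 -> 512 bytes) */
        uint16_t ms;  /* metadata bytes per logical block */
    };

    /* Counterpart of nvme_m2b(): metadata bytes carried by nlb blocks. */
    static size_t m2b(const struct lba_format *lbaf, uint32_t nlb)
    {
        return (size_t)nlb * lbaf->ms;
    }

    int main(void)
    {
        struct lba_format lbaf = { .ds = 9, .ms = 16 };
        uint32_t nlb = 8;

        size_t mlen = m2b(&lbaf, nlb);
        uint8_t *bounce = malloc(mlen);   /* bounce buffer for metadata */
        if (!bounce) {
            return 1;
        }
        memset(bounce, 0, mlen);

        printf("%u blocks of %u data bytes carry %zu metadata bytes\n",
               nlb, 1u << lbaf.ds, mlen);
        free(bounce);
        return 0;
    }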
/qemu/hw/dma/ |
H A D | xlnx_csu_dma.c |
   202   uint32_t mlen = MIN(len - i, s->width);   in xlnx_csu_dma_read() local
   205   buf + i, mlen, false);   in xlnx_csu_dma_read()
   233   uint32_t mlen = MIN(len - i, s->width);   in xlnx_csu_dma_write() local
   236   buf, mlen, true);   in xlnx_csu_dma_write()
   237   buf += mlen;   in xlnx_csu_dma_write()
   591   uint32_t mlen = MIN(size, len) & (~3); /* Size is word aligned */   in xlnx_csu_dma_stream_push() local
   600   if (len && (xlnx_csu_dma_is_paused(s) || mlen == 0)) {   in xlnx_csu_dma_stream_push()
   607   if (xlnx_csu_dma_write(s, buf, mlen) != mlen) {   in xlnx_csu_dma_stream_push()
   611   xlnx_csu_dma_advance(s, mlen);   in xlnx_csu_dma_stream_push()
   614   return mlen;   in xlnx_csu_dma_stream_push()
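The xlnx_csu_dma_read()/xlnx_csu_dma_write() excerpts split a transfer of len bytes into pieces no larger than the configured bus width, advancing by mlen each pass. A standalone sketch of that chunking loop follows; memcpy stands in for the real address-space access, and the 4-byte width is an assumption for illustration.

    /* Sketch of the width-limited chunking loop visible in the
     * xlnx_csu_dma.c excerpt. */
    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define MIN(a, b) ((a) < (b) ? (a) : (b))

    static void dma_write_chunks(const uint8_t *buf, uint32_t len,
                                 uint32_t width, uint8_t *dst)
    {
        for (uint32_t i = 0; i < len; ) {
            uint32_t mlen = MIN(len - i, width);   /* at most one bus word */

            memcpy(dst + i, buf, mlen);   /* stand-in for the bus access */
            buf += mlen;
            i += mlen;
        }
    }

    int main(void)
    {
        uint8_t src[10] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
        uint8_t dst[10] = { 0 };

        dma_write_chunks(src, sizeof(src), 4 /* assumed bus width */, dst);
        printf("last byte copied: %u\n", dst[9]);
        return 0;
    }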
/qemu/hw/hyperv/ |
H A D | vmbus.c |
   371   dma_addr_t mlen = pgleft;   in gpadl_iter_io() local
   377   iter->map = dma_memory_map(iter->as, maddr, &mlen, iter->dir,   in gpadl_iter_io()
   379   if (mlen != pgleft) {   in gpadl_iter_io()
   380   dma_memory_unmap(iter->as, iter->map, mlen, iter->dir, 0);   in gpadl_iter_io()
   481   dma_addr_t mlen = MIN(sg[i].len - off, len);   in vmbus_map_sgl() local
   483   len -= mlen;   in vmbus_map_sgl()
   486   for (; mlen; ret_cnt++) {   in vmbus_map_sgl()
   487   dma_addr_t l = mlen;   in vmbus_map_sgl()
   503   mlen -= l;   in vmbus_map_sgl()
   569   dma_addr_t mlen = sizeof(*rb);   in ringbuf_map_hdr() local
   [all …]
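The gpadl_iter_io() excerpt maps a region, checks whether the mapping came back shorter than requested, and releases it if so. Below is a standalone sketch of that defensive pattern; try_map()/unmap() are hypothetical helpers standing in for dma_memory_map()/dma_memory_unmap(), and the 4 KiB cap is an invented constraint for the demo.

    /* Sketch of the short-mapping check and fallback seen in the
     * vmbus.c excerpt. */
    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Pretend mapper: may satisfy less than the requested length. */
    static void *try_map(uint64_t addr, size_t *len)
    {
        (void)addr;
        if (*len > 4096) {
            *len = 4096;
        }
        return malloc(*len);
    }

    static void unmap(void *p, size_t len)
    {
        (void)len;
        free(p);
    }

    int main(void)
    {
        size_t want = 8192;
        size_t mlen = want;
        void *map = try_map(0x1000, &mlen);

        if (map && mlen != want) {
            /* Short mapping: release it and fall back to a slower path. */
            unmap(map, mlen);
            map = NULL;
        }

        printf(map ? "direct mapping used\n" : "falling back to slow path\n");
        if (map) {
            unmap(map, mlen);
        }
        return 0;
    }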