| /linux/include/trace/events/ |
| dma.h |
     38  TP_PROTO(struct device *dev, phys_addr_t phys_addr, dma_addr_t dma_addr,
     40  TP_ARGS(dev, phys_addr, dma_addr, size, dir, attrs),
     45  __field(u64, dma_addr)
     54  __entry->dma_addr = dma_addr;
     63  __entry->dma_addr,
     71  TP_PROTO(struct device *dev, phys_addr_t phys_addr, dma_addr_t dma_addr, \
     73  TP_ARGS(dev, phys_addr, dma_addr, size, dir, attrs))
    115  TP_PROTO(struct device *dev, void *virt_addr, dma_addr_t dma_addr,
    118  TP_ARGS(dev, virt_addr, dma_addr, size, dir, flags, attrs),
    123  __field(u64, dma_addr)
    [all …]
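The hits above are tracepoint definitions that record the bus address returned by the DMA mapping layer. A minimal sketch of the same TRACE_EVENT pattern follows; the event name, fields, and values are purely illustrative (not part of trace/events/dma.h), and the usual TRACE_SYSTEM / define_trace.h boilerplate around the header is omitted.

    #include <linux/tracepoint.h>

    /* Hypothetical event: log one mapping (phys -> dma) and its size. */
    TRACE_EVENT(example_dma_map,
    	TP_PROTO(phys_addr_t phys_addr, dma_addr_t dma_addr, size_t size),
    	TP_ARGS(phys_addr, dma_addr, size),

    	TP_STRUCT__entry(
    		__field(u64, phys_addr)
    		__field(u64, dma_addr)
    		__field(size_t, size)
    	),

    	TP_fast_assign(
    		__entry->phys_addr = phys_addr;
    		__entry->dma_addr = dma_addr;
    		__entry->size = size;
    	),

    	TP_printk("phys=0x%llx dma=0x%llx size=%zu",
    		  __entry->phys_addr, __entry->dma_addr, __entry->size)
    );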
|
| habanalabs.h |
     54  TP_PROTO(struct device *dev, u64 cpu_addr, u64 dma_addr, size_t size, const char *caller),
     56  TP_ARGS(dev, cpu_addr, dma_addr, size, caller),
     61  __field(u64, dma_addr)
     69  __entry->dma_addr = dma_addr;
     77  __entry->dma_addr,
     83  TP_PROTO(struct device *dev, u64 cpu_addr, u64 dma_addr, size_t size, const char *caller),
     84  TP_ARGS(dev, cpu_addr, dma_addr, size, caller));
     87  TP_PROTO(struct device *dev, u64 cpu_addr, u64 dma_addr, size_t size, const char *caller),
     88  TP_ARGS(dev, cpu_addr, dma_addr, size, caller));
     91  TP_PROTO(struct device *dev, u64 phys_addr, u64 dma_addr, size_t len,
    [all …]
|
| /linux/drivers/net/ethernet/cisco/enic/ |
| enic_res.h |
     34  void *os_buf, dma_addr_t dma_addr, unsigned int len,  in enic_queue_wq_desc_ex() argument
     45  (u64)dma_addr | VNIC_PADDR_TARGET,  in enic_queue_wq_desc_ex()
     55  vnic_wq_post(wq, os_buf, dma_addr, len, sop, eop, desc_skip_cnt,  in enic_queue_wq_desc_ex()
     60  void *os_buf, dma_addr_t dma_addr, unsigned int len,  in enic_queue_wq_desc_cont() argument
     63  enic_queue_wq_desc_ex(wq, os_buf, dma_addr, len,  in enic_queue_wq_desc_cont()
     69  dma_addr_t dma_addr, unsigned int len, int vlan_tag_insert,  in enic_queue_wq_desc() argument
     72  enic_queue_wq_desc_ex(wq, os_buf, dma_addr, len,  in enic_queue_wq_desc()
     79  void *os_buf, dma_addr_t dma_addr, unsigned int len,  in enic_queue_wq_desc_csum() argument
     83  enic_queue_wq_desc_ex(wq, os_buf, dma_addr, len,  in enic_queue_wq_desc_csum()
     91  void *os_buf, dma_addr_t dma_addr, unsigned int len,  in enic_queue_wq_desc_csum_l4() argument
    [all …]
|
| /linux/sound/soc/bcm/ |
| bcm63xx-pcm-whistler.c |
     20  dma_addr_t dma_addr;  member
     26  dma_addr_t dma_addr;  member
    153  dma_desc->dma_addr = runtime->dma_addr;  in bcm63xx_pcm_prepare()
    168  regmap_write(regmap_i2s, regaddr_descaddr, dma_desc->dma_addr);  in bcm63xx_pcm_prepare()
    181  prtd->dma_addr_next = substream->runtime->dma_addr;  in bcm63xx_pcm_pointer()
    184  prtd->dma_addr_next - substream->runtime->dma_addr);  in bcm63xx_pcm_pointer()
    274  dma_desc->dma_addr +=  in i2s_dma_isr()
    278  if (dma_desc->dma_addr - runtime->dma_addr >=  in i2s_dma_isr()
    280  dma_desc->dma_addr = runtime->dma_addr;  in i2s_dma_isr()
    284  prtd->dma_addr = dma_desc->dma_addr;  in i2s_dma_isr()
    [all …]
|
| /linux/drivers/net/ethernet/apm/xgene-v2/ |
| ring.c |
     24  next_dma = ring->dma_addr + (offset * XGENE_ENET_DESC_SIZE);  in xge_setup_desc()
     37  dma_addr_t dma_addr = ring->dma_addr;  in xge_update_tx_desc_addr() local
     39  xge_wr_csr(pdata, DMATXDESCL, dma_addr);  in xge_update_tx_desc_addr()
     40  xge_wr_csr(pdata, DMATXDESCH, upper_32_bits(dma_addr));  in xge_update_tx_desc_addr()
     49  dma_addr_t dma_addr = ring->dma_addr;  in xge_update_rx_desc_addr() local
     51  xge_wr_csr(pdata, DMARXDESCL, dma_addr);  in xge_update_rx_desc_addr()
     52  xge_wr_csr(pdata, DMARXDESCH, upper_32_bits(dma_addr));  in xge_update_rx_desc_addr()
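xge_update_tx_desc_addr() and xge_update_rx_desc_addr() above split a 64-bit descriptor base into two 32-bit CSR writes. A generic sketch of that idiom is below; the register offsets and function name are invented for illustration, and real drivers use their own CSR accessors.

    #include <linux/io.h>
    #include <linux/kernel.h>
    #include <linux/dma-mapping.h>

    /* Hypothetical LO/HI register pair for a descriptor ring base address. */
    #define EXAMPLE_DESC_ADDR_LO	0x10
    #define EXAMPLE_DESC_ADDR_HI	0x14

    static void example_write_ring_base(void __iomem *csr, dma_addr_t dma_addr)
    {
    	/* Program low word then high word; some devices latch the address
    	 * on the second write, so the order can matter. */
    	writel(lower_32_bits(dma_addr), csr + EXAMPLE_DESC_ADDR_LO);
    	writel(upper_32_bits(dma_addr), csr + EXAMPLE_DESC_ADDR_HI);
    }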
|
| main.c |
     72  dma_addr_t dma_addr;  in xge_refill_buffers() local
     84  dma_addr = dma_map_single(dev, skb->data, len, DMA_FROM_DEVICE);  in xge_refill_buffers()
     85  if (dma_mapping_error(dev, dma_addr)) {  in xge_refill_buffers()
     92  ring->pkt_info[tail].dma_addr = dma_addr;  in xge_refill_buffers()
     99  upper_32_bits(dma_addr)));  in xge_refill_buffers()
    102  raw_desc->m0 = cpu_to_le64(SET_BITS(PKT_ADDRL, dma_addr) |  in xge_refill_buffers()
    176  static dma_addr_t dma_addr;  in xge_start_xmit() local
    193  pkt_buf = dma_alloc_coherent(dev, XGENE_ENET_STD_MTU, &dma_addr,  in xge_start_xmit()
    206  upper_32_bits(dma_addr)));  in xge_start_xmit()
    209  tx_ring->pkt_info[tail].dma_addr = dma_addr;  in xge_start_xmit()
    [all …]
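xge_refill_buffers() maps each freshly allocated receive buffer with dma_map_single() and checks the result with dma_mapping_error() before storing the bus address for the descriptor. A stripped-down sketch of that pattern, assuming a hypothetical per-slot structure (the names here are not from the driver):

    #include <linux/dma-mapping.h>
    #include <linux/netdevice.h>
    #include <linux/skbuff.h>

    /* Hypothetical per-slot bookkeeping; real drivers keep this in their ring. */
    struct example_rx_slot {
    	struct sk_buff *skb;
    	dma_addr_t dma_addr;
    };

    static int example_refill_one(struct device *dev, struct net_device *ndev,
    			      struct example_rx_slot *slot, unsigned int len)
    {
    	struct sk_buff *skb;
    	dma_addr_t dma_addr;

    	skb = netdev_alloc_skb(ndev, len);
    	if (!skb)
    		return -ENOMEM;

    	dma_addr = dma_map_single(dev, skb->data, len, DMA_FROM_DEVICE);
    	if (dma_mapping_error(dev, dma_addr)) {
    		dev_kfree_skb_any(skb);
    		return -ENOMEM;
    	}

    	slot->skb = skb;
    	slot->dma_addr = dma_addr;	/* written into the RX descriptor */
    	return 0;
    }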
|
| /linux/drivers/crypto/marvell/octeontx2/ |
| otx2_cpt_reqmgr.h |
    107  dma_addr_t dma_addr;  member
    197  if (req->out[i].dma_addr)  in otx2_cpt_info_destroy()
    199  req->out[i].dma_addr,  in otx2_cpt_info_destroy()
    205  if (req->in[i].dma_addr)  in otx2_cpt_info_destroy()
    207  req->in[i].dma_addr,  in otx2_cpt_info_destroy()
    231  list[i].dma_addr = dma_map_single(&pdev->dev, list[i].vptr,  in setup_sgio_components()
    234  if (unlikely(dma_mapping_error(&pdev->dev, list[i].dma_addr))) {  in setup_sgio_components()
    246  sg_ptr->ptr0 = cpu_to_be64(list[i * SG_COMPS_MAX + 0].dma_addr);  in setup_sgio_components()
    247  sg_ptr->ptr1 = cpu_to_be64(list[i * SG_COMPS_MAX + 1].dma_addr);  in setup_sgio_components()
    248  sg_ptr->ptr2 = cpu_to_be64(list[i * SG_COMPS_MAX + 2].dma_addr);  in setup_sgio_components()
    [all …]
|
| /linux/arch/mips/sgi-ip32/ |
| ip32-dma.c |
     23  dma_addr_t dma_addr = paddr & RAM_OFFSET_MASK;  in phys_to_dma() local
     26  dma_addr += CRIME_HI_MEM_BASE;  in phys_to_dma()
     27  return dma_addr;  in phys_to_dma()
     30  phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)  in dma_to_phys() argument
     32  phys_addr_t paddr = dma_addr & RAM_OFFSET_MASK;  in dma_to_phys()
     34  if (dma_addr >= 256*1024*1024)  in dma_to_phys()
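ip32-dma.c provides the architecture hooks that translate between CPU physical addresses and CRIME bus addresses. A hedged sketch of the general shape of such hooks for an imaginary platform with a single fixed bus offset is below; the mask and offset constants are invented, not the IP32 values, and these overrides only apply when the architecture selects CONFIG_ARCH_HAS_PHYS_TO_DMA.

    #include <linux/dma-direct.h>

    /* Invented constants for an imaginary platform. */
    #define EXAMPLE_RAM_MASK	0x3fffffffUL
    #define EXAMPLE_BUS_OFFSET	0x40000000UL

    /* CPU physical address -> address the device must use on the bus. */
    dma_addr_t phys_to_dma(struct device *dev, phys_addr_t paddr)
    {
    	return (paddr & EXAMPLE_RAM_MASK) + EXAMPLE_BUS_OFFSET;
    }

    /* Bus address seen by the device -> CPU physical address. */
    phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)
    {
    	return (phys_addr_t)((dma_addr - EXAMPLE_BUS_OFFSET) & EXAMPLE_RAM_MASK);
    }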
|
| /linux/include/linux/ |
| dma-direct.h |
     43  dma_addr_t dma_addr)  in translate_dma_to_phys() argument
     48  u64 offset = dma_addr - m->dma_start;  in translate_dma_to_phys()
     50  if (dma_addr >= m->dma_start && offset < m->size)  in translate_dma_to_phys()
    104  static inline phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)  in dma_to_phys() argument
    108  dma_addr = dma_addr_canonical(dma_addr);  in dma_to_phys()
    110  paddr = translate_dma_to_phys(dev, dma_addr);  in dma_to_phys()
    112  paddr = dma_addr;  in dma_to_phys()
    145  dma_addr_t dma_addr, unsigned long attrs);
    149  struct page *page, dma_addr_t dma_addr,
|
| /linux/drivers/net/ethernet/freescale/dpaa2/ |
| dpaa2-eth-trace.h |
    116  dma_addr_t dma_addr,
    122  TP_ARGS(netdev, vaddr, size, dma_addr, map_size, bpid),
    132  __field(dma_addr_t, dma_addr)
    144  __entry->dma_addr = dma_addr;
    157  &__entry->dma_addr,
    167  dma_addr_t dma_addr,
    171  TP_ARGS(netdev, vaddr, size, dma_addr, map_size, bpid)
    179  dma_addr_t dma_addr,
    183  TP_ARGS(netdev, vaddr, size, dma_addr, map_size, bpid)
|
| /linux/drivers/gpu/drm/xe/ |
| xe_res_cursor.h |
     59  const struct drm_pagemap_addr *dma_addr;  member
     95  cur->dma_addr = NULL;  in xe_res_first()
    173  const struct drm_pagemap_addr *addr = cur->dma_addr;  in __xe_res_dma_next()
    192  cur->dma_addr = addr;  in __xe_res_dma_next()
    216  cur->dma_addr = NULL;  in xe_res_first_sg()
    232  static inline void xe_res_first_dma(const struct drm_pagemap_addr *dma_addr,  in xe_res_first_dma() argument
    236  XE_WARN_ON(!dma_addr);  in xe_res_first_dma()
    243  cur->dma_seg_size = PAGE_SIZE << dma_addr->order;  in xe_res_first_dma()
    246  cur->dma_addr = dma_addr;  in xe_res_first_dma()
    278  if (cur->dma_addr) {  in xe_res_next()
    [all …]
|
| /linux/drivers/media/platform/mediatek/vcodec/decoder/vdec/ |
| vdec_h264_req_multi_if.c |
    138  u64 dma_addr;  member
    494  inst->vsi_ext->bs.dma_addr = (u64)bs->dma_addr;  in vdec_h264_slice_setup_lat_buffer_ext()
    499  inst->vsi_ext->mv_buf_dma[i].dma_addr = mem->dma_addr;  in vdec_h264_slice_setup_lat_buffer_ext()
    502  inst->vsi_ext->ube.dma_addr = lat_buf->ctx->msg_queue.wdma_addr.dma_addr;  in vdec_h264_slice_setup_lat_buffer_ext()
    505  inst->vsi_ext->row_info.dma_addr = 0;  in vdec_h264_slice_setup_lat_buffer_ext()
    508  inst->vsi_ext->err_map.dma_addr = lat_buf->wdma_err_addr.dma_addr;  in vdec_h264_slice_setup_lat_buffer_ext()
    511  inst->vsi_ext->slice_bc.dma_addr = lat_buf->slice_bc_addr.dma_addr;  in vdec_h264_slice_setup_lat_buffer_ext()
    515  inst->vsi_ext->trans.dma_addr = inst->ctx->msg_queue.wdma_wptr_addr;  in vdec_h264_slice_setup_lat_buffer_ext()
    535  y_fb_dma = (u64)fb->base_y.dma_addr;  in vdec_h264_slice_setup_core_buffer_ext()
    539  c_fb_dma = (u64)fb->base_c.dma_addr;  in vdec_h264_slice_setup_core_buffer_ext()
    [all …]
|
| /linux/kernel/dma/ |
| direct.h |
     14  void *cpu_addr, dma_addr_t dma_addr, size_t size,
     18  void *cpu_addr, dma_addr_t dma_addr, size_t size,
     20  bool dma_direct_need_sync(struct device *dev, dma_addr_t dma_addr);
     87  dma_addr_t dma_addr;  in dma_direct_map_phys() local
     97  dma_addr = phys;  in dma_direct_map_phys()
     98  if (unlikely(!dma_capable(dev, dma_addr, size, false)))  in dma_direct_map_phys()
    101  dma_addr = phys_to_dma(dev, phys);  in dma_direct_map_phys()
    102  if (unlikely(!dma_capable(dev, dma_addr, size, true)) ||  in dma_direct_map_phys()
    114  return dma_addr;  in dma_direct_map_phys()
    120  &dma_addr, size, *dev->dma_mask, dev->bus_dma_limit);  in dma_direct_map_phys()
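dma_direct_map_phys() above first translates the physical address with phys_to_dma() and then checks the result with dma_capable() against the device's DMA mask and bus limit. A condensed illustration of just that decision; the function name is invented, and the real code's error reporting and swiotlb bounce path are omitted.

    #include <linux/dma-direct.h>

    static dma_addr_t example_direct_map(struct device *dev, phys_addr_t phys,
    				     size_t size)
    {
    	dma_addr_t dma_addr = phys_to_dma(dev, phys);

    	/* Can the device actually reach [dma_addr, dma_addr + size)? */
    	if (unlikely(!dma_capable(dev, dma_addr, size, true)))
    		return DMA_MAPPING_ERROR;	/* real code may bounce via swiotlb */

    	return dma_addr;
    }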
|
| debug.h |
     13  size_t size, int direction, dma_addr_t dma_addr,
     27  dma_addr_t dma_addr, void *virt,
     50  dma_addr_t dma_addr,
     54  dma_addr_t dma_addr);
     58  dma_addr_t dma_addr, unsigned long attrs)  in debug_dma_map_phys() argument
     80  dma_addr_t dma_addr, void *virt,  in debug_dma_alloc_coherent() argument
    116  dma_addr_t dma_addr,  in debug_dma_alloc_pages() argument
    123  dma_addr_t dma_addr)  in debug_dma_free_pages() argument
|
| /linux/drivers/net/ethernet/netronome/nfp/nfdk/ |
| dp.c |
    267  dma_addr_t dma_addr;  in nfp_nfdk_tx() local
    313  dma_addr = dma_map_single(dp->dev, skb->data, dma_len, DMA_TO_DEVICE);  in nfp_nfdk_tx()
    314  if (dma_mapping_error(dp->dev, dma_addr))  in nfp_nfdk_tx()
    320  txbuf->dma_addr = dma_addr;  in nfp_nfdk_tx()
    338  nfp_desc_set_dma_addr_48b(txd, dma_addr);  in nfp_nfdk_tx()
    348  dma_addr += tmp_dlen + 1;  in nfp_nfdk_tx()
    363  nfp_desc_set_dma_addr_48b(txd, dma_addr);  in nfp_nfdk_tx()
    366  dma_addr += dlen_type + 1;  in nfp_nfdk_tx()
    374  dma_addr = skb_frag_dma_map(dp->dev, frag, 0, dma_len,  in nfp_nfdk_tx()
    376  if (dma_mapping_error(dp->dev, dma_addr))  in nfp_nfdk_tx()
    [all …]
|
| /linux/drivers/net/ethernet/sfc/ |
| tx_tso.c |
     65  dma_addr_t dma_addr;  member
    103  dma_addr_t dma_addr, unsigned int len,  in efx_tx_queue_insert() argument
    119  buffer->dma_addr = dma_addr;  in efx_tx_queue_insert()
    122  dma_addr, len);  in efx_tx_queue_insert()
    130  dma_addr += dma_len;  in efx_tx_queue_insert()
    175  dma_addr_t dma_addr;  in tso_start() local
    198  dma_addr = dma_map_single(dma_dev, skb->data,  in tso_start()
    200  st->header_dma_addr = dma_addr;  in tso_start()
    202  st->dma_addr = dma_addr + header_len;  in tso_start()
    205  return unlikely(dma_mapping_error(dma_dev, dma_addr)) ? -ENOMEM : 0;  in tso_start()
    [all …]
|
| mcdi_functions.c |
     80  dma_addr_t dma_addr;  in efx_mcdi_ev_init() local
    115  dma_addr = channel->eventq.dma_addr;  in efx_mcdi_ev_init()
    117  MCDI_SET_ARRAY_QWORD(inbuf, INIT_EVQ_IN_DMA_ADDR, i, dma_addr);  in efx_mcdi_ev_init()
    118  dma_addr += EFX_BUF_SIZE;  in efx_mcdi_ev_init()
    172  dma_addr_t dma_addr;  in efx_mcdi_tx_init() local
    185  dma_addr = tx_queue->txd.dma_addr;  in efx_mcdi_tx_init()
    188  tx_queue->queue, entries, (u64)dma_addr);  in efx_mcdi_tx_init()
    191  MCDI_SET_ARRAY_QWORD(inbuf, INIT_TXQ_IN_DMA_ADDR, i, dma_addr);  in efx_mcdi_tx_init()
    192  dma_addr += EFX_BUF_SIZE;  in efx_mcdi_tx_init()
    285  dma_addr_t dma_addr;  in efx_mcdi_rx_init() local
    [all …]
|
| tx_common.c |
    166  dma_addr_t unmap_addr = buffer->dma_addr - buffer->dma_offset;  in efx_dequeue_buffer()
    329  dma_addr_t dma_addr, size_t len)  in efx_tx_map_chunk() argument
    340  dma_len = nic_type->tx_limit_len(tx_queue, dma_addr, len);  in efx_tx_map_chunk()
    345  buffer->dma_addr = dma_addr;  in efx_tx_map_chunk()
    348  dma_addr += dma_len;  in efx_tx_map_chunk()
    375  dma_addr_t dma_addr, unmap_addr;  in efx_tx_map_data() local
    384  dma_addr = dma_map_single(dma_dev, skb->data, len, DMA_TO_DEVICE);  in efx_tx_map_data()
    387  unmap_addr = dma_addr;  in efx_tx_map_data()
    389  if (unlikely(dma_mapping_error(dma_dev, dma_addr)))  in efx_tx_map_data()
    400  efx_tx_map_chunk(tx_queue, dma_addr, header_len);  in efx_tx_map_data()
    [all …]
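efx_tx_map_data() maps the linear head of the skb with dma_map_single() and, in the surrounding code, each paged fragment with skb_frag_dma_map(), feeding every piece to efx_tx_map_chunk(). A hedged, self-contained sketch of the same walk; descriptor writing is reduced to a callback, the function names are invented, and the unwind-on-error a real driver performs is omitted.

    #include <linux/dma-mapping.h>
    #include <linux/skbuff.h>

    /* Map an skb's head and fragments; "emit" stands in for writing one TX
     * descriptor per mapped piece. */
    static int example_map_skb(struct device *dev, struct sk_buff *skb,
    			   void (*emit)(dma_addr_t addr, unsigned int len))
    {
    	unsigned int i, len = skb_headlen(skb);
    	dma_addr_t dma_addr;

    	dma_addr = dma_map_single(dev, skb->data, len, DMA_TO_DEVICE);
    	if (dma_mapping_error(dev, dma_addr))
    		return -ENOMEM;
    	emit(dma_addr, len);

    	for (i = 0; i < skb_shinfo(skb)->nr_frags; i++) {
    		const skb_frag_t *frag = &skb_shinfo(skb)->frags[i];

    		len = skb_frag_size(frag);
    		dma_addr = skb_frag_dma_map(dev, frag, 0, len, DMA_TO_DEVICE);
    		if (dma_mapping_error(dev, dma_addr))
    			return -ENOMEM;	/* real drivers unmap what was mapped */
    		emit(dma_addr, len);
    	}

    	return 0;
    }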
|
| /linux/drivers/net/ethernet/sfc/siena/ |
| tx_common.c |
    129  dma_addr_t unmap_addr = buffer->dma_addr - buffer->dma_offset;  in efx_dequeue_buffer()
    289  dma_addr_t dma_addr, size_t len)  in efx_siena_tx_map_chunk() argument
    300  dma_len = nic_type->tx_limit_len(tx_queue, dma_addr, len);  in efx_siena_tx_map_chunk()
    305  buffer->dma_addr = dma_addr;  in efx_siena_tx_map_chunk()
    308  dma_addr += dma_len;  in efx_siena_tx_map_chunk()
    335  dma_addr_t dma_addr, unmap_addr;  in efx_siena_tx_map_data() local
    344  dma_addr = dma_map_single(dma_dev, skb->data, len, DMA_TO_DEVICE);  in efx_siena_tx_map_data()
    347  unmap_addr = dma_addr;  in efx_siena_tx_map_data()
    349  if (unlikely(dma_mapping_error(dma_dev, dma_addr)))  in efx_siena_tx_map_data()
    360  efx_siena_tx_map_chunk(tx_queue, dma_addr, header_len);  in efx_siena_tx_map_data()
    [all …]
|
| /linux/drivers/xen/ |
| swiotlb-xen.c |
     72  dma_addr_t dma_addr)  in xen_dma_to_phys() argument
     74  return xen_bus_to_phys(dev, dma_to_phys(dev, dma_addr));  in xen_dma_to_phys()
    100  dma_addr_t dma_addr)  in xen_swiotlb_find_pool() argument
    102  unsigned long bfn = XEN_PFN_DOWN(dma_to_phys(dev, dma_addr));  in xen_swiotlb_find_pool()
    289  xen_swiotlb_sync_single_for_cpu(struct device *dev, dma_addr_t dma_addr,  in xen_swiotlb_sync_single_for_cpu() argument
    292  phys_addr_t paddr = xen_dma_to_phys(dev, dma_addr);  in xen_swiotlb_sync_single_for_cpu()
    296  if (pfn_valid(PFN_DOWN(dma_to_phys(dev, dma_addr))))  in xen_swiotlb_sync_single_for_cpu()
    299  xen_dma_sync_for_cpu(dev, dma_addr, size, dir);  in xen_swiotlb_sync_single_for_cpu()
    302  pool = xen_swiotlb_find_pool(dev, dma_addr);  in xen_swiotlb_sync_single_for_cpu()
    308  xen_swiotlb_sync_single_for_device(struct device *dev, dma_addr_t dma_addr,  in xen_swiotlb_sync_single_for_device() argument
    [all …]
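The xen_swiotlb_sync_single_for_cpu()/_for_device() pair above is one backend behind the generic dma_sync_single_for_cpu()/_for_device() calls. From a driver's point of view, that API looks like the hedged sketch below, for a hypothetical long-lived RX buffer that is handed back and forth without being remapped each time.

    #include <linux/dma-mapping.h>

    static void example_rx_complete(struct device *dev, dma_addr_t dma_addr,
    				void *buf, size_t len)
    {
    	/* Make the device's writes visible to the CPU before reading buf. */
    	dma_sync_single_for_cpu(dev, dma_addr, len, DMA_FROM_DEVICE);

    	/* ... inspect or copy the packet data in buf ... */

    	/* Hand the buffer back to the device for the next receive. */
    	dma_sync_single_for_device(dev, dma_addr, len, DMA_FROM_DEVICE);
    }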
|
| /linux/drivers/net/ethernet/qualcomm/emac/ |
| emac-mac.c |
    305  writel(upper_32_bits(adpt->tx_q.tpd.dma_addr),  in emac_mac_dma_rings_config()
    308  writel(lower_32_bits(adpt->tx_q.tpd.dma_addr),  in emac_mac_dma_rings_config()
    315  writel(upper_32_bits(adpt->rx_q.rfd.dma_addr),  in emac_mac_dma_rings_config()
    318  writel(lower_32_bits(adpt->rx_q.rfd.dma_addr),  in emac_mac_dma_rings_config()
    320  writel(lower_32_bits(adpt->rx_q.rrd.dma_addr),  in emac_mac_dma_rings_config()
    598  if (tpbuf->dma_addr) {  in emac_tx_q_descs_free()
    600  tpbuf->dma_addr, tpbuf->length,  in emac_tx_q_descs_free()
    602  tpbuf->dma_addr = 0;  in emac_tx_q_descs_free()
    635  if (rfbuf->dma_addr) {  in emac_rx_q_free_descs()
    636  dma_unmap_single(dev, rfbuf->dma_addr, rfbuf->length,  in emac_rx_q_free_descs()
    [all …]
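emac_mac_dma_rings_config() programs the upper and lower halves of ring base addresses that come from a coherent allocation. A minimal sketch of allocating and freeing such a descriptor ring with dma_alloc_coherent()/dma_free_coherent(); the structure and names are invented for illustration.

    #include <linux/dma-mapping.h>

    struct example_ring {
    	void *desc;		/* CPU virtual address of the descriptors */
    	dma_addr_t dma_addr;	/* matching bus address for the device */
    	size_t size;
    };

    static int example_ring_alloc(struct device *dev, struct example_ring *ring,
    			      size_t size)
    {
    	ring->desc = dma_alloc_coherent(dev, size, &ring->dma_addr, GFP_KERNEL);
    	if (!ring->desc)
    		return -ENOMEM;

    	ring->size = size;
    	return 0;
    }

    static void example_ring_free(struct device *dev, struct example_ring *ring)
    {
    	dma_free_coherent(dev, ring->size, ring->desc, ring->dma_addr);
    	ring->desc = NULL;
    }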
|
| /linux/drivers/net/ethernet/netronome/nfp/nfd3/ |
| dp.c |
    266  dma_addr_t dma_addr;  in nfp_nfd3_tx() local
    302  dma_addr = dma_map_single(dp->dev, skb->data, skb_headlen(skb),  in nfp_nfd3_tx()
    304  if (dma_mapping_error(dp->dev, dma_addr))  in nfp_nfd3_tx()
    312  txbuf->dma_addr = dma_addr;  in nfp_nfd3_tx()
    321  nfp_desc_set_dma_addr_40b(txd, dma_addr);  in nfp_nfd3_tx()
    350  dma_addr = skb_frag_dma_map(dp->dev, frag, 0,  in nfp_nfd3_tx()
    352  if (dma_mapping_error(dp->dev, dma_addr))  in nfp_nfd3_tx()
    357  tx_ring->txbufs[wr_idx].dma_addr = dma_addr;  in nfp_nfd3_tx()
    362  nfp_desc_set_dma_addr_40b(txd, dma_addr);  in nfp_nfd3_tx()
    390  dma_unmap_page(dp->dev, tx_ring->txbufs[wr_idx].dma_addr,  in nfp_nfd3_tx()
    [all …]
|
| /linux/drivers/media/pci/solo6x10/ |
| solo6x10-p2m.c |
     32  dma_addr_t dma_addr;  in solo_p2m_dma() local
     40  dma_addr = dma_map_single(&solo_dev->pdev->dev, sys_addr, size,  in solo_p2m_dma()
     42  if (dma_mapping_error(&solo_dev->pdev->dev, dma_addr))  in solo_p2m_dma()
     45  ret = solo_p2m_dma_t(solo_dev, wr, dma_addr, ext_addr, size,  in solo_p2m_dma()
     48  dma_unmap_single(&solo_dev->pdev->dev, dma_addr, size,  in solo_p2m_dma()
     93  desc[1].dma_addr);  in solo_p2m_dma_desc()
    125  dma_addr_t dma_addr, u32 ext_addr, u32 size,  in solo_p2m_fill_desc() argument
    128  WARN_ON_ONCE(dma_addr & 0x03);  in solo_p2m_fill_desc()
    141  desc->dma_addr = dma_addr;  in solo_p2m_fill_desc()
    146  dma_addr_t dma_addr, u32 ext_addr, u32 size,  in solo_p2m_dma_t() argument
    [all …]
|
| /linux/drivers/accel/ivpu/ |
| ivpu_mmu_context.c |
     49  dma_addr_t dma_addr;  in ivpu_pgtable_alloc_page() local
     59  dma_addr = dma_map_page(vdev->drm.dev, page, 0, PAGE_SIZE, DMA_BIDIRECTIONAL);  in ivpu_pgtable_alloc_page()
     60  if (dma_mapping_error(vdev->drm.dev, dma_addr))  in ivpu_pgtable_alloc_page()
     68  *dma = dma_addr;  in ivpu_pgtable_alloc_page()
     72  dma_unmap_page(vdev->drm.dev, dma_addr, PAGE_SIZE, DMA_BIDIRECTIONAL);  in ivpu_pgtable_alloc_page()
     79  static void ivpu_pgtable_free_page(struct ivpu_device *vdev, u64 *cpu_addr, dma_addr_t dma_addr)  in ivpu_pgtable_free_page() argument
     86  dma_unmap_page(vdev->drm.dev, dma_addr & ~IVPU_MMU_ENTRY_FLAGS_MASK, PAGE_SIZE,  in ivpu_pgtable_free_page()
    240  u64 vpu_addr, dma_addr_t dma_addr, u64 prot)  in ivpu_mmu_context_map_page() argument
    268  pte[pte_idx] = dma_addr | prot;  in ivpu_mmu_context_map_page()
    275  dma_addr_t dma_addr, u64 prot)  in ivpu_mmu_context_map_cont_64k() argument
    [all …]
|
| /linux/arch/mips/loongson2ef/lemote-2f/ |
| dma.c |
      9  phys_addr_t dma_to_phys(struct device *dev, dma_addr_t dma_addr)  in dma_to_phys() argument
     11  if (dma_addr > 0x8fffffff)  in dma_to_phys()
     12  return dma_addr;  in dma_to_phys()
     13  return dma_addr & 0x0fffffff;  in dma_to_phys()
|