/linux/drivers/ntb/hw/intel/

ntb_hw_gen3.c
  156  bar_addr = ioread64(mmio + GEN3_IMBAR1XLMT_OFFSET);  in gen3_setup_b2b_mw()
  161  bar_addr = ioread64(mmio + GEN3_IMBAR2XLMT_OFFSET);  in gen3_setup_b2b_mw()
  334  u.v64 = ioread64(mmio + GEN3_IMBAR1XBASE_OFFSET);  in ndev_ntb3_debugfs_read()
  338  u.v64 = ioread64(mmio + GEN3_IMBAR2XBASE_OFFSET);  in ndev_ntb3_debugfs_read()
  342  u.v64 = ioread64(mmio + GEN3_IMBAR1XLMT_OFFSET);  in ndev_ntb3_debugfs_read()
  346  u.v64 = ioread64(mmio + GEN3_IMBAR2XLMT_OFFSET);  in ndev_ntb3_debugfs_read()
  354  u.v64 = ioread64(mmio + GEN3_EMBAR1XBASE_OFFSET);  in ndev_ntb3_debugfs_read()
  358  u.v64 = ioread64(mmio + GEN3_EMBAR2XBASE_OFFSET);  in ndev_ntb3_debugfs_read()
  362  u.v64 = ioread64(mmio + GEN3_EMBAR1XLMT_OFFSET);  in ndev_ntb3_debugfs_read()
  366  u.v64 = ioread64(mmi... (truncated)  in ndev_ntb3_debugfs_read()
  [all ...]

ntb_hw_gen4.c
  110  bar_addr = ioread64(mmio + GEN4_IM23XLMT_OFFSET);  in gen4_setup_b2b_mw()
  115  bar_addr = ioread64(mmio + GEN4_IM45XLMT_OFFSET);  in gen4_setup_b2b_mw()
  291  u.v64 = ioread64(mmio + GEN4_IM23XBASE_OFFSET);  in ndev_ntb4_debugfs_read()
  295  u.v64 = ioread64(mmio + GEN4_IM45XBASE_OFFSET);  in ndev_ntb4_debugfs_read()
  299  u.v64 = ioread64(mmio + GEN4_IM23XLMT_OFFSET);  in ndev_ntb4_debugfs_read()
  303  u.v64 = ioread64(mmio + GEN4_IM45XLMT_OFFSET);  in ndev_ntb4_debugfs_read()
  395  reg_val = ioread64(mmio + xlat_reg);  in intel_ntb4_mw_set_trans()
  405  reg_val = ioread64(mmio + limit_reg);  in intel_ntb4_mw_set_trans()

ntb_hw_gen1.c
  615  u.v64 = ioread64(mmio + bar2_off(ndev->xlat_reg->bar2_xlat, 2));  in ndev_ntb_debugfs_read()
  628  u.v64 = ioread64(mmio + bar2_off(ndev->xlat_reg->bar2_xlat, 4));  in ndev_ntb_debugfs_read()
  633  u.v64 = ioread64(mmio + bar2_off(ndev->xlat_reg->bar2_limit, 2));  in ndev_ntb_debugfs_read()
  645  u.v64 = ioread64(mmio + bar2_off(ndev->xlat_reg->bar2_limit, 4));  in ndev_ntb_debugfs_read()
  655  u.v64 = ioread64(mmio + XEON_PBAR23XLAT_OFFSET);  in ndev_ntb_debugfs_read()
  669  u.v64 = ioread64(mmio + XEON_PBAR45XLAT_OFFSET);  in ndev_ntb_debugfs_read()
  675  u.v64 = ioread64(mmio + XEON_PBAR23LMT_OFFSET);  in ndev_ntb_debugfs_read()
  689  u.v64 = ioread64(mmio + XEON_PBAR45LMT_OFFSET);  in ndev_ntb_debugfs_read()
  698  u.v64 = ioread64(mmio + XEON_SBAR0BASE_OFFSET);  in ndev_ntb_debugfs_read()
  702  u.v64 = ioread64(mmi... (truncated)  in ndev_ntb_debugfs_read()
  [all ...]

ntb_hw_gen3.h
  96  return ioread64(mmio);  in gen3_db_ioread()
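The setup paths above (gen3_setup_b2b_mw(), intel_ntb4_mw_set_trans()) read 64-bit window base/limit registers straight back out of MMIO space after programming them. A minimal sketch of that write-then-read-back idiom follows; MYDEV_MW_LMT_OFFSET, mydev_set_mw_limit(), and the assumption that the read-back exists to confirm the programmed value are illustrative only, not taken from the NTB drivers.

#include <linux/errno.h>
#include <linux/io.h>
#include <linux/io-64-nonatomic-lo-hi.h>	/* guarantees ioread64()/iowrite64() exist */

#define MYDEV_MW_LMT_OFFSET	0x590		/* hypothetical 64-bit limit register */

static int mydev_set_mw_limit(void __iomem *mmio, u64 limit)
{
	u64 readback;

	iowrite64(limit, mmio + MYDEV_MW_LMT_OFFSET);

	/* Read the register back to check that the device accepted the value. */
	readback = ioread64(mmio + MYDEV_MW_LMT_OFFSET);
	if (readback != limit)
		return -EIO;

	return 0;
}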
/linux/include/linux/

io-64-nonatomic-lo-hi.h
  102  #ifndef ioread64
  105  #define ioread64 __ioread64_lo_hi  (macro)
  107  #define ioread64 ioread64_lo_hi  (macro)

io-64-nonatomic-hi-lo.h
  102  #ifndef ioread64
  105  #define ioread64 __ioread64_hi_lo  (macro)
  107  #define ioread64 ioread64_hi_lo  (macro)

vfio_pci_core.h
  160  #ifdef ioread64
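The two io-64-nonatomic headers above are how drivers get ioread64()/iowrite64() portably: where the architecture already provides native 64-bit accessors those are used, otherwise the access is split into two 32-bit operations, low word first (lo-hi) or high word first (hi-lo). A driver picks the variant its hardware tolerates, for example lo-hi when reading the low half latches the high half of a counter. A small sketch, with a hypothetical register name:

#include <linux/io.h>
#include <linux/io-64-nonatomic-lo-hi.h>	/* ioread64() falls back to ioread64_lo_hi() */

#define FOO_CYCLE_CNT	0x18	/* hypothetical 64-bit counter register */

static u64 foo_read_cycles(void __iomem *base)
{
	/*
	 * Native 64-bit read where the architecture has one; on 32-bit
	 * builds this becomes two ioread32() calls, low word first, so
	 * the value is not read atomically.
	 */
	return ioread64(base + FOO_CYCLE_CNT);
}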
/linux/arch/alpha/include/asm/

io.h
  149  REMAP1(u64, ioread64, const)  in REMAP1()
  278  extern u64 ioread64(const void __iomem *);
  400  extern inline u64 ioread64(const void __iomem *addr)  in ioread64()  (function)
  404  ret = IO_CONCAT(__IO_PREFIX,ioread64)(addr);  in ioread64()
  433  #define ioread64 ioread64  (macro)
  543  #define ioread64be(p) swab64(ioread64(p))
  639  #define ioread64 ioread64  (macro)

io_trivial.h
  48  IO_CONCAT(__IO_PREFIX,ioread64)(const void __iomem *a)  in IO_CONCAT()  (argument)
/linux/drivers/dma/idxd/

perfmon.c
  100  hwc->config_base = ioread64(CNTRCFG_REG(idxd, idx));  in perfmon_assign_hw_event()
  101  hwc->event_base = ioread64(CNTRCFG_REG(idxd, idx));  in perfmon_assign_hw_event()
  189  event->hw.event_base = ioread64(PERFMON_TABLE_OFFSET(idxd));  in perfmon_pmu_event_init()
  207  return ioread64(CNTRDATA_REG(idxd, cntr));  in perfmon_pmu_read_counter()
  329  cntrdata = ioread64(CNTRDATA_REG(idxd, cntr));  in perfmon_pmu_event_start()
  361  cntr_cfg = ioread64(CNTRCFG_REG(idxd, cntr));  in perfmon_pmu_event_stop()
  508  perfcap.bits = ioread64(PERFCAP_REG(idxd));  in perfmon_pmu_init()

init.c
  500  offsets.bits[0] = ioread64(idxd->reg_base + IDXD_TABLE_OFFSET);  in idxd_read_table_offsets()
  501  offsets.bits[1] = ioread64(idxd->reg_base + IDXD_TABLE_OFFSET + sizeof(u64));  in idxd_read_table_offsets()
  531  idxd->hw.gen_cap.bits = ioread64(idxd->reg_base + IDXD_GENCAP_OFFSET);  in idxd_read_caps()
  552  ioread64(idxd->reg_base + IDXD_GRPCAP_OFFSET);  in idxd_read_caps()
  562  ioread64(idxd->reg_base + IDXD_ENGCAP_OFFSET);  in idxd_read_caps()
  568  idxd->hw.wq_cap.bits = ioread64(idxd->reg_base + IDXD_WQCAP_OFFSET);  in idxd_read_caps()
  579  idxd->hw.opcap.bits[i] = ioread64(idxd->reg_base + ... (truncated)  in idxd_read_caps()
  587  idxd->hw.iaa_cap.bits = ioread64(idxd->reg_base + IDXD_IAACAP_OFFSET);  in idxd_read_caps()

debugfs.c
  71  evl_status.bits = ioread64(idxd->reg_base + IDXD_EVLSTATUS_OFFSET);  in debugfs_evl_show()

device.c
  859  ioread64(idxd->reg_base + grpcfg_offset));  in idxd_group_config_write()
  866  grpcfg_offset, ioread64(idxd->reg_base + grpcfg_offset));  in idxd_group_config_write()
  873  ioread64(idxd->reg_base + grpcfg_offset));  in idxd_group_config_write()
  1178  group->grpcfg.wqs[i] = ioread64(idxd->reg_base + grpcfg_offset);  in idxd_group_load_config()
  1202  group->grpcfg.engines = ioread64(idxd->reg_base + grpcfg_offset);  in idxd_group_load_config()
  1219  group->grpcfg.flags.bits = ioread64(idxd->reg_base + grpcfg_offset);  in idxd_group_load_config()

irq.c
  370  evl_status.bits = ioread64(idxd->reg_base + IDXD_EVLSTATUS_OFFSET);  in process_evl_entries()
  459  idxd->sw_err.bits[i] = ioread64(idxd->reg_base + ... (truncated)  in idxd_misc_thread()

cdev.c
  341  status.bits = ioread64(idxd->reg_base + IDXD_EVLSTATUS_OFFSET);  in idxd_cdev_evl_drain_pasid()
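Several of the idxd hits above show a register block wider than 64 bits being read as consecutive 64-bit words (IDXD_TABLE_OFFSET and IDXD_TABLE_OFFSET + sizeof(u64) in idxd_read_table_offsets(), opcap.bits[i] in a loop in idxd_read_caps()). A generic sketch of that pattern, using made-up names rather than the real idxd register map:

#include <linux/io.h>
#include <linux/io-64-nonatomic-lo-hi.h>

#define FOO_TABLE_OFFSET	0x60	/* hypothetical 128-bit register */
#define FOO_TABLE_WORDS		2

/* Read a register wider than 64 bits as successive 64-bit MMIO words. */
static void foo_read_table(void __iomem *reg_base, u64 bits[FOO_TABLE_WORDS])
{
	int i;

	for (i = 0; i < FOO_TABLE_WORDS; i++)
		bits[i] = ioread64(reg_base + FOO_TABLE_OFFSET + i * sizeof(u64));
}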
/linux/arch/alpha/kernel/

io.c
  44  u64 ioread64(const void __iomem *addr)  in ioread64()  (function)
  48  ret = IO_CONCAT(__IO_PREFIX,ioread64)(addr);  in ioread64()
  80  EXPORT_SYMBOL(ioread64);
/linux/include/asm-generic/

io.h
  877  #ifndef ioread64
  878  #define ioread64 ioread64  (macro)
  879  static inline u64 ioread64(const volatile void __iomem *addr)  in ioread64()  (function)
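The asm-generic lines above show the guard pattern that ties the per-architecture hits in this list together: the generic helper is only defined when the architecture has not already supplied its own ioread64. A simplified sketch of that shape (not the verbatim header, which adds MMIO logging/barrier details and only provides this on 64-bit builds):

#ifndef ioread64
#define ioread64 ioread64
static inline u64 ioread64(const volatile void __iomem *addr)
{
	return readq(addr);	/* 64-bit builds can defer to readq() */
}
#endif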
/linux/drivers/ntb/hw/mscc/

ntb_hw_switchtec.c
  204  size = ioread64(&sndev->peer_shared->mw_sizes[widx]);  in switchtec_ntb_mw_get_align()
  498  u64 peer = ioread64(&sndev->peer_shared->magic);  in switchtec_ntb_link_status_update()
  630  ret = ioread64(&sndev->mmio_self_dbmsg->idb) >> sndev->db_shift;  in switchtec_ntb_db_read()
  862  part_map = ioread64(&sndev->mmio_ntb->ep_map);  in switchtec_ntb_init_sndev()
  1103  bar_addr = ioread64(&mmio_pff->pci_bar64[i]);  in crosslink_enum_partition()
  1390  u64 msg = ioread64(&sndev->mmio_self_dbmsg->imsg[i]);  in switchtec_ntb_message_isr()
/linux/arch/parisc/lib/

iomap.c
  348  u64 ioread64(const void __iomem *addr)  in ioread64()  (function)
  532  EXPORT_SYMBOL(ioread64);
/linux/drivers/dma/

fsldma.h
  242  #define fsl_ioread64(p) ioread64(p)
/linux/drivers/gpu/drm/imagination/

pvr_device.h
  561  return ioread64(pvr_dev->regs + reg);  in pvr_cr_read64()
/linux/drivers/net/ethernet/freescale/enetc/

enetc_hw.h
  496  #ifdef ioread64
  499  return ioread64(reg);  in _enetc_rd_reg64()
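The _enetc_rd_reg64() hit above sits behind an #ifdef ioread64 check: use the native 64-bit accessor when the architecture defines one, otherwise compose the value from two 32-bit reads. A hedged sketch of that pattern with a hypothetical helper name; the exact fallback (word ordering, retry on the high half) varies by driver and may differ from the real enetc code:

#include <linux/io.h>

#ifdef ioread64
static inline u64 foo_rd_reg64(const void __iomem *reg)
{
	return ioread64(reg);
}
#else
static inline u64 foo_rd_reg64(const void __iomem *reg)
{
	u32 lo, hi, tmp;

	/* Re-read the high word to catch a rollover between the two halves. */
	do {
		hi = ioread32(reg + 4);
		lo = ioread32(reg);
		tmp = ioread32(reg + 4);
	} while (hi != tmp);

	return (u64)hi << 32 | lo;
}
#endif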
/linux/drivers/vfio/pci/

vfio_pci_rdwr.c
  24  #define vfio_ioread64 ioread64
/linux/drivers/crypto/caam/

regs.h
  167  return ioread64(reg);  in rd_reg64()
/linux/drivers/gpu/drm/xe/

xe_ggtt.c
  994  return ioread64(ggtt->gsm + (offset / XE_PAGE_SIZE));  in xe_ggtt_read_pte()