Lines matching references to the union field u (Linux drivers/scsi/hptiop.c)

52 req = readl(&hba->u.itl.iop->inbound_queue);
59 writel(req, &hba->u.itl.iop->outbound_queue);
60 readl(&hba->u.itl.iop->outbound_intstatus);
90 while ((req = readl(&hba->u.itl.iop->outbound_queue)) !=
99 ((char __iomem *)hba->u.itl.iop + req);
115 struct hpt_iopmu_itl __iomem *iop = hba->u.itl.iop;
116 void __iomem *plx = hba->u.itl.plx;
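
The ITL lines above trace a request's whole life cycle: a readl of inbound_queue pops a free request-frame offset, the frame itself is addressed inside the BAR mapping at iop + offset, writing the offset back posts it, and completions return as offsets through outbound_queue. A minimal sketch of that handshake, assuming the struct layout from the matched lines and the IOPMU_QUEUE_EMPTY sentinel defined in the driver header:

    /* sketch only: hba/iop types come from hptiop.h */
    static void itl_post_one(struct hptiop_hba *hba)
    {
        void __iomem *frame;
        u32 req;

        /* pop a free request-frame offset; the queue reads back
         * IOPMU_QUEUE_EMPTY (0xffffffff) when none is available */
        req = readl(&hba->u.itl.iop->inbound_queue);
        if (req == IOPMU_QUEUE_EMPTY)
            return;

        /* the frame lives inside the BAR mapping, at iop + offset */
        frame = (char __iomem *)hba->u.itl.iop + req;
        memset_io(frame, 0, 64);    /* then fill the request fields */

        /* writing the offset back posts the request; the dummy read
         * flushes the posted PCI write */
        writel(req, &hba->u.itl.iop->inbound_queue);
        readl(&hba->u.itl.iop->outbound_intstatus);
    }
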
163 u32 inbound_head = readl(&hba->u.mv.mu->inbound_head);
169 memcpy_toio(&hba->u.mv.mu->inbound_q[inbound_head], &p, 8);
170 writel(head, &hba->u.mv.mu->inbound_head);
172 &hba->u.mv.regs->inbound_doorbell);
208 status = readl(&hba->u.mv.regs->outbound_doorbell);
209 writel(~status, &hba->u.mv.regs->outbound_doorbell);
213 msg = readl(&hba->u.mv.mu->outbound_msg);
222 while ((tag = mv_outbound_read(hba->u.mv.mu)))
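
On the MV path the outbound doorbell is a cause register acknowledged by writing back the complement, after which completed tags are drained with mv_outbound_read() until it returns zero. A sketch of that service loop; handle_completed_tag() is a hypothetical stand-in for the driver's completion callback:

    static void mv_service_outbound(struct hptiop_hba *hba)
    {
        u32 status, tag;

        status = readl(&hba->u.mv.regs->outbound_doorbell);
        if (status)
            /* acknowledge the cause bits we just observed */
            writel(~status, &hba->u.mv.regs->outbound_doorbell);

        /* drain completed request tags; 0 means the queue is empty */
        while ((tag = mv_outbound_read(hba->u.mv.mu)))
            handle_completed_tag(hba, tag);    /* hypothetical hook */
    }
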
259 writel(0, &(hba->u.mvfrey.mu->pcie_f0_int_enable));
261 status = readl(&(hba->u.mvfrey.mu->f0_doorbell));
263 writel(status, &(hba->u.mvfrey.mu->f0_doorbell));
265 u32 msg = readl(&(hba->u.mvfrey.mu->cpu_to_f0_msg_a));
272 status = readl(&(hba->u.mvfrey.mu->isr_cause));
274 writel(status, &(hba->u.mvfrey.mu->isr_cause));
276 cptr = *hba->u.mvfrey.outlist_cptr & 0xff;
277 cur_rptr = hba->u.mvfrey.outlist_rptr;
280 if (cur_rptr == hba->u.mvfrey.list_count)
283 _tag = hba->u.mvfrey.outlist[cur_rptr].val;
288 hba->u.mvfrey.outlist_rptr = cur_rptr;
289 } while (cptr != (*hba->u.mvfrey.outlist_cptr & 0xff));
293 writel(0x1010, &(hba->u.mvfrey.mu->pcie_f0_int_enable));
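
The MVFrey interrupt path (lines 259-293) acknowledges isr_cause with a write-1-to-clear, then consumes the outbound ring: the firmware DMAs its producer index into the shadow word *outlist_cptr, and the host chases it with outlist_rptr, wrapping at list_count. A sketch under those assumptions; complete_tag() is a hypothetical completion hook:

    static void mvfrey_drain_outlist(struct hptiop_hba *hba)
    {
        u32 status, cptr, cur_rptr, tag;

        status = readl(&hba->u.mvfrey.mu->isr_cause);
        if (!status)
            return;
        writel(status, &hba->u.mvfrey.mu->isr_cause);    /* W1C ack */

        do {
            /* low 8 bits of the shadow word are the producer index */
            cptr = *hba->u.mvfrey.outlist_cptr & 0xff;
            cur_rptr = hba->u.mvfrey.outlist_rptr;
            while (cur_rptr != cptr) {
                cur_rptr++;
                if (cur_rptr == hba->u.mvfrey.list_count)
                    cur_rptr = 0;    /* wrap the ring */
                tag = hba->u.mvfrey.outlist[cur_rptr].val;
                complete_tag(hba, tag);    /* hypothetical hook */
            }
            hba->u.mvfrey.outlist_rptr = cur_rptr;
            /* recheck: more completions may have landed meanwhile */
        } while (cptr != (*hba->u.mvfrey.outlist_cptr & 0xff));
    }
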
306 writel((unsigned long)req - (unsigned long)hba->u.itl.iop,
307 &hba->u.itl.iop->inbound_queue);
308 readl(&hba->u.itl.iop->outbound_intstatus);
323 struct hpt_iop_request_header *reqhdr = hba->u.mv.internal_req;
328 mv_inbound_write(hba->u.mv.internal_req_phy |
344 hba->u.mvfrey.internal_req.req_virt;
349 hba->ops->post_req(hba, &(hba->u.mvfrey.internal_req));
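
Line 349 posts through hba->ops rather than a family-specific call: each adapter family (ITL, MV, MVFrey) installs its own vtable at probe time. A trimmed sketch of the shape, with member names inferred from the call sites in this listing (the real structure carries more operations):

    struct hptiop_adapter_ops {
        void (*post_msg)(struct hptiop_hba *hba, u32 msg);
        void (*post_req)(struct hptiop_hba *hba,
                 struct hptiop_request *req);
        /* ... mapping, config and interrupt ops elided ... */
    };
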
362 writel(msg, &hba->u.itl.iop->inbound_msgaddr0);
363 readl(&hba->u.itl.iop->outbound_intstatus);
368 writel(msg, &hba->u.mv.mu->inbound_msg);
369 writel(MVIOP_MU_INBOUND_INT_MSG, &hba->u.mv.regs->inbound_doorbell);
370 readl(&hba->u.mv.regs->inbound_doorbell);
375 writel(msg, &(hba->u.mvfrey.mu->f0_to_cpu_msg_a));
376 readl(&(hba->u.mvfrey.mu->f0_to_cpu_msg_a));
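
All three post_msg variants (362-376) follow the same write-then-flush pattern; the MV one adds a doorbell ring between the two. Reassembled from the lines above:

    static void mv_post_msg(struct hptiop_hba *hba, u32 msg)
    {
        writel(msg, &hba->u.mv.mu->inbound_msg);
        /* ring the doorbell so the IOP notices the message ... */
        writel(MVIOP_MU_INBOUND_INT_MSG,
               &hba->u.mv.regs->inbound_doorbell);
        /* ... and flush the posted writes with a dummy read */
        readl(&hba->u.mv.regs->inbound_doorbell);
    }
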
406 req32 = readl(&hba->u.itl.iop->inbound_queue);
411 ((unsigned long)hba->u.itl.iop + req32);
424 writel(req32, &hba->u.itl.iop->outbound_queue);
431 struct hpt_iop_request_get_config *req = hba->u.mv.internal_req;
453 struct hpt_iop_request_get_config *info = hba->u.mvfrey.config;
478 req32 = readl(&hba->u.itl.iop->inbound_queue);
483 ((unsigned long)hba->u.itl.iop + req32);
500 writel(req32, &hba->u.itl.iop->outbound_queue);
507 struct hpt_iop_request_set_config *req = hba->u.mv.internal_req;
530 hba->u.mvfrey.internal_req.req_virt;
552 &hba->u.itl.iop->outbound_intmask);
558 &hba->u.mv.regs->outbound_intmask);
563 writel(CPU_TO_F0_DRBL_MSG_BIT, &(hba->u.mvfrey.mu->f0_doorbell_enable));
564 writel(0x1, &(hba->u.mvfrey.mu->isr_enable));
565 writel(0x1010, &(hba->u.mvfrey.mu->pcie_f0_int_enable));
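
The enable sequence at 552-565 is per family; for ITL it is a single write to outbound_intmask. A sketch assuming the IOPMU_OUTBOUND_INT_* bits from the driver header: clearing only the post-queue and message-0 mask bits leaves every other source masked.

    static void itl_enable_intr(struct hptiop_hba *hba)
    {
        /* 0 bits = unmasked: enable post queue and msg0 only */
        writel(~(IOPMU_OUTBOUND_INT_POSTQUEUE |
             IOPMU_OUTBOUND_INT_MSG0),
               &hba->u.itl.iop->outbound_intmask);
    }
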
614 hba->u.itl.iop = hptiop_map_pci_bar(hba, 0);
615 if (hba->u.itl.iop == NULL)
618 hba->u.itl.plx = hba->u.itl.iop;
619 hba->u.itl.iop = hptiop_map_pci_bar(hba, 2);
620 if (hba->u.itl.iop == NULL) {
621 iounmap(hba->u.itl.plx);
630 if (hba->u.itl.plx)
631 iounmap(hba->u.itl.plx);
632 iounmap(hba->u.itl.iop);
637 hba->u.mv.regs = hptiop_map_pci_bar(hba, 0);
638 if (hba->u.mv.regs == NULL)
641 hba->u.mv.mu = hptiop_map_pci_bar(hba, 2);
642 if (hba->u.mv.mu == NULL) {
643 iounmap(hba->u.mv.regs);
652 hba->u.mvfrey.config = hptiop_map_pci_bar(hba, 0);
653 if (hba->u.mvfrey.config == NULL)
656 hba->u.mvfrey.mu = hptiop_map_pci_bar(hba, 2);
657 if (hba->u.mvfrey.mu == NULL) {
658 iounmap(hba->u.mvfrey.config);
667 iounmap(hba->u.mv.regs);
668 iounmap(hba->u.mv.mu);
673 iounmap(hba->u.mvfrey.config);
674 iounmap(hba->u.mvfrey.mu);
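
Each family maps one or two PCI BARs (614-658) and unmaps them in reverse on teardown. hptiop_map_pci_bar() itself is not in this listing; a plausible reconstruction, assuming it is a thin ioremap wrapper over the BAR's resource:

    static void __iomem *hptiop_map_pci_bar(struct hptiop_hba *hba,
                        int index)
    {
        struct pci_dev *pcidev = hba->pcidev;

        if (!(pci_resource_flags(pcidev, index) & IORESOURCE_MEM))
            return NULL;    /* not a memory BAR */

        return ioremap(pci_resource_start(pcidev, index),
                   pci_resource_len(pcidev, index));
    }
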
801 ((unsigned long)hba->u.itl.iop + tag);
830 writel(tag, &hba->u.itl.iop->outbound_queue);
894 &hba->u.itl.iop->inbound_queue);
897 &hba->u.itl.iop->inbound_queue);
938 hba->u.mvfrey.inlist_wptr++;
939 index = hba->u.mvfrey.inlist_wptr & 0x3fff;
941 if (index == hba->u.mvfrey.list_count) {
943 hba->u.mvfrey.inlist_wptr &= ~0x3fff;
944 hba->u.mvfrey.inlist_wptr ^= CL_POINTER_TOGGLE;
947 hba->u.mvfrey.inlist[index].addr =
949 hba->u.mvfrey.inlist[index].intrfc_len = (reqhdr->size + 3) / 4;
950 writel(hba->u.mvfrey.inlist_wptr,
951 &(hba->u.mvfrey.mu->inbound_write_ptr));
952 readl(&(hba->u.mvfrey.mu->inbound_write_ptr));
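
The inbound post at 938-952 mirrors the outbound drain: inlist_wptr carries a 14-bit index plus the CL_POINTER_TOGGLE bit, which flips on every wrap so the firmware can distinguish a full ring from an empty one. A sketch under those assumptions (the entry field types and endian conversions are assumptions, not quoted from the driver):

    static void mvfrey_post(struct hptiop_hba *hba, dma_addr_t req_phy,
                u32 size_bytes)
    {
        u32 index;

        hba->u.mvfrey.inlist_wptr++;
        index = hba->u.mvfrey.inlist_wptr & 0x3fff;
        if (index == hba->u.mvfrey.list_count) {
            index = 0;
            /* wrap: clear the index bits, flip the toggle */
            hba->u.mvfrey.inlist_wptr &= ~0x3fff;
            hba->u.mvfrey.inlist_wptr ^= CL_POINTER_TOGGLE;
        }
        hba->u.mvfrey.inlist[index].addr = cpu_to_le64(req_phy);
        hba->u.mvfrey.inlist[index].intrfc_len =
            cpu_to_le32((size_bytes + 3) / 4);    /* dwords */
        writel(hba->u.mvfrey.inlist_wptr,
               &hba->u.mvfrey.mu->inbound_write_ptr);
        readl(&hba->u.mvfrey.mu->inbound_write_ptr);    /* flush */
    }
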
967 u32 list_count = hba->u.mvfrey.list_count;
975 writel(cpu_to_le32(hba->u.mvfrey.inlist_phy & 0xffffffff),
976 &(hba->u.mvfrey.mu->inbound_base));
977 writel(cpu_to_le32((hba->u.mvfrey.inlist_phy >> 16) >> 16),
978 &(hba->u.mvfrey.mu->inbound_base_high));
980 writel(cpu_to_le32(hba->u.mvfrey.outlist_phy & 0xffffffff),
981 &(hba->u.mvfrey.mu->outbound_base));
982 writel(cpu_to_le32((hba->u.mvfrey.outlist_phy >> 16) >> 16),
983 &(hba->u.mvfrey.mu->outbound_base_high));
985 writel(cpu_to_le32(hba->u.mvfrey.outlist_cptr_phy & 0xffffffff),
986 &(hba->u.mvfrey.mu->outbound_shadow_base));
987 writel(cpu_to_le32((hba->u.mvfrey.outlist_cptr_phy >> 16) >> 16),
988 &(hba->u.mvfrey.mu->outbound_shadow_base_high));
990 hba->u.mvfrey.inlist_wptr = (list_count - 1) | CL_POINTER_TOGGLE;
991 *hba->u.mvfrey.outlist_cptr = (list_count - 1) | CL_POINTER_TOGGLE;
992 hba->u.mvfrey.outlist_rptr = list_count - 1;
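
The (phy >> 16) >> 16 idiom at 977-988 is deliberate: when dma_addr_t is 32 bits wide, phy >> 32 is undefined behaviour in C, while the split shift reliably yields 0. Hypothetical helpers in the same spirit:

    static inline u32 dma_lo32(dma_addr_t phy)
    {
        return phy & 0xffffffff;        /* low half */
    }

    static inline u32 dma_hi32(dma_addr_t phy)
    {
        return (phy >> 16) >> 16;       /* high half, 0 on 32-bit */
    }
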
1184 hba->u.mv.internal_req = dma_alloc_coherent(&hba->pcidev->dev,
1185 0x800, &hba->u.mv.internal_req_phy, GFP_KERNEL);
1186 if (hba->u.mv.internal_req)
1194 u32 list_count = readl(&hba->u.mvfrey.mu->inbound_conf_ctl);
1207 hba->u.mvfrey.list_count = list_count;
1208 hba->u.mvfrey.internal_mem_size = 0x800 +
1214 hba->u.mvfrey.internal_mem_size, &phy, GFP_KERNEL);
1218 hba->u.mvfrey.internal_req.req_virt = p;
1219 hba->u.mvfrey.internal_req.req_shifted_phy = phy >> 5;
1220 hba->u.mvfrey.internal_req.scp = NULL;
1221 hba->u.mvfrey.internal_req.next = NULL;
1226 hba->u.mvfrey.inlist = (struct mvfrey_inlist_entry *)p;
1227 hba->u.mvfrey.inlist_phy = phy;
1232 hba->u.mvfrey.outlist = (struct mvfrey_outlist_entry *)p;
1233 hba->u.mvfrey.outlist_phy = phy;
1238 hba->u.mvfrey.outlist_cptr = (__le32 *)p;
1239 hba->u.mvfrey.outlist_cptr_phy = phy;
1251 if (hba->u.mv.internal_req) {
1253 hba->u.mv.internal_req, hba->u.mv.internal_req_phy);
1261 if (hba->u.mvfrey.internal_req.req_virt) {
1263 hba->u.mvfrey.internal_mem_size,
1264 hba->u.mvfrey.internal_req.req_virt,
1266 hba->u.mvfrey.internal_req.req_shifted_phy << 5);
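
Allocation and teardown are strictly paired: every dma_alloc_coherent() at probe has a matching dma_free_coherent() with the same size and handles. The MV case, reassembled from lines 1184-1186 and 1251-1253:

    static int mv_alloc_internal(struct hptiop_hba *hba)
    {
        hba->u.mv.internal_req = dma_alloc_coherent(&hba->pcidev->dev,
                0x800, &hba->u.mv.internal_req_phy, GFP_KERNEL);
        return hba->u.mv.internal_req ? 0 : -1;
    }

    static void mv_free_internal(struct hptiop_hba *hba)
    {
        if (hba->u.mv.internal_req)
            dma_free_coherent(&hba->pcidev->dev, 0x800,
                hba->u.mv.internal_req,
                hba->u.mv.internal_req_phy);
    }
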
1522 int_mask = readl(&hba->u.itl.iop->outbound_intmask);
1525 &hba->u.itl.iop->outbound_intmask);
1526 readl(&hba->u.itl.iop->outbound_intmask);
1531 writel(0, &hba->u.mv.regs->outbound_intmask);
1532 readl(&hba->u.mv.regs->outbound_intmask);
1537 writel(0, &(hba->u.mvfrey.mu->f0_doorbell_enable));
1538 readl(&(hba->u.mvfrey.mu->f0_doorbell_enable));
1539 writel(0, &(hba->u.mvfrey.mu->isr_enable));
1540 readl(&(hba->u.mvfrey.mu->isr_enable));
1541 writel(0, &(hba->u.mvfrey.mu->pcie_f0_int_enable));
1542 readl(&(hba->u.mvfrey.mu->pcie_f0_int_enable));
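
The disable path at 1522-1542 inverts the earlier enables, and every writel is again chased by a readl so the masking takes effect before the caller proceeds. The ITL variant as a sketch, assuming the same IOPMU_OUTBOUND_INT_* bits as above:

    static void itl_disable_intr(struct hptiop_hba *hba)
    {
        u32 int_mask = readl(&hba->u.itl.iop->outbound_intmask);

        /* 1 bits = masked: re-mask post queue and msg0 */
        writel(int_mask | IOPMU_OUTBOUND_INT_MSG0 |
                IOPMU_OUTBOUND_INT_POSTQUEUE,
               &hba->u.itl.iop->outbound_intmask);
        readl(&hba->u.itl.iop->outbound_intmask);    /* flush */
    }
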