Lines Matching +full:0 +full:xa400
19 {0x7e40, 0x7e44, 0x020, 28}, /* t6_tp_pio_regs_20_to_3b */
20 {0x7e40, 0x7e44, 0x040, 10}, /* t6_tp_pio_regs_40_to_49 */
21 {0x7e40, 0x7e44, 0x050, 10}, /* t6_tp_pio_regs_50_to_59 */
22 {0x7e40, 0x7e44, 0x060, 14}, /* t6_tp_pio_regs_60_to_6d */
23 {0x7e40, 0x7e44, 0x06F, 1}, /* t6_tp_pio_regs_6f */
24 {0x7e40, 0x7e44, 0x070, 6}, /* t6_tp_pio_regs_70_to_75 */
25 {0x7e40, 0x7e44, 0x130, 18}, /* t6_tp_pio_regs_130_to_141 */
26 {0x7e40, 0x7e44, 0x145, 19}, /* t6_tp_pio_regs_145_to_157 */
27 {0x7e40, 0x7e44, 0x160, 1}, /* t6_tp_pio_regs_160 */
28 {0x7e40, 0x7e44, 0x230, 25}, /* t6_tp_pio_regs_230_to_248 */
29 {0x7e40, 0x7e44, 0x24a, 3}, /* t6_tp_pio_regs_24a_to_24c */
30 {0x7e40, 0x7e44, 0x8C0, 1} /* t6_tp_pio_regs_8c0 */
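The rows above, and the similar tables that follow, share a four-field layout: indirect address register, indirect data register, starting local offset, and number of registers to read. Below is a minimal user-space sketch of how such a tuple is typically walked. The in-memory register map and the fake_write()/fake_read() helpers are hypothetical stand-ins for the driver's real MMIO accessors; in the driver itself this walk sits behind helpers such as t4_tp_pio_read(), so this only illustrates the access pattern.

#include <stdio.h>
#include <stdint.h>

/* Hypothetical in-memory "register file" standing in for device MMIO. */
static uint32_t regfile[0x10000];
static void fake_write(uint32_t addr, uint32_t val) { regfile[addr] = val; }
static uint32_t fake_read(uint32_t addr) { return regfile[addr]; }

/* One table row: address reg, data reg, starting local offset, count. */
struct ireg_desc {
	uint32_t addr_reg;
	uint32_t data_reg;
	uint32_t local_offset;
	uint32_t count;
};

/* Select each offset through the address register, then read the value
 * back through the data register: the generic indirect-register pattern
 * the tables above describe.
 */
static void read_indirect(const struct ireg_desc *d, uint32_t *out)
{
	for (uint32_t i = 0; i < d->count; i++) {
		fake_write(d->addr_reg, d->local_offset + i);
		out[i] = fake_read(d->data_reg);
	}
}

int main(void)
{
	/* Modeled on the first t6_tp_pio_array row above. */
	const struct ireg_desc row = { 0x7e40, 0x7e44, 0x020, 28 };
	uint32_t buf[28];

	read_indirect(&row, buf);
	printf("read %u TP PIO registers starting at offset 0x%x\n",
	       row.count, row.local_offset);
	return 0;
}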
34 {0x7e40, 0x7e44, 0x020, 28}, /* t5_tp_pio_regs_20_to_3b */
35 {0x7e40, 0x7e44, 0x040, 19}, /* t5_tp_pio_regs_40_to_52 */
36 {0x7e40, 0x7e44, 0x054, 2}, /* t5_tp_pio_regs_54_to_55 */
37 {0x7e40, 0x7e44, 0x060, 13}, /* t5_tp_pio_regs_60_to_6c */
38 {0x7e40, 0x7e44, 0x06F, 1}, /* t5_tp_pio_regs_6f */
39 {0x7e40, 0x7e44, 0x120, 4}, /* t5_tp_pio_regs_120_to_123 */
40 {0x7e40, 0x7e44, 0x12b, 2}, /* t5_tp_pio_regs_12b_to_12c */
41 {0x7e40, 0x7e44, 0x12f, 21}, /* t5_tp_pio_regs_12f_to_143 */
42 {0x7e40, 0x7e44, 0x145, 19}, /* t5_tp_pio_regs_145_to_157 */
43 {0x7e40, 0x7e44, 0x230, 25}, /* t5_tp_pio_regs_230_to_248 */
44 {0x7e40, 0x7e44, 0x8C0, 1} /* t5_tp_pio_regs_8c0 */
48 {0x7e18, 0x7e1c, 0x0, 12}
52 {0x7e18, 0x7e1c, 0x0, 12}
56 {0x7e50, 0x7e54, 0x0, 13},
57 {0x7e50, 0x7e54, 0x10, 6},
58 {0x7e50, 0x7e54, 0x18, 21},
59 {0x7e50, 0x7e54, 0x30, 32},
60 {0x7e50, 0x7e54, 0x50, 22},
61 {0x7e50, 0x7e54, 0x68, 12}
65 {0x7e50, 0x7e54, 0x0, 13},
66 {0x7e50, 0x7e54, 0x10, 6},
67 {0x7e50, 0x7e54, 0x18, 8},
68 {0x7e50, 0x7e54, 0x20, 13},
69 {0x7e50, 0x7e54, 0x30, 16},
70 {0x7e50, 0x7e54, 0x40, 16},
71 {0x7e50, 0x7e54, 0x50, 16},
72 {0x7e50, 0x7e54, 0x60, 6},
73 {0x7e50, 0x7e54, 0x68, 4}
77 {0x10cc, 0x10d0, 0x0, 16},
78 {0x10cc, 0x10d4, 0x0, 16},
82 /* 1 addr reg SGE_QBASE_INDEX and 4 data reg SGE_QBASE_MAP[0-3] */
83 0x1250, 0x1240, 0x1244, 0x1248, 0x124c,
87 {0x5a04, 0x5a0c, 0x00, 0x20}, /* t5_pcie_pdbg_regs_00_to_20 */
88 {0x5a04, 0x5a0c, 0x21, 0x20}, /* t5_pcie_pdbg_regs_21_to_40 */
89 {0x5a04, 0x5a0c, 0x41, 0x10}, /* t5_pcie_pdbg_regs_41_to_50 */
93 {0x5a10, 0x5a18, 0x00, 0x20}, /* t5_pcie_cdbg_regs_00_to_20 */
94 {0x5a10, 0x5a18, 0x21, 0x18}, /* t5_pcie_cdbg_regs_21_to_37 */
98 {0x8FD0, 0x8FD4, 0x10000, 0x20}, /* t5_pm_rx_regs_10000_to_10020 */
99 {0x8FD0, 0x8FD4, 0x10021, 0x0D}, /* t5_pm_rx_regs_10021_to_1002c */
103 {0x8FF0, 0x8FF4, 0x10000, 0x20}, /* t5_pm_tx_regs_10000_to_10020 */
104 {0x8FF0, 0x8FF4, 0x10021, 0x1D}, /* t5_pm_tx_regs_10021_to_1003c */
108 {0x0, 0x34},
109 {0x3c, 0x40},
110 {0x50, 0x64},
111 {0x70, 0x80},
112 {0x94, 0xa0},
113 {0xb0, 0xb8},
114 {0xd0, 0xd4},
115 {0x100, 0x128},
116 {0x140, 0x148},
117 {0x150, 0x164},
118 {0x170, 0x178},
119 {0x180, 0x194},
120 {0x1a0, 0x1b8},
121 {0x1c0, 0x208},
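Unlike the indirect tuples, these rows are plain {start, end} PCIe config register ranges; line 2213 below walks each of them in 4-byte steps. A small sketch of that walk, assuming the end offset is inclusive and using a placeholder fake_cfg_read() instead of real config-space accessors:

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

/* Inclusive {start, end} config-space ranges, as in the table above. */
static const uint32_t cfg_ranges[][2] = {
	{0x0, 0x34},
	{0x3c, 0x40},
	{0x50, 0x64},
};

/* Placeholder for a real PCIe config-space read. */
static uint32_t fake_cfg_read(uint32_t off)
{
	return off;	/* dummy value */
}

int main(void)
{
	/* Step through every range four bytes at a time. */
	for (size_t i = 0; i < sizeof(cfg_ranges) / sizeof(cfg_ranges[0]); i++)
		for (uint32_t off = cfg_ranges[i][0]; off <= cfg_ranges[i][1]; off += 4)
			printf("cfg[0x%03x] = 0x%08x\n", off, fake_cfg_read(off));
	return 0;
}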
125 {0x78f8, 0x78fc, 0xa000, 23}, /* t6_ma_regs_a000_to_a016 */
126 {0x78f8, 0x78fc, 0xa400, 30}, /* t6_ma_regs_a400_to_a41e */
127 {0x78f8, 0x78fc, 0xa800, 20} /* t6_ma_regs_a800_to_a813 */
131 {0x78f8, 0x78fc, 0xe400, 17}, /* t6_ma_regs_e400_to_e600 */
132 {0x78f8, 0x78fc, 0xe640, 13} /* t6_ma_regs_e640_to_e7c0 */
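This second MA table differs from the first: line 3012 below bumps ireg_local_offset by 0x20 between reads, so the fourth column counts reads spaced 0x20 apart rather than consecutive offsets, which is also what the e400_to_e600 and e640_to_e7c0 comments imply. A tiny sketch of that stride (it only prints the offsets that would be selected through 0x78f8/0x78fc):

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	/* Second t6_ma_ireg_array2 row above: base 0xe640, 13 reads. */
	const uint32_t base = 0xe640, count = 13, stride = 0x20;

	for (uint32_t i = 0; i < count; i++)
		printf("select offset 0x%x\n", base + i * stride);

	/* Last offset works out to 0xe7c0, matching the row's comment. */
	printf("last offset: 0x%x\n", base + (count - 1) * stride);
	return 0;
}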
136 {0x7b50, 0x7b54, 0x2000, 0x20, 0}, /* up_cim_2000_to_207c */
137 {0x7b50, 0x7b54, 0x2080, 0x1d, 0}, /* up_cim_2080_to_20fc */
138 {0x7b50, 0x7b54, 0x00, 0x20, 0}, /* up_cim_00_to_7c */
139 {0x7b50, 0x7b54, 0x80, 0x20, 0}, /* up_cim_80_to_fc */
140 {0x7b50, 0x7b54, 0x100, 0x11, 0}, /* up_cim_100_to_14c */
141 {0x7b50, 0x7b54, 0x200, 0x10, 0}, /* up_cim_200_to_23c */
142 {0x7b50, 0x7b54, 0x240, 0x2, 0}, /* up_cim_240_to_244 */
143 {0x7b50, 0x7b54, 0x250, 0x2, 0}, /* up_cim_250_to_254 */
144 {0x7b50, 0x7b54, 0x260, 0x2, 0}, /* up_cim_260_to_264 */
145 {0x7b50, 0x7b54, 0x270, 0x2, 0}, /* up_cim_270_to_274 */
146 {0x7b50, 0x7b54, 0x280, 0x20, 0}, /* up_cim_280_to_2fc */
147 {0x7b50, 0x7b54, 0x300, 0x20, 0}, /* up_cim_300_to_37c */
148 {0x7b50, 0x7b54, 0x380, 0x14, 0}, /* up_cim_380_to_3cc */
149 {0x7b50, 0x7b54, 0x4900, 0x4, 0x4}, /* up_cim_4900_to_4c60 */
150 {0x7b50, 0x7b54, 0x4904, 0x4, 0x4}, /* up_cim_4904_to_4c64 */
151 {0x7b50, 0x7b54, 0x4908, 0x4, 0x4}, /* up_cim_4908_to_4c68 */
152 {0x7b50, 0x7b54, 0x4910, 0x4, 0x4}, /* up_cim_4910_to_4c70 */
153 {0x7b50, 0x7b54, 0x4914, 0x4, 0x4}, /* up_cim_4914_to_4c74 */
154 {0x7b50, 0x7b54, 0x4920, 0x10, 0x10}, /* up_cim_4920_to_4a10 */
155 {0x7b50, 0x7b54, 0x4924, 0x10, 0x10}, /* up_cim_4924_to_4a14 */
156 {0x7b50, 0x7b54, 0x4928, 0x10, 0x10}, /* up_cim_4928_to_4a18 */
157 {0x7b50, 0x7b54, 0x492c, 0x10, 0x10}, /* up_cim_492c_to_4a1c */
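These UP CIM rows carry a fifth field, and for the 0x49xx rows it is non-zero. The range comments are consistent with that fifth value selecting a per-instance layout: the four-instance rows end at base + 3 * 0x120 and the sixteen-instance rows at base + 15 * 0x10, which matches the local_offset values 0x120 and 0x10 seen at lines 3133 and 3138 below. A small check of that arithmetic; the strides here are read off those lines, not from any driver header:

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

struct up_cim_row {
	uint32_t base;		/* starting local offset (column three) */
	uint32_t instances;	/* fifth column */
	uint32_t stride;	/* per-instance step implied by the comments */
};

int main(void)
{
	/* The 0x49xx rows from the table above. */
	static const struct up_cim_row rows[] = {
		{ 0x4900, 0x4, 0x120 },
		{ 0x4920, 0x10, 0x10 },
	};

	/* End offset = base + (instances - 1) * stride, matching the
	 * up_cim_4900_to_4c60 and up_cim_4920_to_4a10 comments.
	 */
	for (size_t i = 0; i < sizeof(rows) / sizeof(rows[0]); i++)
		printf("base 0x%x -> last instance at 0x%x\n", rows[i].base,
		       rows[i].base + (rows[i].instances - 1) * rows[i].stride);
	return 0;
}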
161 {0x7b50, 0x7b54, 0x2000, 0x20, 0}, /* up_cim_2000_to_207c */
162 {0x7b50, 0x7b54, 0x2080, 0x19, 0}, /* up_cim_2080_to_20ec */
163 {0x7b50, 0x7b54, 0x00, 0x20, 0}, /* up_cim_00_to_7c */
164 {0x7b50, 0x7b54, 0x80, 0x20, 0}, /* up_cim_80_to_fc */
165 {0x7b50, 0x7b54, 0x100, 0x11, 0}, /* up_cim_100_to_14c */
166 {0x7b50, 0x7b54, 0x200, 0x10, 0}, /* up_cim_200_to_23c */
167 {0x7b50, 0x7b54, 0x240, 0x2, 0}, /* up_cim_240_to_244 */
168 {0x7b50, 0x7b54, 0x250, 0x2, 0}, /* up_cim_250_to_254 */
169 {0x7b50, 0x7b54, 0x260, 0x2, 0}, /* up_cim_260_to_264 */
170 {0x7b50, 0x7b54, 0x270, 0x2, 0}, /* up_cim_270_to_274 */
171 {0x7b50, 0x7b54, 0x280, 0x20, 0}, /* up_cim_280_to_2fc */
172 {0x7b50, 0x7b54, 0x300, 0x20, 0}, /* up_cim_300_to_37c */
173 {0x7b50, 0x7b54, 0x380, 0x14, 0}, /* up_cim_380_to_3cc */
177 {0x51320, 0x51324, 0xa000, 32} /* t6_hma_regs_a000_to_a01f */
182 struct cudbg_tcam tcam_region = { 0 }; in cudbg_get_entity_length()
183 u32 value, n = 0, len = 0; in cudbg_get_entity_length()
227 len = cudbg_cim_obq_size(adap, 0); in cudbg_get_entity_length()
381 n = 0; in cudbg_get_entity_length()
428 struct cudbg_buffer temp_in_buff = { 0 }; in cudbg_do_compression()
438 bytes_read = 0; in cudbg_do_compression()
439 while (bytes_left > 0) { in cudbg_do_compression()
453 return 0; in cudbg_do_compression()
460 int rc = 0; in cudbg_write_and_release_buff()
480 return 0; in is_fw_attached()
491 u8 zero_buf[4] = {0}; in cudbg_align_debug_buffer()
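The zero_buf[4] at line 491 is there to pad each collected entity out to a 4-byte boundary. A minimal sketch of that kind of alignment padding, assuming the usual "round the current offset up to the next multiple of four" rule; the helper below is illustrative, not the driver's function:

#include <stdio.h>
#include <stdint.h>
#include <string.h>

/* Pad buf at *offset with zero bytes until *offset is 4-byte aligned.
 * Returns the number of padding bytes written (0..3).
 */
static uint32_t pad_to_4(uint8_t *buf, uint32_t *offset)
{
	const uint8_t zero_buf[4] = { 0 };
	uint32_t remain = *offset % 4;
	uint32_t padding = remain ? 4 - remain : 0;

	memcpy(buf + *offset, zero_buf, padding);
	*offset += padding;
	return padding;
}

int main(void)
{
	uint8_t buf[64] = { 0 };
	uint32_t offset = 7;	/* pretend 7 bytes of entity data were written */

	printf("padded %u byte(s), offset now %u\n", pad_to_4(buf, &offset),
	       offset);
	return 0;
}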
520 if (vaddr < 0) in cudbg_read_vpd_reg()
524 if (rc < 0) in cudbg_read_vpd_reg()
527 return 0; in cudbg_read_vpd_reg()
543 memset(meminfo_buff->avail, 0, in cudbg_fill_meminfo()
546 memset(meminfo_buff->mem, 0, in cudbg_fill_meminfo()
550 for (i = 0; i < ARRAY_SIZE(meminfo_buff->mem); i++) { in cudbg_fill_meminfo()
551 meminfo_buff->mem[i].limit = 0; in cudbg_fill_meminfo()
556 i = 0; in cudbg_fill_meminfo()
565 meminfo_buff->avail[i].idx = 0; in cudbg_fill_meminfo()
664 md->limit = 0; in cudbg_fill_meminfo()
666 md->base = 0; in cudbg_fill_meminfo()
674 } while (0) in cudbg_fill_meminfo()
685 md->base = 0; in cudbg_fill_meminfo()
690 u32 size = 0; in cudbg_fill_meminfo()
709 md->limit = 0; in cudbg_fill_meminfo()
712 md->limit = 0; in cudbg_fill_meminfo()
723 for (n = 0; n < i - 1; n++) in cudbg_fill_meminfo()
748 for (i = 0, meminfo_buff->free_rx_cnt = 0; i < 2; i++) in cudbg_fill_meminfo()
753 meminfo_buff->rx_pages_data[0] = PMRXMAXPAGE_G(lo); in cudbg_fill_meminfo()
760 for (i = 0, meminfo_buff->free_tx_cnt = 0; i < 4; i++) in cudbg_fill_meminfo()
765 meminfo_buff->tx_pages_data[0] = PMTXMAXPAGE_G(lo); in cudbg_fill_meminfo()
776 for (i = 0; i < 4; i++) { in cudbg_fill_meminfo()
793 for (i = 0; i < padap->params.arch.nchan; i++) { in cudbg_fill_meminfo()
810 return 0; in cudbg_fill_meminfo()
818 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_reg_dump()
819 u32 buf_size = 0; in cudbg_collect_reg_dump()
820 int rc = 0; in cudbg_collect_reg_dump()
839 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_fw_devlog()
841 int rc = 0; in cudbg_collect_fw_devlog()
844 if (rc < 0) { in cudbg_collect_fw_devlog()
855 if (dparams->start != 0) { in cudbg_collect_fw_devlog()
877 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cim_la()
879 u32 cfg = 0; in cudbg_collect_cim_la()
905 if (rc < 0) { in cudbg_collect_cim_la()
918 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cim_ma_la()
938 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cim_qcfg()
976 struct cudbg_buffer temp_buff = { 0 }; in cudbg_read_cim_ibq()
977 int no_of_read_words, rc = 0; in cudbg_read_cim_ibq()
989 /* no_of_read_words is less than or equal to 0 means error */ in cudbg_read_cim_ibq()
990 if (no_of_read_words <= 0) { in cudbg_read_cim_ibq()
1006 return cudbg_read_cim_ibq(pdbg_init, dbg_buff, cudbg_err, 0); in cudbg_collect_cim_ibq_tp0()
1060 struct cudbg_buffer temp_buff = { 0 }; in cudbg_read_cim_obq()
1061 int no_of_read_words, rc = 0; in cudbg_read_cim_obq()
1073 /* no_of_read_words is less than or equal to 0 means error */ in cudbg_read_cim_obq()
1074 if (no_of_read_words <= 0) { in cudbg_read_cim_obq()
1090 return cudbg_read_cim_obq(pdbg_init, dbg_buff, cudbg_err, 0); in cudbg_collect_cim_obq_ulp0()
1169 for (i = 0; i < mem_info->avail_c; i++) { in cudbg_meminfo_get_mem_index()
1172 return 0; in cudbg_meminfo_get_mem_index()
1185 u8 mc, found = 0; in cudbg_get_mem_region()
1186 u32 idx = 0; in cudbg_get_mem_region()
1194 if (i < 0) in cudbg_get_mem_region()
1198 for (i = 0; i < meminfo->mem_c; i++) { in cudbg_get_mem_region()
1205 meminfo->mem[i + 1].base - 1 : ~0; in cudbg_get_mem_region()
1225 return 0; in cudbg_get_mem_region()
1228 /* Fetch and update the start and end of the requested memory region w.r.t 0
1243 *out_base = 0; in cudbg_get_mem_relative()
1252 return 0; in cudbg_get_mem_relative()
1260 struct cudbg_mem_desc mem_desc = { 0 }; in cudbg_get_payload_range()
1272 return 0; in cudbg_get_payload_range()
1295 if (addr & 0x3 || (uintptr_t)hbuf & 0x3) in cudbg_memory_read()
1301 resid = len & 0x7; in cudbg_memory_read()
1310 win_pf = is_t4(adap->params.chip) ? 0 : PFNUM_V(adap->pf); in cudbg_memory_read()
1321 while (len > 0) { in cudbg_memory_read()
1332 offset = 0; in cudbg_memory_read()
1350 offset = 0; in cudbg_memory_read()
1360 return 0; in cudbg_memory_read()
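cudbg_memory_read() rejects source addresses or destination buffers that are not 4-byte aligned (line 1295) and splits the requested length into a multiple of eight bytes plus a residual of len & 0x7 (line 1301). The split itself is ordinary arithmetic; a tiny illustration, with names of my own choosing:

#include <stdio.h>
#include <stdint.h>

int main(void)
{
	uint32_t len = 1000003;			/* arbitrary request size */
	uint32_t resid = len & 0x7;		/* trailing bytes, 0..7 */
	uint32_t aligned = len - resid;		/* multiple of 8, handled in bulk */

	printf("bulk part: %u bytes, residual: %u bytes\n", aligned, resid);
	return 0;
}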
1372 unsigned long bytes, bytes_left, bytes_read = 0; in cudbg_read_fw_mem()
1374 struct cudbg_buffer temp_buff = { 0 }; in cudbg_read_fw_mem()
1376 u32 yield_count = 0; in cudbg_read_fw_mem()
1377 int rc = 0; in cudbg_read_fw_mem()
1381 memset(payload, 0, sizeof(payload)); in cudbg_read_fw_mem()
1382 for (i = 0; i < ARRAY_SIZE(region_name); i++) { in cudbg_read_fw_mem()
1398 while (bytes_left > 0) { in cudbg_read_fw_mem()
1414 for (i = 0; i < ARRAY_SIZE(payload); i++) in cudbg_read_fw_mem()
1467 memset(&mem_info, 0, sizeof(struct cudbg_meminfo)); in cudbg_mem_region_size()
1485 return 0; in cudbg_mem_region_size()
1493 unsigned long size = 0; in cudbg_collect_mem_region()
1549 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_rss()
1572 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_rss_vf_config()
1584 for (vf = 0; vf < vf_count; vf++) in cudbg_collect_rss_vf_config()
1595 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_path_mtu()
1612 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pm_stats()
1632 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_hw_sched()
1634 int i, rc = 0; in cudbg_collect_hw_sched()
1649 for (i = 0; i < NTX_SCHED; ++i) in cudbg_collect_hw_sched()
1660 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_tp_indirect()
1662 int i, rc, n = 0; in cudbg_collect_tp_indirect()
1688 for (i = 0; i < n; i++) { in cudbg_collect_tp_indirect()
1693 tp_pio->ireg_addr = t5_tp_pio_array[i][0]; in cudbg_collect_tp_indirect()
1698 tp_pio->ireg_addr = t6_tp_pio_array[i][0]; in cudbg_collect_tp_indirect()
1714 for (i = 0; i < n; i++) { in cudbg_collect_tp_indirect()
1719 tp_pio->ireg_addr = t5_tp_tm_pio_array[i][0]; in cudbg_collect_tp_indirect()
1724 tp_pio->ireg_addr = t6_tp_tm_pio_array[i][0]; in cudbg_collect_tp_indirect()
1742 for (i = 0; i < n ; i++) { in cudbg_collect_tp_indirect()
1747 tp_pio->ireg_addr = t5_tp_mib_index_array[i][0]; in cudbg_collect_tp_indirect()
1754 tp_pio->ireg_addr = t6_tp_mib_index_array[i][0]; in cudbg_collect_tp_indirect()
1779 * Entries 0->7 are PF0->7, Entries 8->263 are VFID0->256. in cudbg_read_sge_qbase_indirect_reg()
1785 for (i = 0; i < SGE_QBASE_DATA_REG_NUM; i++, buff++) in cudbg_read_sge_qbase_indirect_reg()
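Putting the SGE_QBASE pieces together: there is one index register and four data registers (line 82), index entries 0 to 7 address PF0 to PF7 while VF entries start at 8 (line 1779), and each selected function yields SGE_QBASE_DATA_REG_NUM data words (line 1785). A hedged user-space sketch of that lookup; the register map and accessors are simulated, and the data-register count of four mirrors the comment at line 82 rather than any header shown here:

#include <stdio.h>
#include <stdint.h>

#define QBASE_DATA_REGS 4	/* SGE_QBASE_MAP[0-3], per the comment at line 82 */

static uint32_t regfile[0x2000];
static void fake_write(uint32_t addr, uint32_t val) { regfile[addr] = val; }
static uint32_t fake_read(uint32_t addr) { return regfile[addr]; }

static const uint32_t qbase_index_reg = 0x1250;
static const uint32_t qbase_data_reg[QBASE_DATA_REGS] = {
	0x1240, 0x1244, 0x1248, 0x124c
};

/* Read the four SGE_QBASE_MAP words for one function.  PFs use index
 * 0..7 directly; VF n sits at index 8 + n.
 */
static void read_qbase(uint32_t func, int is_pf, uint32_t *out)
{
	uint32_t index = is_pf ? func : 8 + func;

	fake_write(qbase_index_reg, index);
	for (int i = 0; i < QBASE_DATA_REGS; i++)
		out[i] = fake_read(qbase_data_reg[i]);
}

int main(void)
{
	uint32_t pf0[QBASE_DATA_REGS], vf3[QBASE_DATA_REGS];

	read_qbase(0, 1, pf0);	/* PF0 -> index 0  */
	read_qbase(3, 0, vf3);	/* VF3 -> index 11 */
	printf("read %d data words each for PF0 and VF3\n", QBASE_DATA_REGS);
	return 0;
}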
1794 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_sge_indirect()
1806 for (i = 0; i < 2; i++) { in cudbg_collect_sge_indirect()
1810 sge_pio->ireg_addr = t5_sge_dbg_index_array[i][0]; in cudbg_collect_sge_indirect()
1826 * SGE_QBASE_MAP[0-3] in cudbg_collect_sge_indirect()
1828 sge_qbase->reg_addr = t6_sge_qbase_index_array[0]; in cudbg_collect_sge_indirect()
1829 for (i = 0; i < SGE_QBASE_DATA_REG_NUM; i++) in cudbg_collect_sge_indirect()
1833 for (i = 0; i <= PCIE_FW_MASTER_M; i++) in cudbg_collect_sge_indirect()
1837 for (i = 0; i < padap->params.arch.vfcount; i++) in cudbg_collect_sge_indirect()
1852 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_ulprx_la()
1872 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_tp_la()
1892 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_meminfo()
1927 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cim_pif_la()
1949 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_clk_info()
1997 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pcie_indirect()
2010 for (i = 0; i < n; i++) { in cudbg_collect_pcie_indirect()
2014 pcie_pio->ireg_addr = t5_pcie_pdbg_array[i][0]; in cudbg_collect_pcie_indirect()
2029 for (i = 0; i < n; i++) { in cudbg_collect_pcie_indirect()
2033 pcie_pio->ireg_addr = t5_pcie_cdbg_array[i][0]; in cudbg_collect_pcie_indirect()
2053 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pm_indirect()
2066 for (i = 0; i < n; i++) { in cudbg_collect_pm_indirect()
2070 pm_pio->ireg_addr = t5_pm_rx_array[i][0]; in cudbg_collect_pm_indirect()
2085 for (i = 0; i < n; i++) { in cudbg_collect_pm_indirect()
2089 pm_pio->ireg_addr = t5_pm_tx_array[i][0]; in cudbg_collect_pm_indirect()
2110 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_tid()
2137 FW_PARAMS_PARAM_Y_V(0) | \ in cudbg_collect_tid()
2138 FW_PARAMS_PARAM_Z_V(0)) in cudbg_collect_tid()
2140 para[0] = FW_PARAM_PFVF_A(ETHOFLD_START); in cudbg_collect_tid()
2142 rc = t4_query_params(padap, padap->mbox, padap->pf, 0, 2, para, val); in cudbg_collect_tid()
2143 if (rc < 0) { in cudbg_collect_tid()
2148 tid->uotid_base = val[0]; in cudbg_collect_tid()
2149 tid->nuotids = val[1] - val[0] + 1; in cudbg_collect_tid()
2158 para[0] = FW_PARAM_PFVF_A(HPFILTER_START); in cudbg_collect_tid()
2160 rc = t4_query_params(padap, padap->mbox, padap->pf, 0, 2, in cudbg_collect_tid()
2162 if (rc < 0) { in cudbg_collect_tid()
2167 tid->hpftid_base = val[0]; in cudbg_collect_tid()
2168 tid->nhpftids = val[1] - val[0] + 1; in cudbg_collect_tid()
2201 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pcie_config()
2212 for (i = 0; i < n; i++) { in cudbg_collect_pcie_config()
2213 for (j = t5_pcie_config_array[i][0]; in cudbg_collect_pcie_config()
2224 int index, bit, bit_pos = 0; in cudbg_sge_ctxt_check_valid()
2258 found = 0; in cudbg_get_ctxt_region_info()
2259 memset(&mem_desc, 0, sizeof(struct cudbg_mem_desc)); in cudbg_get_ctxt_region_info()
2260 for (j = 0; j < ARRAY_SIZE(meminfo.avail); j++) { in cudbg_get_ctxt_region_info()
2298 return 0; in cudbg_get_ctxt_region_info()
2303 struct cudbg_region_info region_info[CTXT_CNM + 1] = { {0} }; in cudbg_dump_context_size()
2304 u8 mem_type[CTXT_INGRESS + 1] = { 0 }; in cudbg_dump_context_size()
2305 u32 i, size = 0; in cudbg_dump_context_size()
2313 for (i = 0; i < CTXT_CNM; i++) { in cudbg_dump_context_size()
2354 for (j = 0; j < max_qid; j++) { in cudbg_get_sge_ctxt_fw()
2378 struct cudbg_region_info region_info[CTXT_CNM + 1] = { {0} }; in cudbg_collect_dump_context()
2381 u8 mem_type[CTXT_INGRESS + 1] = { 0 }; in cudbg_collect_dump_context()
2382 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_dump_context()
2394 if (rc <= 0) in cudbg_collect_dump_context()
2451 for (j = 0; j < max_ctx_qid; j++) { in cudbg_collect_dump_context()
2461 for (k = 0; k < SGE_CTXT_SIZE / sizeof(u64); k++) in cudbg_collect_dump_context()
2529 int rc = 0; in cudbg_collect_tcam_index()
2533 * CtlCmdType - 0: Read, 1: Write in cudbg_collect_tcam_index()
2534 * CtlTcamSel - 0: TCAM0, 1: TCAM1 in cudbg_collect_tcam_index()
2535 * CtlXYBitSel- 0: Y bit, 1: X bit in cudbg_collect_tcam_index()
2539 ctl = CTLREQID_V(1) | CTLCMDTYPE_V(0) | CTLXYBITSEL_V(0); in cudbg_collect_tcam_index()
2541 ctl |= CTLTCAMINDEX_V(idx) | CTLTCAMSEL_V(0); in cudbg_collect_tcam_index()
2552 /* 0 - Outer header, 1 - Inner header in cudbg_collect_tcam_index()
2601 memset(&ldst_cmd, 0, sizeof(ldst_cmd)); in cudbg_collect_tcam_index()
2623 rc = 0; in cudbg_collect_tcam_index()
2628 tcam->rplc[0] = ntohl(mps_rplc.rplc31_0); in cudbg_collect_tcam_index()
2650 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_mps_tcam()
2651 u32 size = 0, i, n, total_size = 0; in cudbg_collect_mps_tcam()
2662 for (i = 0; i < n; i++) { in cudbg_collect_mps_tcam()
2687 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_vpd_data()
2691 struct vpd_params vpd = { 0 }; in cudbg_collect_vpd_data()
2706 if (rc < 0) in cudbg_collect_vpd_data()
2714 if (rc < 0) in cudbg_collect_vpd_data()
2725 vpd_str[CUDBG_VPD_VER_LEN] = '\0'; in cudbg_collect_vpd_data()
2726 rc = kstrtouint(vpd_str, 0, &vpd_vers); in cudbg_collect_vpd_data()
2756 /* Fill REQ_DATA regs with 0's */ in cudbg_read_tid()
2757 for (i = 0; i < NUM_LE_DB_DBGI_REQ_DATA_INSTANCES; i++) in cudbg_read_tid()
2758 t4_write_reg(padap, LE_DB_DBGI_REQ_DATA_A + (i << 2), 0); in cudbg_read_tid()
2786 for (i = 0; i < NUM_LE_DB_DBGI_RSP_DATA_INSTANCES; i++) in cudbg_read_tid()
2791 return 0; in cudbg_read_tid()
2819 int ipv6 = 0; in cudbg_is_ipv6_entry()
2824 return 0; in cudbg_is_ipv6_entry()
2827 ipv6 = tid_data->data[16] & 0x8000; in cudbg_is_ipv6_entry()
2829 ipv6 = tid_data->data[16] & 0x8000; in cudbg_is_ipv6_entry()
2831 ipv6 = tid_data->data[9] == 0x00C00000; in cudbg_is_ipv6_entry()
2833 ipv6 = 0; in cudbg_is_ipv6_entry()
2871 tcam_region->max_tid = (value & 0xFFFFF) + in cudbg_fill_le_tcam_info()
2897 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_le_tcam()
2898 struct cudbg_tcam tcam_region = { 0 }; in cudbg_collect_le_tcam()
2900 u32 bytes = 0; in cudbg_collect_le_tcam()
2916 for (i = 0; i < tcam_region.max_tid; ) { in cudbg_collect_le_tcam()
2952 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_cctrl()
2970 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_ma_indirect()
2985 for (i = 0; i < n; i++) { in cudbg_collect_ma_indirect()
2989 ma_fli->ireg_addr = t6_ma_ireg_array[i][0]; in cudbg_collect_ma_indirect()
3000 for (i = 0; i < n; i++) { in cudbg_collect_ma_indirect()
3004 ma_fli->ireg_addr = t6_ma_ireg_array2[i][0]; in cudbg_collect_ma_indirect()
3007 for (j = 0; j < t6_ma_ireg_array2[i][3]; j++) { in cudbg_collect_ma_indirect()
3012 ma_fli->ireg_local_offset += 0x20; in cudbg_collect_ma_indirect()
3024 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_ulptx_la()
3044 for (i = 0; i < CUDBG_NUM_ULPTX; i++) { in cudbg_collect_ulptx_la()
3047 0x10 * i); in cudbg_collect_ulptx_la()
3050 0x10 * i); in cudbg_collect_ulptx_la()
3053 0x10 * i); in cudbg_collect_ulptx_la()
3054 for (j = 0; j < CUDBG_NUM_ULPTX_READ; j++) in cudbg_collect_ulptx_la()
3057 ULP_TX_LA_RDDATA_0_A + 0x10 * i); in cudbg_collect_ulptx_la()
3060 for (i = 0; i < CUDBG_NUM_ULPTX_ASIC_READ; i++) { in cudbg_collect_ulptx_la()
3061 t4_write_reg(padap, ULP_TX_ASIC_DEBUG_CTRL_A, 0x1); in cudbg_collect_ulptx_la()
3064 ulptx_la_buff->rddata_asic[i][0] = in cudbg_collect_ulptx_la()
3086 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_up_cim_indirect()
3090 u32 instance = 0; in cudbg_collect_up_cim_indirect()
3108 for (i = 0; i < n; i++) { in cudbg_collect_up_cim_indirect()
3113 up_cim_reg->ireg_addr = t5_up_cim_reg_array[i][0]; in cudbg_collect_up_cim_indirect()
3121 up_cim_reg->ireg_addr = t6_up_cim_reg_array[i][0]; in cudbg_collect_up_cim_indirect()
3133 local_offset = 0x120; in cudbg_collect_up_cim_indirect()
3138 local_offset = 0x10; in cudbg_collect_up_cim_indirect()
3143 local_offset = 0; in cudbg_collect_up_cim_indirect()
3148 for (j = 0; j < iter; j++, buff++) { in cudbg_collect_up_cim_indirect()
3167 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_pbt_tables()
3181 for (i = 0; i < CUDBG_PBT_DYNAMIC_ENTRIES; i++) { in cudbg_collect_pbt_tables()
3194 for (i = 0; i < CUDBG_PBT_STATIC_ENTRIES; i++) { in cudbg_collect_pbt_tables()
3206 for (i = 0; i < CUDBG_LRF_ENTRIES; i++) { in cudbg_collect_pbt_tables()
3218 for (i = 0; i < CUDBG_PBT_DATA_ENTRIES; i++) { in cudbg_collect_pbt_tables()
3236 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_mbox_log()
3253 for (k = 0; k < mbox_cmds; k++) { in cudbg_collect_mbox_log()
3260 if (entry->timestamp == 0) in cudbg_collect_mbox_log()
3264 for (i = 0; i < MBOX_LEN / 8; i++) { in cudbg_collect_mbox_log()
3279 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_hma_indirect()
3294 for (i = 0; i < n; i++) { in cudbg_collect_hma_indirect()
3298 hma_fli->ireg_addr = t6_hma_ireg_array[i][0]; in cudbg_collect_hma_indirect()
3313 u32 tot_entries = 0, tot_size = 0; in cudbg_fill_qdesc_num_and_size()
3368 u32 num_queues = 0, tot_entries = 0, size = 0; in cudbg_collect_qdesc()
3370 struct cudbg_buffer temp_buff = { 0 }; in cudbg_collect_qdesc()
3398 if (size <= 0) { \ in cudbg_collect_qdesc()
3407 } while (0) in cudbg_collect_qdesc()
3412 } while (0) in cudbg_collect_qdesc()
3417 } while (0) in cudbg_collect_qdesc()
3422 } while (0) in cudbg_collect_qdesc()
3425 for (i = 0; i < s->ethqsets; i++) in cudbg_collect_qdesc()
3429 for (i = 0; i < s->ethqsets; i++) in cudbg_collect_qdesc()
3433 for (i = 0; i < s->ethqsets; i++) in cudbg_collect_qdesc()
3437 for (i = 0; i < padap->params.nports; i++) in cudbg_collect_qdesc()
3456 for (j = 0; j < CXGB4_TX_MAX; j++) { in cudbg_collect_qdesc()
3461 for (i = 0; i < utxq->ntxq; i++) in cudbg_collect_qdesc()
3473 for (j = 0; j < CXGB4_ULD_MAX; j++) { in cudbg_collect_qdesc()
3478 for (i = 0; i < urxq->nrxq; i++) in cudbg_collect_qdesc()
3485 for (j = 0; j < CXGB4_ULD_MAX; j++) { in cudbg_collect_qdesc()
3490 for (i = 0; i < urxq->nrxq; i++) in cudbg_collect_qdesc()
3497 for (j = 0; j < CXGB4_ULD_MAX; j++) { in cudbg_collect_qdesc()
3503 for (i = 0; i < urxq->nciq; i++) in cudbg_collect_qdesc()
3512 for (i = 0; i < s->eoqsets; i++) in cudbg_collect_qdesc()
3518 for (i = 0; i < s->eoqsets; i++) in cudbg_collect_qdesc()
3522 for (i = 0; i < s->eoqsets; i++) in cudbg_collect_qdesc()
3533 cur_off = 0; in cudbg_collect_qdesc()
3574 struct cudbg_buffer temp_buff = {0}; in cudbg_collect_flash()
3580 for (i = 0; i < count; i += SF_PAGE_SIZE) { in cudbg_collect_flash()
3588 rc = t4_read_flash(padap, addr, n, (u32 *)temp_buff.data, 0); in cudbg_collect_flash()
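cudbg_collect_flash() reads the flash out in SF_PAGE_SIZE chunks (line 3580), with t4_read_flash() pulling each chunk (line 3588). The chunking itself is the familiar "min of remaining and chunk size" loop; a self-contained sketch with a made-up chunk size and a dummy reader:

#include <stdio.h>
#include <stdint.h>

#define CHUNK 256		/* stand-in for SF_PAGE_SIZE */

/* Dummy "flash" reader: just reports what would be read. */
static void dummy_read_flash(uint32_t addr, uint32_t n)
{
	printf("read %u bytes at 0x%x\n", n, addr);
}

int main(void)
{
	uint32_t count = 1000;	/* total bytes to collect */
	uint32_t addr = 0;

	for (uint32_t i = 0; i < count; i += CHUNK) {
		/* The last chunk may be shorter than CHUNK. */
		uint32_t n = (count - i < CHUNK) ? count - i : CHUNK;

		dummy_read_flash(addr + i, n);
	}
	return 0;
}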