Lines matching 0x10c (nvkm_falcon_v1_* register accesses)

nvkm_falcon_v1_load_imem():

38		reg = start | BIT(24) | (secure ? BIT(28) : 0);
39		nvkm_falcon_wr32(falcon, 0x180 + (port * 16), reg);
40		for (i = 0; i < size / 4; i++) {
42			if ((i & 0x3f) == 0)
43				nvkm_falcon_wr32(falcon, 0x188 + (port * 16), tag++);
44			nvkm_falcon_wr32(falcon, 0x184 + (port * 16), ((u32 *)data)[i]);
55			if ((i & 0x3f) == 0)
56				nvkm_falcon_wr32(falcon, 0x188 + (port * 16), tag++);
57			nvkm_falcon_wr32(falcon, 0x184 + (port * 16),
62		/* code must be padded to 0x40 words */
63		for (; i & 0x3f; i++)
64			nvkm_falcon_wr32(falcon, 0x184 + (port * 16), 0);
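
The nvkm_falcon_v1_load_imem() lines above describe the whole IMEM upload protocol: the register at 0x180 takes the destination offset with an auto-increment flag (bit 24) and an optional secure flag (bit 28), code words are then streamed through 0x184, a new block tag is written to 0x188 every 0x40 words (256 bytes), and the stream is zero-padded out to a 0x40-word boundary. A minimal user-space sketch of that traffic, assuming falcon_wr32() as a stand-in for nvkm_falcon_wr32(); the offsets, bit positions and padding rule are taken from the lines above, everything else is illustrative:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BIT(n) (1u << (n))

static void falcon_wr32(uint32_t addr, uint32_t val)
{
	printf("wr32 %#06x <- %#010x\n", (unsigned)addr, (unsigned)val);
}

static void load_imem_sketch(const void *data, uint32_t start, uint32_t size,
			     uint16_t tag, uint8_t port, int secure)
{
	uint32_t i;

	/* destination offset, auto-increment write, optionally marked secure */
	falcon_wr32(0x180 + port * 16, start | BIT(24) | (secure ? BIT(28) : 0));

	for (i = 0; i < size / 4; i++) {
		uint32_t word;

		/* a new tag every 0x40 words (256 bytes) */
		if ((i & 0x3f) == 0)
			falcon_wr32(0x188 + port * 16, tag++);
		memcpy(&word, (const uint8_t *)data + i * 4, 4);
		falcon_wr32(0x184 + port * 16, word);
	}

	/* code must be padded to 0x40 words */
	for (; i & 0x3f; i++)
		falcon_wr32(0x184 + port * 16, 0);
}

int main(void)
{
	uint32_t blob[4] = { 0x11, 0x22, 0x33, 0x44 };

	load_imem_sketch(blob, 0x0, sizeof(blob), 0, 0, 0);
	return 0;
}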

nvkm_falcon_v1_load_emem():

76		nvkm_falcon_wr32(falcon, 0xac0 + (port * 8), start | (0x1 << 24));
77		for (i = 0; i < size / 4; i++)
78			nvkm_falcon_wr32(falcon, 0xac4 + (port * 8), ((u32 *)data)[i]);
87			nvkm_falcon_wr32(falcon, 0xac4 + (port * 8),

nvkm_falcon_v1_load_dmem():

107		nvkm_falcon_wr32(falcon, 0x1c0 + (port * 8), start | (0x1 << 24));
108		for (i = 0; i < size / 4; i++)
109			nvkm_falcon_wr32(falcon, 0x1c4 + (port * 8), ((u32 *)data)[i]);
118			nvkm_falcon_wr32(falcon, 0x1c4 + (port * 8),
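
The nvkm_falcon_v1_load_emem() and nvkm_falcon_v1_load_dmem() excerpts follow one shared pattern, only the register base differs (0xac0/0xac4 for EMEM, 0x1c0/0x1c4 for DMEM): the destination offset is written with bit 24 set to enable auto-increment on write, and the payload is then pushed one 32-bit word at a time through the data port at base + 4. A small sketch of that pattern, with falcon_wr32() again standing in for nvkm_falcon_wr32():

#include <stdint.h>
#include <stdio.h>
#include <string.h>

static void falcon_wr32(uint32_t addr, uint32_t val)
{
	printf("wr32 %#06x <- %#010x\n", (unsigned)addr, (unsigned)val);
}

/* mem_base: 0x1c0 for DMEM, 0xac0 for EMEM; the data port sits at base + 4 */
static void load_mem_sketch(uint32_t mem_base, const void *data,
			    uint32_t start, uint32_t size, uint8_t port)
{
	uint32_t i;

	/* destination offset, bit 24 = auto-increment on write */
	falcon_wr32(mem_base + port * 8, start | (0x1 << 24));
	for (i = 0; i < size / 4; i++) {
		uint32_t word;

		memcpy(&word, (const uint8_t *)data + i * 4, 4);
		falcon_wr32(mem_base + 4 + port * 8, word);
	}
	/* a trailing partial word (size % 4 != 0) is masked and written
	 * separately, as in lines 87 and 118 above */
}

int main(void)
{
	uint32_t blob[2] = { 0x1234, 0x5678 };

	load_mem_sketch(0x1c0, blob, 0, sizeof(blob), 0);	/* DMEM */
	load_mem_sketch(0xac0, blob, 0, sizeof(blob), 0);	/* EMEM */
	return 0;
}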

nvkm_falcon_v1_read_emem():

132		nvkm_falcon_wr32(falcon, 0xac0 + (port * 8), start | (0x1 << 25));
133		for (i = 0; i < size / 4; i++)
134			((u32 *)data)[i] = nvkm_falcon_rd32(falcon, 0xac4 + (port * 8));
141			u32 extra = nvkm_falcon_rd32(falcon, 0xac4 + (port * 8));
144				((u8 *)data)[i] = (u8)(extra & 0xff);

nvkm_falcon_v1_read_dmem():

164		nvkm_falcon_wr32(falcon, 0x1c0 + (port * 8), start | (0x1 << 25));
165		for (i = 0; i < size / 4; i++)
166			((u32 *)data)[i] = nvkm_falcon_rd32(falcon, 0x1c4 + (port * 8));
173			u32 extra = nvkm_falcon_rd32(falcon, 0x1c4 + (port * 8));
176				((u8 *)data)[i] = (u8)(extra & 0xff);
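
The two read paths mirror the loads: bit 25 instead of bit 24 selects auto-increment on read, whole words are pulled from the data port, and when the requested size is not a multiple of four the remaining bytes are peeled out of one extra word ("extra & 0xff", shifting right by 8 per byte). A self-contained approximation, with falcon_rd32()/falcon_wr32() as illustrative stand-ins and a fake backing store in place of the hardware:

#include <stdint.h>
#include <stdio.h>

/* fake backing store standing in for the falcon's data memory */
static const uint32_t fake_dmem[] = { 0x11223344, 0x55667788, 0x99aabbcc };
static uint32_t fake_idx;

static void falcon_wr32(uint32_t addr, uint32_t val)
{
	(void)addr; (void)val;
	fake_idx = 0;			/* model "set offset, auto-increment" */
}

static uint32_t falcon_rd32(uint32_t addr)
{
	(void)addr;
	return fake_dmem[fake_idx++];	/* each read returns the next word */
}

static void read_mem_sketch(uint32_t mem_base, uint32_t start, uint32_t size,
			    uint8_t port, void *data)
{
	uint32_t rem = size % 4;
	uint32_t i;

	size -= rem;
	/* source offset, bit 25 = auto-increment on read */
	falcon_wr32(mem_base + port * 8, start | (0x1 << 25));
	for (i = 0; i < size / 4; i++)
		((uint32_t *)data)[i] = falcon_rd32(mem_base + 4 + port * 8);

	if (rem) {
		uint32_t extra = falcon_rd32(mem_base + 4 + port * 8);

		for (i = size; i < size + rem; i++) {
			((uint8_t *)data)[i] = (uint8_t)(extra & 0xff);
			extra >>= 8;
		}
	}
}

int main(void)
{
	uint32_t raw[3] = { 0 };
	uint8_t *buf = (uint8_t *)raw;
	unsigned int i;

	read_mem_sketch(0x1c0, 0, 11, 0, buf);	/* 11 bytes: 2 words + 3 tail bytes */
	for (i = 0; i < 11; i++)
		printf("%02x ", buf[i]);
	printf("\n");
	return 0;
}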

nvkm_falcon_v1_bind_context():

190			nvkm_falcon_wr32(falcon, 0x10c, 0x0);
194		nvkm_falcon_wr32(falcon, 0x10c, 0x1);
197		nvkm_falcon_wr32(falcon, fbif + 4 * FALCON_DMAIDX_UCODE, 0x4);
198		nvkm_falcon_wr32(falcon, fbif + 4 * FALCON_DMAIDX_VIRT, 0x0);
200		nvkm_falcon_wr32(falcon, fbif + 4 * FALCON_DMAIDX_PHYS_VID, 0x4);
201		nvkm_falcon_wr32(falcon, fbif + 4 * FALCON_DMAIDX_PHYS_SYS_COH, 0x5);
202		nvkm_falcon_wr32(falcon, fbif + 4 * FALCON_DMAIDX_PHYS_SYS_NCOH, 0x6);
206		case NVKM_MEM_TARGET_VRAM: inst_loc = 0; break;
215		nvkm_falcon_mask(falcon, 0x048, 0x1, 0x1);
216		nvkm_falcon_wr32(falcon, 0x054,
217			((nvkm_memory_addr(ctx) >> 12) & 0xfffffff) |
220		nvkm_falcon_mask(falcon, 0x090, 0x10000, 0x10000);
221		nvkm_falcon_mask(falcon, 0x0a4, 0x8, 0x8);
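
nvkm_falcon_v1_bind_context() first toggles 0x10c (0x0 to unbind, 0x1 when binding a context), then programs one FBIF transfer-configuration word per falcon DMA index before pointing register 0x054 at the instance block (physical address >> 12 in the low 28 bits, plus further bits not shown in this excerpt) and setting what look like enable bits in 0x048, 0x090 and 0x0a4. Below is a sketch of just the FBIF aperture programming; the FALCON_DMAIDX_* numeric values and the fbif base are illustrative assumptions, only the per-index aperture values (0x4/0x0/0x4/0x5/0x6) come from the listing:

#include <stdint.h>
#include <stdio.h>

/* assumed index assignment; only the names appear in the listing */
enum falcon_dmaidx {
	FALCON_DMAIDX_UCODE		= 0,
	FALCON_DMAIDX_VIRT		= 1,
	FALCON_DMAIDX_PHYS_VID		= 2,
	FALCON_DMAIDX_PHYS_SYS_COH	= 3,
	FALCON_DMAIDX_PHYS_SYS_NCOH	= 4,
};

static void falcon_wr32(uint32_t addr, uint32_t val)
{
	printf("wr32 %#06x <- %#x\n", (unsigned)addr, (unsigned)val);
}

static void bind_fbif_sketch(uint32_t fbif)
{
	/* one aperture/transfer-config word per DMA index,
	 * values as in lines 197-202 above */
	falcon_wr32(fbif + 4 * FALCON_DMAIDX_UCODE,         0x4);
	falcon_wr32(fbif + 4 * FALCON_DMAIDX_VIRT,          0x0);
	falcon_wr32(fbif + 4 * FALCON_DMAIDX_PHYS_VID,      0x4);
	falcon_wr32(fbif + 4 * FALCON_DMAIDX_PHYS_SYS_COH,  0x5);
	falcon_wr32(fbif + 4 * FALCON_DMAIDX_PHYS_SYS_NCOH, 0x6);
}

int main(void)
{
	bind_fbif_sketch(0x600);	/* fbif offset varies per falcon instance */
	return 0;
}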

nvkm_falcon_v1_set_start_addr():

227		nvkm_falcon_wr32(falcon, 0x104, start_addr);

nvkm_falcon_v1_start():

233		u32 reg = nvkm_falcon_rd32(falcon, 0x100);
236			nvkm_falcon_wr32(falcon, 0x130, 0x2);
238			nvkm_falcon_wr32(falcon, 0x100, 0x2);
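
nvkm_falcon_v1_start() reads the CPU control register at 0x100 and then writes the start value 0x2 either through 0x130 or directly to 0x100; the bit that selects between the two paths is not part of this excerpt, so cpuctl_alias_en() in the sketch below is a hypothetical placeholder for that test:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static uint32_t falcon_rd32(uint32_t addr)
{
	(void)addr;
	return 0;	/* stand-in for the MMIO read */
}

static void falcon_wr32(uint32_t addr, uint32_t val)
{
	printf("wr32 %#06x <- %#x\n", (unsigned)addr, (unsigned)val);
}

/* hypothetical: whichever bit of 0x100 selects the alias register path */
static bool cpuctl_alias_en(uint32_t cpuctl)
{
	return cpuctl & (1u << 6);
}

static void falcon_start_sketch(void)
{
	uint32_t reg = falcon_rd32(0x100);

	if (cpuctl_alias_en(reg))
		falcon_wr32(0x130, 0x2);	/* start via the alias register */
	else
		falcon_wr32(0x100, 0x2);	/* start via 0x100 directly */
}

int main(void)
{
	falcon_start_sketch();
	return 0;
}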

nvkm_falcon_v1_wait_for_halt():

247		ret = nvkm_wait_msec(device, ms, falcon->addr + 0x100, 0x10, 0x10);
248		if (ret < 0)
251		return 0;

nvkm_falcon_v1_clear_interrupt():

261		nvkm_falcon_mask(falcon, 0x004, mask, mask);
263		ret = nvkm_wait_msec(device, 10, falcon->addr + 0x008, mask, 0x0);
264		if (ret < 0)
267		return 0;

falcon_v1_wait_idle():

276		ret = nvkm_wait_msec(device, 10, falcon->addr + 0x04c, 0xffff, 0x0);
277		if (ret < 0)
280		return 0;
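
The three helpers above all lean on the same nvkm_wait_msec() idiom: poll a register until (value & mask) equals the expected data, and return a negative error on timeout. wait_for_halt() watches bit 0x10 of 0x100, clear_interrupt() first sets the requested bits in 0x004 and then waits for them to drop out of 0x008, and falcon_v1_wait_idle() waits for the low 16 bits of 0x04c to clear. A stand-alone approximation of that polling pattern (poll_reg_msec() and falcon_rd32() are illustrative, not the nvkm API):

#include <stdint.h>
#include <stdio.h>
#include <time.h>

static uint32_t falcon_rd32(uint32_t addr)
{
	/* pretend 0x100 reads back as halted and everything else reads 0 */
	return addr == 0x100 ? 0x10 : 0x0;
}

/* poll until (reg & mask) == data, giving up after roughly timeout_ms */
static int poll_reg_msec(uint32_t addr, uint32_t mask, uint32_t data,
			 unsigned int timeout_ms)
{
	struct timespec one_ms = { .tv_sec = 0, .tv_nsec = 1000000 };
	unsigned int elapsed;

	for (elapsed = 0; elapsed <= timeout_ms; elapsed++) {
		if ((falcon_rd32(addr) & mask) == data)
			return 0;
		nanosleep(&one_ms, NULL);
	}
	return -1;	/* timed out */
}

int main(void)
{
	/* wait-for-halt: bit 0x10 of 0x100 must be set */
	if (poll_reg_msec(0x100, 0x10, 0x10, 10))
		fprintf(stderr, "halt timeout\n");
	/* wait-idle: low 16 bits of 0x04c must read back as zero */
	if (poll_reg_msec(0x04c, 0xffff, 0x0, 10))
		fprintf(stderr, "idle timeout\n");
	return 0;
}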

nvkm_falcon_v1_enable():

289		ret = nvkm_wait_msec(device, 10, falcon->addr + 0x10c, 0x6, 0x0);
290		if (ret < 0) {
300		nvkm_falcon_wr32(falcon, 0x010, 0xff);
302		return 0;

nvkm_falcon_v1_disable():

309		nvkm_falcon_wr32(falcon, 0x014, 0xff);
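
Enable and disable bracket the engine's interrupt state: nvkm_falcon_v1_enable() waits for bits 0x6 of 0x10c to clear before writing 0xff to 0x010, and nvkm_falcon_v1_disable() writes 0xff to 0x014. Reading 0x010/0x014 as an interrupt unmask/mask pair and the 0x10c wait as memory scrubbing completion is an interpretation of the listing; the sketch below uses the same kind of illustrative stubs as the earlier examples:

#include <stdint.h>
#include <stdio.h>

static uint32_t falcon_rd32(uint32_t addr)
{
	(void)addr;
	return 0;	/* pretend scrubbing already finished */
}

static void falcon_wr32(uint32_t addr, uint32_t val)
{
	printf("wr32 %#06x <- %#x\n", (unsigned)addr, (unsigned)val);
}

static int falcon_enable_sketch(void)
{
	int tries;

	/* wait for bits 0x6 of 0x10c to clear before touching the engine */
	for (tries = 0; falcon_rd32(0x10c) & 0x6; tries++) {
		if (tries >= 10)
			return -1;	/* scrubbing timeout */
	}
	falcon_wr32(0x010, 0xff);	/* interpreted as: unmask all interrupts */
	return 0;
}

static void falcon_disable_sketch(void)
{
	falcon_wr32(0x014, 0xff);	/* interpreted as: mask all interrupts */
}

int main(void)
{
	if (falcon_enable_sketch() == 0)
		falcon_disable_sketch();
	return 0;
}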

nvkm_falcon_v1_new():

335		return 0;