Lines matching full:device
31 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_watermark() local
33 nvkm_mask(device, 0x61c128 + loff, 0x0000003f, watermark); in g94_sor_dp_watermark()
40 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_activesym() local
42 nvkm_mask(device, 0x61c10c + loff, 0x000001fc, TU << 2); in g94_sor_dp_activesym()
43 nvkm_mask(device, 0x61c128 + loff, 0x010f7f00, VTUa << 24 | in g94_sor_dp_activesym()
51 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_audio_sym() local
53 nvkm_mask(device, 0x61c1e8 + soff, 0x0000ffff, h); in g94_sor_dp_audio_sym()
54 nvkm_mask(device, 0x61c1ec + soff, 0x00ffffff, v); in g94_sor_dp_audio_sym()
60 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_drive() local
65 data[0] = nvkm_rd32(device, 0x61c118 + loff) & ~(0x000000ff << shift); in g94_sor_dp_drive()
66 data[1] = nvkm_rd32(device, 0x61c120 + loff) & ~(0x000000ff << shift); in g94_sor_dp_drive()
67 data[2] = nvkm_rd32(device, 0x61c130 + loff); in g94_sor_dp_drive()
70 nvkm_wr32(device, 0x61c118 + loff, data[0] | (dc << shift)); in g94_sor_dp_drive()
71 nvkm_wr32(device, 0x61c120 + loff, data[1] | (pe << shift)); in g94_sor_dp_drive()
72 nvkm_wr32(device, 0x61c130 + loff, data[2]); in g94_sor_dp_drive()
78 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_pattern() local
80 nvkm_mask(device, 0x61c10c + loff, 0x0f000000, pattern << 24); in g94_sor_dp_pattern()
86 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_power() local
94 nvkm_mask(device, 0x61c130 + loff, 0x0000000f, mask); in g94_sor_dp_power()
95 nvkm_mask(device, 0x61c034 + soff, 0x80000000, 0x80000000); in g94_sor_dp_power()
96 nvkm_msec(device, 2000, in g94_sor_dp_power()
97 if (!(nvkm_rd32(device, 0x61c034 + soff) & 0x80000000)) in g94_sor_dp_power()
105 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_dp_links() local
117 nvkm_mask(device, 0x614300 + soff, 0x000c0000, clksor); in g94_sor_dp_links()
118 nvkm_mask(device, 0x61c10c + loff, 0x001f4000, dpctrl); in g94_sor_dp_links()
125 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_war_needed() local
128 switch (nvkm_rd32(device, 0x614300 + soff) & 0x00030000) { in g94_sor_war_needed()
142 struct nvkm_device *device = disp->engine.subdev.device; in g94_sor_war_update_sppll1() local
151 clksor = nvkm_rd32(device, 0x614300 + nv50_ior_base(ior)); in g94_sor_war_update_sppll1()
165 nvkm_mask(device, 0x00e840, 0x80000000, 0x00000000); in g94_sor_war_update_sppll1()
171 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_war_3() local
178 sorpwr = nvkm_rd32(device, 0x61c004 + soff); in g94_sor_war_3()
180 u32 seqctl = nvkm_rd32(device, 0x61c030 + soff); in g94_sor_war_3()
184 nvkm_wr32(device, 0x61c040 + soff + pd_pc * 4, 0x1f008000); in g94_sor_war_3()
186 nvkm_msec(device, 2000, in g94_sor_war_3()
187 if (!(nvkm_rd32(device, 0x61c030 + soff) & 0x10000000)) in g94_sor_war_3()
190 nvkm_mask(device, 0x61c004 + soff, 0x80000001, 0x80000000); in g94_sor_war_3()
191 nvkm_msec(device, 2000, in g94_sor_war_3()
192 if (!(nvkm_rd32(device, 0x61c030 + soff) & 0x10000000)) in g94_sor_war_3()
196 nvkm_wr32(device, 0x61c040 + soff + pd_pc * 4, 0x00002000); in g94_sor_war_3()
197 nvkm_wr32(device, 0x61c040 + soff + pu_pc * 4, 0x1f000000); in g94_sor_war_3()
200 nvkm_mask(device, 0x61c10c + soff, 0x00000001, 0x00000000); in g94_sor_war_3()
201 nvkm_mask(device, 0x614300 + soff, 0x03000000, 0x00000000); in g94_sor_war_3()
204 nvkm_mask(device, 0x61c004 + soff, 0x80000001, 0x80000001); in g94_sor_war_3()
213 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_war_2() local
219 nvkm_mask(device, 0x00e840, 0x80000000, 0x80000000); in g94_sor_war_2()
220 nvkm_mask(device, 0x614300 + soff, 0x03000000, 0x03000000); in g94_sor_war_2()
221 nvkm_mask(device, 0x61c10c + soff, 0x00000001, 0x00000001); in g94_sor_war_2()
223 nvkm_mask(device, 0x61c00c + soff, 0x0f000000, 0x00000000); in g94_sor_war_2()
224 nvkm_mask(device, 0x61c008 + soff, 0xff000000, 0x14000000); in g94_sor_war_2()
225 nvkm_usec(device, 400, NVKM_DELAY); in g94_sor_war_2()
226 nvkm_mask(device, 0x61c008 + soff, 0xff000000, 0x00000000); in g94_sor_war_2()
227 nvkm_mask(device, 0x61c00c + soff, 0x0f000000, 0x01000000); in g94_sor_war_2()
229 if (nvkm_rd32(device, 0x61c004 + soff) & 0x00000001) { in g94_sor_war_2()
230 u32 seqctl = nvkm_rd32(device, 0x61c030 + soff); in g94_sor_war_2()
232 nvkm_wr32(device, 0x61c040 + soff + pu_pc * 4, 0x1f008000); in g94_sor_war_2()
239 struct nvkm_device *device = sor->disp->engine.subdev.device; in g94_sor_state() local
241 u32 ctrl = nvkm_rd32(device, 0x610794 + coff); in g94_sor_state()
288 struct nvkm_device *device = disp->engine.subdev.device; in g94_sor_cnt() local
289 *pmask = (nvkm_rd32(device, 0x610184) & 0x0f000000) >> 24; in g94_sor_cnt()
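Most of the matches above share one idiom: nvkm_mask(device, addr, mask, data) performs a read-modify-write on a display register (roughly: read the old value, clear the bits in mask, OR in data, write it back), while nvkm_msec()/nvkm_usec() poll or delay with a timeout. The sketch below is a minimal, self-contained model of that masking idiom only, not the nouveau code itself: the regs[] array, rd32()/wr32()/mask32() helpers, and the loff value are illustrative stand-ins for the real MMIO accessors and per-link register offset.

/*
 * Toy model of the nvkm_mask() read-modify-write pattern seen above.
 * NOT the nouveau implementation: regs[] stands in for the MMIO aperture.
 */
#include <stdint.h>
#include <stdio.h>

static uint32_t regs[0x4000];                 /* models 0x610000..0x61ffff */

static uint32_t rd32(uint32_t addr)
{
	return regs[(addr - 0x610000) >> 2];
}

static void wr32(uint32_t addr, uint32_t data)
{
	regs[(addr - 0x610000) >> 2] = data;
}

/* Read, clear the bits in mask, OR in data, write back; return the old value. */
static uint32_t mask32(uint32_t addr, uint32_t mask, uint32_t data)
{
	uint32_t temp = rd32(addr);

	wr32(addr, (temp & ~mask) | data);
	return temp;
}

int main(void)
{
	uint32_t loff = 0x800;                /* hypothetical per-link offset */
	uint32_t watermark = 0x20;

	/* Mirrors the g94_sor_dp_watermark() match: only bits 5:0 change. */
	wr32(0x61c128 + loff, 0x12345678);
	mask32(0x61c128 + loff, 0x0000003f, watermark);
	printf("0x61c128 + loff = 0x%08x\n", rd32(0x61c128 + loff));
	return 0;
}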