Lines Matching +full:- +full:i
46 struct drm_nouveau_private *dev_priv = dev->dev_private; in nv20_graph_unload_context()
47 struct nouveau_fifo_engine *pfifo = &dev_priv->engine.fifo; in nv20_graph_unload_context()
55 grctx = chan->engctx[NVOBJ_ENGINE_GR]; in nv20_graph_unload_context()
57 nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_POINTER, grctx->pinst >> 4); in nv20_graph_unload_context()
65 tmp |= (pfifo->channels - 1) << 24; in nv20_graph_unload_context()
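The two fragments above capture the unload sequence: the current context's instance address is latched into NV20_PGRAPH_CHANNEL_CTX_POINTER, and the channel-id field of NV10_PGRAPH_CTX_USER is then pointed at the last FIFO channel so PGRAPH is parked on an unused slot. Below is a minimal standalone sketch of that last bit manipulation; it assumes the driver first masks CTX_USER down to its low 24 bits (that step is not among the matched lines) and that bits 31:24 carry the channel id.

#include <stdint.h>
#include <stdio.h>

/* Illustration only, not driver code.  Models the CTX_USER update in
 * nv20_graph_unload_context(): clear the channel-id field (assumed to
 * be bits 31:24) and point it at the last FIFO channel. */
static uint32_t ctx_user_park(uint32_t ctx_user, uint32_t fifo_channels)
{
	ctx_user &= 0x00ffffff;                 /* drop old channel id (assumption) */
	ctx_user |= (fifo_channels - 1) << 24;  /* park on channel (channels - 1)   */
	return ctx_user;
}

int main(void)
{
	/* e.g. 32 FIFO channels: PGRAPH ends up parked on channel 31 */
	printf("CTX_USER = 0x%08x\n", ctx_user_park(0x01000000, 32));
	return 0;
}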
73 struct drm_nouveau_private *dev_priv = dev->dev_private; in nv20_graph_rdi()
74 int i, writecount = 32; in nv20_graph_rdi() local
77 if (dev_priv->chipset == 0x20) { in nv20_graph_rdi()
83 for (i = 0; i < writecount; i++) in nv20_graph_rdi()
92 int i; in nv20_graph_context_init() local
100 for (i = 0x04d4; i <= 0x04e0; i += 4) in nv20_graph_context_init()
101 nv_wo32(ctx, i, 0x00030303); in nv20_graph_context_init()
102 for (i = 0x04f4; i <= 0x0500; i += 4) in nv20_graph_context_init()
103 nv_wo32(ctx, i, 0x00080000); in nv20_graph_context_init()
104 for (i = 0x050c; i <= 0x0518; i += 4) in nv20_graph_context_init()
105 nv_wo32(ctx, i, 0x01012000); in nv20_graph_context_init()
106 for (i = 0x051c; i <= 0x0528; i += 4) in nv20_graph_context_init()
107 nv_wo32(ctx, i, 0x000105b8); in nv20_graph_context_init()
108 for (i = 0x052c; i <= 0x0538; i += 4) in nv20_graph_context_init()
109 nv_wo32(ctx, i, 0x00080008); in nv20_graph_context_init()
110 for (i = 0x055c; i <= 0x0598; i += 4) in nv20_graph_context_init()
111 nv_wo32(ctx, i, 0x07ff0000); in nv20_graph_context_init()
118 for (i = 0x1c1c; i <= 0x248c; i += 16) { in nv20_graph_context_init()
119 nv_wo32(ctx, (i + 0), 0x10700ff9); in nv20_graph_context_init()
120 nv_wo32(ctx, (i + 4), 0x0436086c); in nv20_graph_context_init()
121 nv_wo32(ctx, (i + 8), 0x000c001b); in nv20_graph_context_init()
135 for (i = 0x355c; i <= 0x3578; i += 4) in nv20_graph_context_init()
136 nv_wo32(ctx, i, 0x001c527c); in nv20_graph_context_init()
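Every *_graph_context_init() variant in this file repeats the same two idioms: fill an inclusive range of word-aligned byte offsets with a single constant, and stamp a three-word record every 16 bytes. Neither helper below exists in the driver (ctx_fill and ctx_fill3 are hypothetical, and the array-backed wo32 stub merely stands in for nv_wo32, which takes a byte offset and writes one 32-bit word); the sketch only makes the shared pattern explicit.

#include <stdint.h>
#include <stdio.h>

#define CTX_WORDS 0x1000

/* Stand-in for nv_wo32(): 32-bit write at a byte offset into the
 * context image. */
static void wo32(uint32_t *ctx, uint32_t byte_off, uint32_t val)
{
	ctx[byte_off / 4] = val;
}

/* Hypothetical helper: constant fill over [first, last], inclusive,
 * one word at a time - the shape of most loops above. */
static void ctx_fill(uint32_t *ctx, uint32_t first, uint32_t last, uint32_t val)
{
	for (uint32_t i = first; i <= last; i += 4)
		wo32(ctx, i, val);
}

/* Hypothetical helper: three-word record every 16 bytes, matching the
 * 0x10700ff9 / 0x0436086c / 0x000c001b loops. */
static void ctx_fill3(uint32_t *ctx, uint32_t first, uint32_t last,
		      uint32_t a, uint32_t b, uint32_t c)
{
	for (uint32_t i = first; i <= last; i += 16) {
		wo32(ctx, i + 0, a);
		wo32(ctx, i + 4, b);
		wo32(ctx, i + 8, c);
	}
}

int main(void)
{
	static uint32_t ctx[CTX_WORDS];

	/* With such helpers, the nv20 body above would read e.g.: */
	ctx_fill(ctx, 0x04d4, 0x04e0, 0x00030303);
	ctx_fill(ctx, 0x055c, 0x0598, 0x07ff0000);
	ctx_fill3(ctx, 0x1c1c, 0x248c, 0x10700ff9, 0x0436086c, 0x000c001b);

	printf("ctx[0x1c1c] = 0x%08x\n", ctx[0x1c1c / 4]);
	return 0;
}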
142 int i; in nv25_graph_context_init() local
154 for (i = 0x0510; i <= 0x051c; i += 4) in nv25_graph_context_init()
155 nv_wo32(ctx, i, 0x00030303); in nv25_graph_context_init()
156 for (i = 0x0530; i <= 0x053c; i += 4) in nv25_graph_context_init()
157 nv_wo32(ctx, i, 0x00080000); in nv25_graph_context_init()
158 for (i = 0x0548; i <= 0x0554; i += 4) in nv25_graph_context_init()
159 nv_wo32(ctx, i, 0x01012000); in nv25_graph_context_init()
160 for (i = 0x0558; i <= 0x0564; i += 4) in nv25_graph_context_init()
161 nv_wo32(ctx, i, 0x000105b8); in nv25_graph_context_init()
162 for (i = 0x0568; i <= 0x0574; i += 4) in nv25_graph_context_init()
163 nv_wo32(ctx, i, 0x00080008); in nv25_graph_context_init()
164 for (i = 0x0598; i <= 0x05d4; i += 4) in nv25_graph_context_init()
165 nv_wo32(ctx, i, 0x07ff0000); in nv25_graph_context_init()
177 for (i = 0x1b04; i <= 0x2374; i += 16) { in nv25_graph_context_init()
178 nv_wo32(ctx, (i + 0), 0x10700ff9); in nv25_graph_context_init()
179 nv_wo32(ctx, (i + 4), 0x0436086c); in nv25_graph_context_init()
180 nv_wo32(ctx, (i + 8), 0x000c001b); in nv25_graph_context_init()
194 for (i = 0x3484; i <= 0x34a0; i += 4) in nv25_graph_context_init()
195 nv_wo32(ctx, i, 0x001c527c); in nv25_graph_context_init()
201 int i; in nv2a_graph_context_init() local
209 for (i = 0x04d4; i <= 0x04e0; i += 4) in nv2a_graph_context_init()
210 nv_wo32(ctx, i, 0x00030303); in nv2a_graph_context_init()
211 for (i = 0x04f4; i <= 0x0500; i += 4) in nv2a_graph_context_init()
212 nv_wo32(ctx, i, 0x00080000); in nv2a_graph_context_init()
213 for (i = 0x050c; i <= 0x0518; i += 4) in nv2a_graph_context_init()
214 nv_wo32(ctx, i, 0x01012000); in nv2a_graph_context_init()
215 for (i = 0x051c; i <= 0x0528; i += 4) in nv2a_graph_context_init()
216 nv_wo32(ctx, i, 0x000105b8); in nv2a_graph_context_init()
217 for (i = 0x052c; i <= 0x0538; i += 4) in nv2a_graph_context_init()
218 nv_wo32(ctx, i, 0x00080008); in nv2a_graph_context_init()
219 for (i = 0x055c; i <= 0x0598; i += 4) in nv2a_graph_context_init()
220 nv_wo32(ctx, i, 0x07ff0000); in nv2a_graph_context_init()
227 for (i = 0x1a9c; i <= 0x22fc; i += 16) { /*XXX: check!! */ in nv2a_graph_context_init()
228 nv_wo32(ctx, (i + 0), 0x10700ff9); in nv2a_graph_context_init()
229 nv_wo32(ctx, (i + 4), 0x0436086c); in nv2a_graph_context_init()
230 nv_wo32(ctx, (i + 8), 0x000c001b); in nv2a_graph_context_init()
244 for (i = 0x341c; i <= 0x3438; i += 4) in nv2a_graph_context_init()
245 nv_wo32(ctx, i, 0x001c527c); in nv2a_graph_context_init()
251 int i; in nv30_31_graph_context_init() local
261 for (i = 0x04e0; i < 0x04e8; i += 4) in nv30_31_graph_context_init()
262 nv_wo32(ctx, i, 0x0fff0000); in nv30_31_graph_context_init()
264 for (i = 0x0508; i < 0x0548; i += 4) in nv30_31_graph_context_init()
265 nv_wo32(ctx, i, 0x07ff0000); in nv30_31_graph_context_init()
273 for (i = 0x0600; i < 0x0640; i += 4) in nv30_31_graph_context_init()
274 nv_wo32(ctx, i, 0x00010588); in nv30_31_graph_context_init()
275 for (i = 0x0640; i < 0x0680; i += 4) in nv30_31_graph_context_init()
276 nv_wo32(ctx, i, 0x00030303); in nv30_31_graph_context_init()
277 for (i = 0x06c0; i < 0x0700; i += 4) in nv30_31_graph_context_init()
278 nv_wo32(ctx, i, 0x0008aae4); in nv30_31_graph_context_init()
279 for (i = 0x0700; i < 0x0740; i += 4) in nv30_31_graph_context_init()
280 nv_wo32(ctx, i, 0x01012000); in nv30_31_graph_context_init()
281 for (i = 0x0740; i < 0x0780; i += 4) in nv30_31_graph_context_init()
282 nv_wo32(ctx, i, 0x00080008); in nv30_31_graph_context_init()
285 for (i = 0x0864; i < 0x0874; i += 4) in nv30_31_graph_context_init()
286 nv_wo32(ctx, i, 0x00040004); in nv30_31_graph_context_init()
287 for (i = 0x1f18; i <= 0x3088; i += 16) { in nv30_31_graph_context_init()
288 nv_wo32(ctx, i + 0, 0x10700ff9); in nv30_31_graph_context_init()
289 nv_wo32(ctx, i + 4, 0x0436086c); in nv30_31_graph_context_init()
290 nv_wo32(ctx, i + 8, 0x000c001b); in nv30_31_graph_context_init()
292 for (i = 0x30b8; i < 0x30c8; i += 4) in nv30_31_graph_context_init()
293 nv_wo32(ctx, i, 0x0000ffff); in nv30_31_graph_context_init()
309 int i; in nv34_graph_context_init() local
319 for (i = 0x04d4; i < 0x04dc; i += 4) in nv34_graph_context_init()
320 nv_wo32(ctx, i, 0x0fff0000); in nv34_graph_context_init()
322 for (i = 0x04fc; i < 0x053c; i += 4) in nv34_graph_context_init()
323 nv_wo32(ctx, i, 0x07ff0000); in nv34_graph_context_init()
331 for (i = 0x05f0; i < 0x0630; i += 4) in nv34_graph_context_init()
332 nv_wo32(ctx, i, 0x00010588); in nv34_graph_context_init()
333 for (i = 0x0630; i < 0x0670; i += 4) in nv34_graph_context_init()
334 nv_wo32(ctx, i, 0x00030303); in nv34_graph_context_init()
335 for (i = 0x06b0; i < 0x06f0; i += 4) in nv34_graph_context_init()
336 nv_wo32(ctx, i, 0x0008aae4); in nv34_graph_context_init()
337 for (i = 0x06f0; i < 0x0730; i += 4) in nv34_graph_context_init()
338 nv_wo32(ctx, i, 0x01012000); in nv34_graph_context_init()
339 for (i = 0x0730; i < 0x0770; i += 4) in nv34_graph_context_init()
340 nv_wo32(ctx, i, 0x00080008); in nv34_graph_context_init()
343 for (i = 0x0858; i < 0x0868; i += 4) in nv34_graph_context_init()
344 nv_wo32(ctx, i, 0x00040004); in nv34_graph_context_init()
345 for (i = 0x15ac; i <= 0x271c; i += 16) { in nv34_graph_context_init()
346 nv_wo32(ctx, i + 0, 0x10700ff9); in nv34_graph_context_init()
347 nv_wo32(ctx, i + 4, 0x0436086c); in nv34_graph_context_init()
348 nv_wo32(ctx, i + 8, 0x000c001b); in nv34_graph_context_init()
350 for (i = 0x274c; i < 0x275c; i += 4) in nv34_graph_context_init()
351 nv_wo32(ctx, i, 0x0000ffff); in nv34_graph_context_init()
367 int i; in nv35_36_graph_context_init() local
377 for (i = 0x04dc; i < 0x04e4; i += 4) in nv35_36_graph_context_init()
378 nv_wo32(ctx, i, 0x0fff0000); in nv35_36_graph_context_init()
380 for (i = 0x0504; i < 0x0544; i += 4) in nv35_36_graph_context_init()
381 nv_wo32(ctx, i, 0x07ff0000); in nv35_36_graph_context_init()
389 for (i = 0x0604; i < 0x0644; i += 4) in nv35_36_graph_context_init()
390 nv_wo32(ctx, i, 0x00010588); in nv35_36_graph_context_init()
391 for (i = 0x0644; i < 0x0684; i += 4) in nv35_36_graph_context_init()
392 nv_wo32(ctx, i, 0x00030303); in nv35_36_graph_context_init()
393 for (i = 0x06c4; i < 0x0704; i += 4) in nv35_36_graph_context_init()
394 nv_wo32(ctx, i, 0x0008aae4); in nv35_36_graph_context_init()
395 for (i = 0x0704; i < 0x0744; i += 4) in nv35_36_graph_context_init()
396 nv_wo32(ctx, i, 0x01012000); in nv35_36_graph_context_init()
397 for (i = 0x0744; i < 0x0784; i += 4) in nv35_36_graph_context_init()
398 nv_wo32(ctx, i, 0x00080008); in nv35_36_graph_context_init()
401 for (i = 0x0868; i < 0x0878; i += 4) in nv35_36_graph_context_init()
402 nv_wo32(ctx, i, 0x00040004); in nv35_36_graph_context_init()
403 for (i = 0x1f1c; i <= 0x308c; i += 16) { in nv35_36_graph_context_init()
404 nv_wo32(ctx, i + 0, 0x10700ff9); in nv35_36_graph_context_init()
405 nv_wo32(ctx, i + 4, 0x0436086c); in nv35_36_graph_context_init()
406 nv_wo32(ctx, i + 8, 0x000c001b); in nv35_36_graph_context_init()
408 for (i = 0x30bc; i < 0x30cc; i += 4) in nv35_36_graph_context_init()
409 nv_wo32(ctx, i, 0x0000ffff); in nv35_36_graph_context_init()
425 struct nv20_graph_engine *pgraph = nv_engine(chan->dev, engine); in nv20_graph_context_new()
427 struct drm_device *dev = chan->dev; in nv20_graph_context_new()
430 ret = nouveau_gpuobj_new(dev, NULL, pgraph->grctx_size, 16, in nv20_graph_context_new()
436 pgraph->grctx_init(grctx); in nv20_graph_context_new()
438 /* nv20: nv_wo32(dev, chan->ramin_grctx->gpuobj, 10, chan->id<<24); */ in nv20_graph_context_new()
440 nv_wo32(grctx, pgraph->grctx_user, (chan->id << 24) | 0x1); in nv20_graph_context_new()
442 nv_wo32(pgraph->ctxtab, chan->id * 4, grctx->pinst >> 4); in nv20_graph_context_new()
443 chan->engctx[engine] = grctx; in nv20_graph_context_new()
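The two writes above set up the per-channel bookkeeping: a channel tag, (chan->id << 24) | 1, goes into the context image at the chipset-dependent grctx_user offset, and the channel's slot in the PGRAPH context table receives the context's instance address shifted right by four, presumably in 16-byte units. A tiny standalone illustration of both encodings follows; the 16-byte-unit reading is inferred from the >> 4 and the meaning of bit 0 of the tag is not visible in the matched lines, so treat both as assumptions.

#include <stdint.h>
#include <stdio.h>

/* Illustration only: one 32-bit context-table entry per channel,
 * holding the context's instance address in 16-byte units
 * (assumption, based on the >> 4 above). */
static uint32_t ctxtab_entry(uint32_t grctx_pinst)
{
	return grctx_pinst >> 4;
}

/* Channel tag written into the context image at grctx_user, mirroring
 * (chan->id << 24) | 0x1 above. */
static uint32_t grctx_user_tag(int chid)
{
	return ((uint32_t)chid << 24) | 0x1;
}

int main(void)
{
	/* hypothetical context image at instance offset 0x00012000, channel 5 */
	printf("ctxtab[5] = 0x%08x\n", ctxtab_entry(0x00012000));
	printf("user tag  = 0x%08x\n", grctx_user_tag(5));
	return 0;
}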
450 struct nv20_graph_engine *pgraph = nv_engine(chan->dev, engine); in nv20_graph_context_del()
451 struct nouveau_gpuobj *grctx = chan->engctx[engine]; in nv20_graph_context_del()
452 struct drm_device *dev = chan->dev; in nv20_graph_context_del()
453 struct drm_nouveau_private *dev_priv = dev->dev_private; in nv20_graph_context_del()
456 spin_lock_irqsave(&dev_priv->context_switch_lock, flags); in nv20_graph_context_del()
464 spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags); in nv20_graph_context_del()
467 nv_wo32(pgraph->ctxtab, chan->id * 4, 0); in nv20_graph_context_del()
470 chan->engctx[engine] = NULL; in nv20_graph_context_del()
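Taken together, the del path above serializes against the interrupt handler through context_switch_lock, unloads the context if this channel is the one PGRAPH currently has resident, clears the channel's context-table slot, and only then releases the context object and the engctx pointer. The toy model below illustrates just that ordering; none of its types or names come from the driver.

#include <stdint.h>
#include <stdio.h>

/* Toy model only - shows the ordering, not the driver's API. */
struct toy_pgraph {
	int      current_chid;   /* channel whose context is resident, -1 if none */
	uint32_t ctxtab[32];     /* one entry per channel */
};

static void toy_context_del(struct toy_pgraph *pg, int chid)
{
	/* 1. (under the context-switch lock in the real code) unload if current */
	if (pg->current_chid == chid)
		pg->current_chid = -1;

	/* 2. drop the channel's context-table entry */
	pg->ctxtab[chid] = 0;

	/* 3. the real code then frees the grctx gpuobj and clears engctx */
}

int main(void)
{
	struct toy_pgraph pg = { .current_chid = 3 };

	pg.ctxtab[3] = 0x1200;
	toy_context_del(&pg, 3);
	printf("current=%d ctxtab[3]=0x%x\n", pg.current_chid, (unsigned)pg.ctxtab[3]);
	return 0;
}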
474 nv20_graph_set_tile_region(struct drm_device *dev, int i) in nv20_graph_set_tile_region() argument
476 struct drm_nouveau_private *dev_priv = dev->dev_private; in nv20_graph_set_tile_region()
477 struct nouveau_tile_reg *tile = &dev_priv->tile.reg[i]; in nv20_graph_set_tile_region()
479 nv_wr32(dev, NV20_PGRAPH_TLIMIT(i), tile->limit); in nv20_graph_set_tile_region()
480 nv_wr32(dev, NV20_PGRAPH_TSIZE(i), tile->pitch); in nv20_graph_set_tile_region()
481 nv_wr32(dev, NV20_PGRAPH_TILE(i), tile->addr); in nv20_graph_set_tile_region()
483 nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0030 + 4 * i); in nv20_graph_set_tile_region()
484 nv_wr32(dev, NV10_PGRAPH_RDI_DATA, tile->limit); in nv20_graph_set_tile_region()
485 nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0050 + 4 * i); in nv20_graph_set_tile_region()
486 nv_wr32(dev, NV10_PGRAPH_RDI_DATA, tile->pitch); in nv20_graph_set_tile_region()
487 nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00EA0010 + 4 * i); in nv20_graph_set_tile_region()
488 nv_wr32(dev, NV10_PGRAPH_RDI_DATA, tile->addr); in nv20_graph_set_tile_region()
490 if (dev_priv->card_type == NV_20) { in nv20_graph_set_tile_region()
491 nv_wr32(dev, NV20_PGRAPH_ZCOMP(i), tile->zcomp); in nv20_graph_set_tile_region()
492 nv_wr32(dev, NV10_PGRAPH_RDI_INDEX, 0x00ea0090 + 4 * i); in nv20_graph_set_tile_region()
493 nv_wr32(dev, NV10_PGRAPH_RDI_DATA, tile->zcomp); in nv20_graph_set_tile_region()
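nv20_graph_set_tile_region() mirrors the same three tile values into two places: the PGRAPH tile registers directly, and indexed state reached through the RDI pair, where a write to NV10_PGRAPH_RDI_INDEX selects a slot and the following NV10_PGRAPH_RDI_DATA write stores to it. The sketch below only enumerates the (index, value) pairs produced for a given tile region; the sample tile values are placeholders, not real configuration.

#include <stdint.h>
#include <stdio.h>

struct tile { uint32_t addr, limit, pitch; };

/* Illustration only: the RDI writes issued above for tile region i,
 * listed as (index, value) pairs. */
static void show_rdi_writes(unsigned i, const struct tile *t)
{
	printf("RDI 0x%08x <- 0x%08x (limit)\n", 0x00EA0030u + 4 * i, t->limit);
	printf("RDI 0x%08x <- 0x%08x (pitch)\n", 0x00EA0050u + 4 * i, t->pitch);
	printf("RDI 0x%08x <- 0x%08x (addr)\n",  0x00EA0010u + 4 * i, t->addr);
}

int main(void)
{
	/* placeholder values for tile region 2 */
	struct tile t = { .addr = 0x04000001, .limit = 0x05ffffff, .pitch = 0x00000100 };

	show_rdi_writes(2, &t);
	return 0;
}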
501 struct drm_nouveau_private *dev_priv = dev->dev_private; in nv20_graph_init()
503 int i; in nv20_graph_init() local
510 nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_TABLE, pgraph->ctxtab->pinst >> 4); in nv20_graph_init()
524 if (dev_priv->chipset >= 0x25) { in nv20_graph_init()
545 for (i = 0; i < NV10_PFB_TILE__SIZE; i++) in nv20_graph_init()
546 nv20_graph_set_tile_region(dev, i); in nv20_graph_init()
561 vramsz = pci_resource_len(dev->pdev, 0) - 1; in nv20_graph_init()
570 nv_wr32(dev, 0x400864, vramsz - 1); in nv20_graph_init()
571 nv_wr32(dev, 0x400868, vramsz - 1); in nv20_graph_init()
589 struct drm_nouveau_private *dev_priv = dev->dev_private; in nv30_graph_init()
590 int i; in nv30_graph_init() local
597 nv_wr32(dev, NV20_PGRAPH_CHANNEL_CTX_TABLE, pgraph->ctxtab->pinst >> 4); in nv30_graph_init()
621 if (dev_priv->chipset == 0x34) { in nv30_graph_init()
635 for (i = 0; i < NV10_PFB_TILE__SIZE; i++) in nv30_graph_init()
636 nv20_graph_set_tile_region(dev, i); in nv30_graph_init()
643 /* vramsz = pci_resource_len(dev->pdev, 0) - 1; */ in nv30_graph_init()
646 if (dev_priv->chipset != 0x34) { in nv30_graph_init()
662 return -EBUSY; in nv20_graph_fini()
696 NV_INFO(dev, "PGRAPH -"); in nv20_graph_isr()
703 NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x " in nv20_graph_isr()
716 nouveau_gpuobj_ref(NULL, &pgraph->ctxtab); in nv20_graph_destroy()
725 struct drm_nouveau_private *dev_priv = dev->dev_private; in nv20_graph_create()
731 return -ENOMEM; in nv20_graph_create()
733 pgraph->base.destroy = nv20_graph_destroy; in nv20_graph_create()
734 pgraph->base.fini = nv20_graph_fini; in nv20_graph_create()
735 pgraph->base.context_new = nv20_graph_context_new; in nv20_graph_create()
736 pgraph->base.context_del = nv20_graph_context_del; in nv20_graph_create()
737 pgraph->base.object_new = nv04_graph_object_new; in nv20_graph_create()
738 pgraph->base.set_tile_region = nv20_graph_set_tile_region; in nv20_graph_create()
740 pgraph->grctx_user = 0x0028; in nv20_graph_create()
741 if (dev_priv->card_type == NV_20) { in nv20_graph_create()
742 pgraph->base.init = nv20_graph_init; in nv20_graph_create()
743 switch (dev_priv->chipset) { in nv20_graph_create()
745 pgraph->grctx_init = nv20_graph_context_init; in nv20_graph_create()
746 pgraph->grctx_size = NV20_GRCTX_SIZE; in nv20_graph_create()
747 pgraph->grctx_user = 0x0000; in nv20_graph_create()
751 pgraph->grctx_init = nv25_graph_context_init; in nv20_graph_create()
752 pgraph->grctx_size = NV25_GRCTX_SIZE; in nv20_graph_create()
755 pgraph->grctx_init = nv2a_graph_context_init; in nv20_graph_create()
756 pgraph->grctx_size = NV2A_GRCTX_SIZE; in nv20_graph_create()
757 pgraph->grctx_user = 0x0000; in nv20_graph_create()
765 pgraph->base.init = nv30_graph_init; in nv20_graph_create()
766 switch (dev_priv->chipset) { in nv20_graph_create()
769 pgraph->grctx_init = nv30_31_graph_context_init; in nv20_graph_create()
770 pgraph->grctx_size = NV30_31_GRCTX_SIZE; in nv20_graph_create()
773 pgraph->grctx_init = nv34_graph_context_init; in nv20_graph_create()
774 pgraph->grctx_size = NV34_GRCTX_SIZE; in nv20_graph_create()
778 pgraph->grctx_init = nv35_36_graph_context_init; in nv20_graph_create()
779 pgraph->grctx_size = NV35_36_GRCTX_SIZE; in nv20_graph_create()
790 &pgraph->ctxtab); in nv20_graph_create()
796 NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base); in nv20_graph_create()
815 if (dev_priv->card_type == NV_20) { in nv20_graph_create()
820 if (dev_priv->chipset < 0x25) in nv20_graph_create()
831 if (0x00000003 & (1 << (dev_priv->chipset & 0x0f))) in nv20_graph_create()
834 if (0x00000010 & (1 << (dev_priv->chipset & 0x0f))) in nv20_graph_create()
837 if (0x000001e0 & (1 << (dev_priv->chipset & 0x0f))) in nv20_graph_create()
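The three guards above select a chipset-specific graphics class by testing a constant mask against a one-hot bit derived from the chipset's low nibble: 0x00000003 covers 0x30/0x31, 0x00000010 covers 0x34, and 0x000001e0 covers 0x35 through 0x38. Which object class each branch actually registers is not visible in the matched lines, so the standalone decode below only demonstrates the mask trick itself.

#include <stdint.h>
#include <stdio.h>

/* Decodes the guard pattern above: mask & (1 << (chipset & 0x0f)). */
static int mask_matches(uint32_t mask, int chipset)
{
	return (mask & (1u << (chipset & 0x0f))) != 0;
}

int main(void)
{
	const int chipsets[] = { 0x30, 0x31, 0x34, 0x35, 0x36, 0x38 };

	for (unsigned n = 0; n < sizeof(chipsets) / sizeof(chipsets[0]); n++) {
		int c = chipsets[n];
		printf("chipset 0x%02x: 0x003->%d 0x010->%d 0x1e0->%d\n", c,
		       mask_matches(0x00000003, c),
		       mask_matches(0x00000010, c),
		       mask_matches(0x000001e0, c));
	}
	return 0;
}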