Lines Matching defs:gpu (definition and use sites of gpu in the msm Adreno a6xx preemption code, a6xx_preempt.c)

29 static inline void set_preempt_state(struct a6xx_gpu *gpu,
38 atomic_set(&gpu->preempt_state, new);
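Lines 29 and 38 are the body of set_preempt_state(). A minimal sketch of how they likely fit together; the enum a6xx_preempt_state type and the memory barriers are assumptions modeled on the a5xx counterpart, not visible in the matches:

static inline void set_preempt_state(struct a6xx_gpu *gpu,
		enum a6xx_preempt_state new)
{
	/*
	 * preempt_state is read from the submit path, the IRQ handler
	 * and the watchdog without a lock, so order the store against
	 * the accesses around it (assumed; not in the matches).
	 */
	smp_mb__before_atomic();
	atomic_set(&gpu->preempt_state, new);
	smp_mb__after_atomic();
}

The atomic state machine is what lets those three contexts coordinate without holding a spinlock across hardware writes.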
44 static inline void update_wptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
54 gpu_write(gpu, REG_A6XX_CP_RB_WPTR, wptr);
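Lines 44 and 54 open and close update_wptr(). A sketch connecting them, assuming a get_wptr() helper and the per-ring preempt_lock from struct msm_ringbuffer, neither of which appears in the matches:

static inline void update_wptr(struct msm_gpu *gpu, struct msm_ringbuffer *ring)
{
	unsigned long flags;
	uint32_t wptr;

	if (!ring)
		return;

	/* Snapshot the software wptr under the ring lock ... */
	spin_lock_irqsave(&ring->preempt_lock, flags);
	wptr = get_wptr(ring);
	spin_unlock_irqrestore(&ring->preempt_lock, flags);

	/* ... then publish it to the CP outside the lock */
	gpu_write(gpu, REG_A6XX_CP_RB_WPTR, wptr);
}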
63 static struct msm_ringbuffer *get_next_ring(struct msm_gpu *gpu)
65 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
71 for (i = 0; i < gpu->nr_rings; i++) {
73 struct msm_ringbuffer *ring = gpu->rb[i];
76 empty = (get_wptr(ring) == gpu->funcs->get_rptr(gpu, ring));
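Lines 63 to 76 are get_next_ring(), which scans gpu->rb[] in priority order. A sketch of the loop; the locking is assumed, and the adreno_gpu local from line 65 is dropped here for brevity:

static struct msm_ringbuffer *get_next_ring(struct msm_gpu *gpu)
{
	unsigned long flags;
	int i;

	/* rb[0] is the highest-priority ring, so the first hit wins */
	for (i = 0; i < gpu->nr_rings; i++) {
		bool empty;
		struct msm_ringbuffer *ring = gpu->rb[i];

		spin_lock_irqsave(&ring->preempt_lock, flags);
		empty = (get_wptr(ring) == gpu->funcs->get_rptr(gpu, ring));
		spin_unlock_irqrestore(&ring->preempt_lock, flags);

		if (!empty)
			return ring;
	}

	return NULL;
}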
92 struct msm_gpu *gpu = &a6xx_gpu->base.base;
93 struct drm_device *dev = gpu->dev;
98 dev_err(dev->dev, "%s: preemption timed out\n", gpu->name);
99 kthread_queue_work(gpu->worker, &gpu->recover_work);
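Lines 92 to 99 sit in the preemption watchdog. A sketch of the timer callback, assuming the timer_list plumbing and a try_preempt_state() compare-and-swap helper named by analogy with set_preempt_state(); neither is in the matches:

static void a6xx_preempt_timer(struct timer_list *t)
{
	struct a6xx_gpu *a6xx_gpu = from_timer(a6xx_gpu, t, preempt_timer);
	struct msm_gpu *gpu = &a6xx_gpu->base.base;
	struct drm_device *dev = gpu->dev;

	/* Only fault if a preemption is actually still in flight */
	if (!try_preempt_state(a6xx_gpu, PREEMPT_TRIGGERED, PREEMPT_FAULTED))
		return;

	dev_err(dev->dev, "%s: preemption timed out\n", gpu->name);
	kthread_queue_work(gpu->worker, &gpu->recover_work);
}

Queueing recover_work instead of resetting inline keeps the heavy recovery path out of timer context.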
139 void a6xx_preempt_irq(struct msm_gpu *gpu)
142 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
144 struct drm_device *dev = gpu->dev;
159 status = gpu_read(gpu, REG_A6XX_CP_CONTEXT_SWITCH_CNTL);
161 DRM_DEV_ERROR(&gpu->pdev->dev,
165 gpu->name);
166 kthread_queue_work(gpu->worker, &gpu->recover_work);
175 update_wptr(gpu, a6xx_gpu->cur_ring);
185 a6xx_preempt_trigger(gpu);
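Lines 139 to 185 span the completion interrupt handler. A condensed sketch of the flow; the STOP bit check, the next_ring bookkeeping and the state transitions are assumptions beyond what the matches show:

void a6xx_preempt_irq(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
	struct drm_device *dev = gpu->dev;
	uint32_t status;

	/* The CP answered, so the watchdog is no longer needed */
	timer_delete(&a6xx_gpu->preempt_timer);

	/*
	 * The CP clears the stop bit before raising the interrupt; if
	 * it is still set the context switch never completed, so log
	 * it and hand the GPU to the recovery worker (lines 159-166).
	 */
	status = gpu_read(gpu, REG_A6XX_CP_CONTEXT_SWITCH_CNTL);
	if (unlikely(status & A6XX_CP_CONTEXT_SWITCH_CNTL_STOP)) {
		DRM_DEV_ERROR(&gpu->pdev->dev, "preemption faulted\n");
		dev_err(dev->dev, "%s: preemption failed to complete\n",
			gpu->name);
		kthread_queue_work(gpu->worker, &gpu->recover_work);
		return;
	}

	/* The switch is done: the target ring is now current */
	a6xx_gpu->cur_ring = a6xx_gpu->next_ring;
	a6xx_gpu->next_ring = NULL;

	update_wptr(gpu, a6xx_gpu->cur_ring);

	set_preempt_state(a6xx_gpu, PREEMPT_NONE);

	/* A higher-priority ring may have filled up in the meantime */
	a6xx_preempt_trigger(gpu);
}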
188 void a6xx_preempt_hw_init(struct msm_gpu *gpu)
190 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
195 if (gpu->nr_rings == 1)
198 for (i = 0; i < gpu->nr_rings; i++) {
203 record_ptr->rptr_addr = shadowptr(a6xx_gpu, gpu->rb[i]);
206 record_ptr->rbase = gpu->rb[i]->iova;
210 gpu_write64(gpu, REG_A6XX_CP_CONTEXT_SWITCH_SMMU_INFO, 0);
213 gpu_write(gpu, REG_A6XX_RB_CONTEXT_SWITCH_GMEM_SAVE_RESTORE_ENABLE, 0x1);
221 a6xx_gpu->cur_ring = gpu->rb[0];
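Lines 188 to 221 are a6xx_preempt_hw_init(). A sketch of the per-ring record setup; the a6xx_gpu->preempt[] array holding the kernel mappings of the records is an assumption, as are the struct's other fields:

void a6xx_preempt_hw_init(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
	int i;

	/* Nothing to set up when preemption is disabled */
	if (gpu->nr_rings == 1)
		return;

	for (i = 0; i < gpu->nr_rings; i++) {
		struct a6xx_preempt_record *record_ptr = a6xx_gpu->preempt[i];

		/* Seed each record so the CP can save/restore the ring */
		record_ptr->rptr_addr = shadowptr(a6xx_gpu, gpu->rb[i]);
		record_ptr->rbase = gpu->rb[i]->iova;
	}

	/* Zero SMMU info: no pagetable switch on the first preemption */
	gpu_write64(gpu, REG_A6XX_CP_CONTEXT_SWITCH_SMMU_INFO, 0);

	/* Have the hardware save/restore GMEM across context switches */
	gpu_write(gpu, REG_A6XX_RB_CONTEXT_SWITCH_GMEM_SAVE_RESTORE_ENABLE, 0x1);

	/* Always come up on the highest-priority ring */
	a6xx_gpu->cur_ring = gpu->rb[0];
}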
224 void a6xx_preempt_trigger(struct msm_gpu *gpu)
226 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
233 if (gpu->nr_rings == 1)
263 ring = get_next_ring(gpu);
271 update_wptr(gpu, a6xx_gpu->cur_ring);
305 gpu_write64(gpu,
309 gpu_write64(gpu,
331 gpu_write(gpu, REG_A6XX_CP_CONTEXT_SWITCH_CNTL, cntl);
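Lines 224 to 331 cover a6xx_preempt_trigger(). A condensed sketch of the sequence; the register names other than the two in the matches, the per-ring iova arrays, the watchdog timeout and the cntl encoding are assumptions:

void a6xx_preempt_trigger(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
	struct msm_ringbuffer *ring;
	unsigned int cntl = 0;	/* the real driver composes level/flags here */

	/* Preemption is meaningless with a single ring */
	if (gpu->nr_rings == 1)
		return;

	/* Pick the highest-priority ring with work queued (line 263) */
	ring = get_next_ring(gpu);

	/* Nothing better to run: restore the wptr and bail (line 271) */
	if (!ring || ring == a6xx_gpu->cur_ring) {
		update_wptr(gpu, a6xx_gpu->cur_ring);
		return;
	}

	a6xx_gpu->next_ring = ring;

	/*
	 * Point the CP at the target ring's SMMU info and preemption
	 * record before pulling the trigger (lines 305 and 309).
	 */
	gpu_write64(gpu, REG_A6XX_CP_CONTEXT_SWITCH_SMMU_INFO,
		    a6xx_gpu->preempt_smmu_iova[ring->id]);
	gpu_write64(gpu, REG_A6XX_CP_CONTEXT_SWITCH_PRIV_NON_SECURE_RESTORE_ADDR,
		    a6xx_gpu->preempt_iova[ring->id]);

	/* Arm the watchdog in case the CP never answers */
	mod_timer(&a6xx_gpu->preempt_timer, jiffies + msecs_to_jiffies(10000));

	set_preempt_state(a6xx_gpu, PREEMPT_TRIGGERED);

	/* Kick the context switch (line 331) */
	gpu_write(gpu, REG_A6XX_CP_CONTEXT_SWITCH_CNTL, cntl);
}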
338 struct msm_gpu *gpu = &adreno_gpu->base;
345 ptr = msm_gem_kernel_new(gpu->dev,
347 MSM_BO_WC | MSM_BO_MAP_PRIV, gpu->vm, &bo, &iova);
362 ptr = msm_gem_kernel_new(gpu->dev,
365 gpu->vm, &bo, &iova);
380 msm_iommu_pagetable_params(to_msm_vm(gpu->vm)->mmu, &ttbr, &asid);
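Lines 338 to 380 fall inside preempt_init_ring(), which backs each ring with two GEM buffers: a CP-writable preemption record and a read-only SMMU info block carrying the pagetable parameters. A sketch under those assumptions; the buffer sizes, the info-block layout and the iova bookkeeping fields are hypothetical:

/* Hypothetical layout and sizes, for illustration only */
struct smmu_info_sketch { u64 ttbr0; u32 asid; };
#define PREEMPT_RECORD_SIZE	SZ_2M
#define PREEMPT_SMMU_INFO_SIZE	PAGE_SIZE

static int preempt_init_ring(struct a6xx_gpu *a6xx_gpu,
		struct msm_ringbuffer *ring)
{
	struct adreno_gpu *adreno_gpu = &a6xx_gpu->base;
	struct msm_gpu *gpu = &adreno_gpu->base;
	struct drm_gem_object *bo = NULL;
	struct smmu_info_sketch *info;
	phys_addr_t ttbr;
	u64 iova = 0;
	void *ptr;
	int asid;

	/* CP-writable preemption record for this ring (line 345) */
	ptr = msm_gem_kernel_new(gpu->dev, PREEMPT_RECORD_SIZE,
		MSM_BO_WC | MSM_BO_MAP_PRIV, gpu->vm, &bo, &iova);
	if (IS_ERR(ptr))
		return PTR_ERR(ptr);

	a6xx_gpu->preempt_bo[ring->id] = bo;
	a6xx_gpu->preempt_iova[ring->id] = iova;

	/* Read-only SMMU info block the CP consumes on switch (line 362) */
	info = msm_gem_kernel_new(gpu->dev, PREEMPT_SMMU_INFO_SIZE,
		MSM_BO_WC | MSM_BO_MAP_PRIV | MSM_BO_GPU_READONLY,
		gpu->vm, &bo, &iova);
	if (IS_ERR(info))
		return PTR_ERR(info);

	/* Capture the pagetable parameters for the CP (line 380) */
	msm_iommu_pagetable_params(to_msm_vm(gpu->vm)->mmu, &ttbr, &asid);
	info->ttbr0 = ttbr;
	info->asid = asid;

	return 0;
}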
401 void a6xx_preempt_fini(struct msm_gpu *gpu)
403 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
407 for (i = 0; i < gpu->nr_rings; i++)
408 msm_gem_kernel_put(a6xx_gpu->preempt_bo[i], gpu->vm);
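Lines 401 to 408 are nearly the whole of a6xx_preempt_fini(); the sketch below only adds the declarations around the loop that the match list omits:

void a6xx_preempt_fini(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
	int i;

	/* Drop every per-ring record buffer; NULL entries are a no-op */
	for (i = 0; i < gpu->nr_rings; i++)
		msm_gem_kernel_put(a6xx_gpu->preempt_bo[i], gpu->vm);
}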
411 void a6xx_preempt_init(struct msm_gpu *gpu)
413 struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
418 if (gpu->nr_rings <= 1)
421 for (i = 0; i < gpu->nr_rings; i++) {
422 if (preempt_init_ring(a6xx_gpu, gpu->rb[i]))
431 a6xx_gpu->preempt_postamble_ptr = msm_gem_kernel_new(gpu->dev,
434 gpu->vm, &a6xx_gpu->preempt_postamble_bo,
450 a6xx_preempt_fini(gpu);
451 gpu->nr_rings = 1;
453 DRM_DEV_ERROR(&gpu->pdev->dev,
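Lines 411 to 453 outline a6xx_preempt_init() and its failure path. A sketch of the overall shape; the postamble size, the preempt_postamble_iova field, the timer setup and the error message text are assumptions:

void a6xx_preempt_init(struct msm_gpu *gpu)
{
	struct adreno_gpu *adreno_gpu = to_adreno_gpu(gpu);
	struct a6xx_gpu *a6xx_gpu = to_a6xx_gpu(adreno_gpu);
	int i;

	/* Nothing to do with a single ring */
	if (gpu->nr_rings <= 1)
		return;

	for (i = 0; i < gpu->nr_rings; i++) {
		if (preempt_init_ring(a6xx_gpu, gpu->rb[i]))
			goto fail;
	}

	/* One shared postamble buffer for all rings (line 431) */
	a6xx_gpu->preempt_postamble_ptr = msm_gem_kernel_new(gpu->dev,
		PAGE_SIZE, MSM_BO_WC | MSM_BO_MAP_PRIV,
		gpu->vm, &a6xx_gpu->preempt_postamble_bo,
		&a6xx_gpu->preempt_postamble_iova);
	if (IS_ERR(a6xx_gpu->preempt_postamble_ptr))
		goto fail;

	timer_setup(&a6xx_gpu->preempt_timer, a6xx_preempt_timer, 0);

	return;

fail:
	/*
	 * Preemption is an optimization, not a requirement: on any
	 * failure release what was allocated and fall back to a
	 * single ring (lines 450-451).
	 */
	a6xx_preempt_fini(gpu);
	gpu->nr_rings = 1;

	DRM_DEV_ERROR(&gpu->pdev->dev,
		      "preemption init failed, falling back to one ring\n");
}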