Lines matching full:fault, grouped by the function each match appears in:
in nvkm_fault_ntfy_fini():
    struct nvkm_fault *fault = container_of(event, typeof(*fault), event);
    fault->func->buffer.intr(fault->buffer[index], false);

in nvkm_fault_ntfy_init():
    struct nvkm_fault *fault = container_of(event, typeof(*fault), event);
    fault->func->buffer.intr(fault->buffer[index], true);
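Both notify handlers recover the parent nvkm_fault from its embedded event member and then flip the buffer interrupt off or on; only the final bool differs. Below is a minimal, self-contained sketch of that container_of recovery step, using simplified stand-in structs rather than the real nvkm definitions:

    /* Stand-in structs: the event is embedded in the parent object, so a
     * handler that only receives the event pointer can recover the parent. */
    #include <stddef.h>
    #include <stdio.h>

    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct fake_event { int unused; };

    struct fake_fault {
        int id;
        struct fake_event event;    /* embedded member, as in nvkm_fault */
    };

    static void fake_ntfy_init(struct fake_event *event)
    {
        /* Same recovery step as nvkm_fault_ntfy_init(). */
        struct fake_fault *fault = container_of(event, struct fake_fault, event);
        printf("enable buffer interrupts for fault %d\n", fault->id);
    }

    int main(void)
    {
        struct fake_fault fault = { .id = 0 };
        fake_ntfy_init(&fault.event);
        return 0;
    }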
in nvkm_fault_intr():
    struct nvkm_fault *fault = nvkm_fault(subdev);
    return fault->func->intr(fault);

in nvkm_fault_fini():
    struct nvkm_fault *fault = nvkm_fault(subdev);
    if (fault->func->fini)
        fault->func->fini(fault);

in nvkm_fault_init():
    struct nvkm_fault *fault = nvkm_fault(subdev);
    if (fault->func->init)
        fault->func->init(fault);
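These three entry points cast the generic subdev back to the fault subdev and dispatch into the per-chip func table; intr is called unconditionally, while init and fini are guarded, which suggests those two hooks are optional. A minimal sketch of that optional-hook dispatch, with stand-in struct names rather than the real nvkm types:

    #include <stdio.h>

    struct fake_fault;

    struct fake_fault_func {
        void (*init)(struct fake_fault *);  /* may be left NULL */
        void (*fini)(struct fake_fault *);  /* may be left NULL */
    };

    struct fake_fault {
        const struct fake_fault_func *func;
    };

    static void fake_fault_init(struct fake_fault *fault)
    {
        if (fault->func->init)              /* hook is optional */
            fault->func->init(fault);
    }

    static void chip_init(struct fake_fault *fault)
    {
        (void)fault;
        printf("chip-specific init\n");
    }

    int main(void)
    {
        const struct fake_fault_func with_init = { .init = chip_init };
        const struct fake_fault_func without_init = { 0 };
        struct fake_fault a = { .func = &with_init };
        struct fake_fault b = { .func = &without_init };

        fake_fault_init(&a);    /* dispatches to chip_init() */
        fake_fault_init(&b);    /* safely skipped */
        return 0;
    }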
in nvkm_fault_oneinit_buffer():
    nvkm_fault_oneinit_buffer(struct nvkm_fault *fault, int id)
    struct nvkm_subdev *subdev = &fault->subdev;
    buffer->fault = fault;
    fault->func->buffer.info(buffer);
    fault->buffer[id] = buffer;
    fault->func->buffer.entry_size, 0x1000, true,
    /* Pin fault buffer in BAR2. */
    buffer->addr = fault->func->buffer.pin(buffer);
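The matched lines show each buffer being wired up: a back-pointer to the owning fault subdev, a per-chip info() call, registration in fault->buffer[id], backing memory sized from entry_size, and a final pin step that yields the address the hardware will use. The surrounding allocation call is not part of the matches, so reading 0x1000 as the requested alignment and true as a zero-fill flag is an assumption. A user-space sketch of the sizing and alignment arithmetic only, with hypothetical entry counts:

    #include <stdio.h>
    #include <stdlib.h>

    #define FAULT_BUF_ALIGN 0x1000u   /* alignment seen in the matched argument list */

    /* Round n up to a power-of-two alignment. */
    static size_t round_up(size_t n, size_t align)
    {
        return (n + align - 1) & ~(align - 1);
    }

    int main(void)
    {
        size_t entries = 512;       /* hypothetical value a buffer.info() hook might report */
        size_t entry_size = 32;     /* hypothetical per-entry size */
        size_t size = round_up(entries * entry_size, FAULT_BUF_ALIGN);

        void *mem = aligned_alloc(FAULT_BUF_ALIGN, size);
        if (!mem)
            return 1;

        printf("fault buffer: %zu bytes at %p\n", size, mem);
        free(mem);
        return 0;
    }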
in nvkm_fault_oneinit():
    struct nvkm_fault *fault = nvkm_fault(subdev);
    for (i = 0; i < ARRAY_SIZE(fault->buffer); i++) {
        if (i < fault->func->buffer.nr) {
            ret = nvkm_fault_oneinit_buffer(fault, i);
            fault->buffer_nr = i + 1;
    ret = nvkm_event_init(&nvkm_fault_ntfy, 1, fault->buffer_nr,
                          &fault->event);
    if (fault->func->oneinit)
        ret = fault->func->oneinit(fault);
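The one-time init walks the fixed-size buffer array, constructs only as many buffers as the per-chip func table advertises, and records i + 1 in buffer_nr after each success; that count then sizes the notify event and later tells the destructor how much to tear down. A self-contained sketch of this count-what-you-built pattern, with stand-in names and an arbitrary array size:

    #include <stdio.h>
    #include <stdlib.h>

    #define MAX_BUFFERS 2               /* stand-in for ARRAY_SIZE(fault->buffer) */

    struct fake_buffer { int id; };

    struct fake_fault {
        struct fake_buffer *buffer[MAX_BUFFERS];
        int buffer_nr;                  /* how many slots were actually built */
        int hw_buffer_nr;               /* stand-in for fault->func->buffer.nr */
    };

    static int fake_oneinit_buffer(struct fake_fault *fault, int id)
    {
        struct fake_buffer *buffer = calloc(1, sizeof(*buffer));
        if (!buffer)
            return -1;
        buffer->id = id;
        fault->buffer[id] = buffer;
        return 0;
    }

    int main(void)
    {
        struct fake_fault fault = { .hw_buffer_nr = 1 };

        for (int i = 0; i < MAX_BUFFERS; i++) {
            if (i < fault.hw_buffer_nr) {
                if (fake_oneinit_buffer(&fault, i))
                    return 1;
                fault.buffer_nr = i + 1;
            }
        }

        /* fault.buffer_nr would also size the event that signals the buffers. */
        printf("initialised %d of %d possible buffers\n",
               fault.buffer_nr, MAX_BUFFERS);
        for (int i = 0; i < fault.buffer_nr; i++)
            free(fault.buffer[i]);
        return 0;
    }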
in nvkm_fault_dtor():
    struct nvkm_fault *fault = nvkm_fault(subdev);
    nvkm_notify_fini(&fault->nrpfb);
    nvkm_event_fini(&fault->event);
    for (i = 0; i < fault->buffer_nr; i++) {
        if (fault->buffer[i]) {
            nvkm_memory_unref(&fault->buffer[i]->mem);
            kfree(fault->buffer[i]);
    return fault;
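The destructor mirrors the buffer_nr count: it finishes the notify and event objects, unrefs and frees only the buffers that were actually built, and then returns the fault object itself. Returning the pointer rather than freeing it matches the usual nvkm subdev convention, where the core frees whatever the .dtor hands back; treating that as the intent here is an assumption. A small stand-in of that "dtor returns what the caller frees" contract:

    #include <stdio.h>
    #include <stdlib.h>

    struct fake_fault {
        char *buffer;                   /* an owned sub-allocation */
    };

    /* Release everything the object owns, then return the object itself so
     * the caller can free the allocation it originally made. */
    static void *fake_fault_dtor(struct fake_fault *fault)
    {
        free(fault->buffer);
        fault->buffer = NULL;
        return fault;
    }

    int main(void)
    {
        struct fake_fault *fault = calloc(1, sizeof(*fault));
        if (!fault)
            return 1;
        fault->buffer = malloc(64);

        free(fake_fault_dtor(fault));   /* caller frees the returned object */
        printf("torn down\n");
        return 0;
    }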
in nvkm_fault_new_():
    struct nvkm_fault *fault;
    if (!(fault = *pfault = kzalloc(sizeof(*fault), GFP_KERNEL)))
    nvkm_subdev_ctor(&nvkm_fault, device, index, &fault->subdev);
    fault->func = func;
    fault->user.ctor = nvkm_ufault_new;
    fault->user.base = func->user.base;
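The constructor allocates the fault object zeroed, publishes it through the out-parameter and keeps a local alias in a single expression, then wires up the per-chip func table and the user-object constructor. A user-space sketch of that out-parameter idiom; the error code and everything beyond the matched lines are assumptions:

    #include <errno.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct fake_fault_func { int unused; };

    struct fake_fault {
        const struct fake_fault_func *func;
    };

    static int fake_fault_new_(const struct fake_fault_func *func,
                               struct fake_fault **pfault)
    {
        struct fake_fault *fault;

        /* Same shape as: if (!(fault = *pfault = kzalloc(...))) */
        if (!(fault = *pfault = calloc(1, sizeof(*fault))))
            return -ENOMEM;
        fault->func = func;
        return 0;
    }

    int main(void)
    {
        static const struct fake_fault_func func;
        struct fake_fault *fault = NULL;

        if (fake_fault_new_(&func, &fault))
            return 1;
        printf("constructed fault object at %p\n", (void *)fault);
        free(fault);
        return 0;
    }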