/linux/drivers/gpu/drm/msm/
msm_fence.c
     13  static struct msm_gpu *fctx2gpu(struct msm_fence_context *fctx)  argument
     15  struct msm_drm_private *priv = fctx->dev->dev_private;  in fctx2gpu()
     21  struct msm_fence_context *fctx = container_of(t, ...  in deadline_timer()  local
     24  kthread_queue_work(fctx2gpu(fctx)->worker, &fctx->deadline_work);  in deadline_timer()
     31  struct msm_fence_context *fctx = container_of(work, ...  in deadline_work()  local
     35  if (msm_fence_completed(fctx, fctx->next_deadline_fence))  in deadline_work()
     38  msm_devfreq_boost(fctx2gpu(fctx), 2);  in deadline_work()
     46  struct msm_fence_context *fctx;  in msm_fence_context_alloc()  local
     77  msm_fence_context_free(struct msm_fence_context *fctx)  argument
     82  msm_fence_completed(struct msm_fence_context *fctx, uint32_t fence)  argument
     93  msm_update_fence(struct msm_fence_context *fctx, uint32_t fence)  argument
    107  struct msm_fence_context *fctx;  member
    135  struct msm_fence_context *fctx = f->fctx;  in msm_fence_set_deadline()  local
    187  msm_fence_init(struct dma_fence *fence, struct msm_fence_context *fctx)  argument
    [more matches omitted]
msm_fence.h
     79  void msm_fence_context_free(struct msm_fence_context *fctx);
     81  bool msm_fence_completed(struct msm_fence_context *fctx, uint32_t fence);
     82  void msm_update_fence(struct msm_fence_context *fctx, uint32_t fence);
     85  void msm_fence_init(struct dma_fence *fence, struct msm_fence_context *fctx);
msm_ringbuffer.c
     17  struct msm_fence_context *fctx = submit->ring->fctx;  in msm_job_run()  local
     23  msm_fence_init(submit->hw_fence, fctx);  in msm_job_run()
    122  ring->fctx = msm_fence_context_alloc(gpu->dev, &ring->memptrs->fence, name);  in msm_ringbuffer_new()
    138  msm_fence_context_free(ring->fctx);  in msm_ringbuffer_destroy()
msm_ringbuffer.h
     74  struct msm_fence_context *fctx;  member
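Taken together, the msm matches above trace one ownership pattern: each ringbuffer allocates its own fence context at creation (msm_ringbuffer_new), initialises every submitted job's hardware fence against it (msm_job_run), and frees it at teardown (msm_ringbuffer_destroy). Below is a minimal, hypothetical user-space model of that lifetime; the types and helper names are simplified stand-ins chosen for illustration, not the kernel's msm_fence API.

    /* Hypothetical model of the per-ring fence-context lifetime shown above.
     * Types and helpers are simplified stand-ins, not the kernel msm_fence API. */
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    struct fence_context {
        uint32_t *completed;        /* points at the ring's writeback slot */
        uint32_t  last_allocated;   /* last seqno handed out */
    };

    struct ring {
        uint32_t              completed_fence;  /* written back as jobs retire */
        struct fence_context *fctx;             /* owned by the ring */
    };

    /* msm_fence_context_alloc() analogue: bind the context to the ring's slot. */
    static struct fence_context *fence_context_alloc(uint32_t *completed)
    {
        struct fence_context *fctx = calloc(1, sizeof(*fctx));
        if (fctx)
            fctx->completed = completed;
        return fctx;
    }

    /* msm_fence_init() analogue: give the next job a seqno on this timeline. */
    static uint32_t fence_alloc(struct fence_context *fctx)
    {
        return ++fctx->last_allocated;
    }

    /* msm_fence_completed() analogue: wrap-safe "has this seqno retired?". */
    static int fence_completed(const struct fence_context *fctx, uint32_t seqno)
    {
        return (int32_t)(*fctx->completed - seqno) >= 0;
    }

    int main(void)
    {
        struct ring r = { 0, NULL };

        r.fctx = fence_context_alloc(&r.completed_fence);
        if (!r.fctx)
            return 1;

        uint32_t f = fence_alloc(r.fctx);
        r.completed_fence = f;               /* pretend the GPU retired the job */
        printf("fence %u done: %d\n", f, fence_completed(r.fctx, f));

        free(r.fctx);                        /* msm_fence_context_free() analogue */
        return 0;
    }

The real driver also hangs the timeline name and the deadline/boost machinery off the context; the point of the sketch is only the single-owner lifetime tied to the ring.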
/linux/drivers/gpu/drm/nouveau/
nouveau_fence.c
     57  struct nouveau_fence_chan *fctx = nouveau_fctx(fence);  in nouveau_fence_signal()  local
     59  if (!--fctx->notify_ref)  in nouveau_fence_signal()
     78  nouveau_fence_context_kill(struct nouveau_fence_chan *fctx, int error)  argument
     83  spin_lock_irqsave(&fctx->lock, flags);  in nouveau_fence_context_kill()
     84  list_for_each_entry_safe(fence, tmp, &fctx->pending, head) {  in nouveau_fence_context_kill()
     89  nvif_event_block(&fctx->event);  in nouveau_fence_context_kill()
     91  fctx->killed = 1;  in nouveau_fence_context_kill()
     92  spin_unlock_irqrestore(&fctx->lock, flags);  in nouveau_fence_context_kill()
     96  nouveau_fence_context_del(struct nouveau_fence_chan *fctx)  argument
     98  cancel_work_sync(&fctx ...  in nouveau_fence_context_del()
    117  nouveau_fence_context_free(struct nouveau_fence_chan *fctx)  argument
    123  nouveau_fence_update(struct nouveau_channel *chan, struct nouveau_fence_chan *fctx)  argument
    144  struct nouveau_fence_chan *fctx = container_of(work, struct nouveau_fence_chan, ...  in nouveau_fence_uevent_work()  local
    162  struct nouveau_fence_chan *fctx = container_of(event, typeof(*fctx), event);  in nouveau_fence_wait_uevent_handler()  local
    168  nouveau_fence_context_new(struct nouveau_channel *chan, struct nouveau_fence_chan *fctx)  argument
    210  struct nouveau_fence_chan *fctx = chan->fence;  in nouveau_fence_emit()  local
    246  struct nouveau_fence_chan *fctx = nouveau_fctx(fence);  in nouveau_fence_cancel()  local
    261  struct nouveau_fence_chan *fctx = nouveau_fctx(fence);  in nouveau_fence_done()  local
    358  struct nouveau_fence_chan *fctx = chan->fence;  in nouveau_fence_sync()  local
    461  struct nouveau_fence_chan *fctx = nouveau_fctx(fence);  in nouveau_fence_get_timeline_name()  local
    475  struct nouveau_fence_chan *fctx = nouveau_fctx(fence);  in nouveau_fence_is_signaled()  local
    516  struct nouveau_fence_chan *fctx = nouveau_fctx(fence);  in nouveau_fence_release()  local
    534  struct nouveau_fence_chan *fctx = nouveau_fctx(fence);  in nouveau_fence_enable_signaling()  local
    [more matches omitted]
nv84_fence.c
     89  struct nv84_fence_chan *fctx = chan->fence;  in nv84_fence_emit()  local
     90  u64 addr = fctx->vma->addr + nv84_fence_chid(chan) * 16;  in nv84_fence_emit()
     92  return fctx->base.emit32(chan, addr, fence->base.seqno);  in nv84_fence_emit()
     99  struct nv84_fence_chan *fctx = chan->fence;  in nv84_fence_sync()  local
    100  u64 addr = fctx->vma->addr + nv84_fence_chid(prev) * 16;  in nv84_fence_sync()
    102  return fctx->base.sync32(chan, addr, fence->base.seqno);  in nv84_fence_sync()
    116  struct nv84_fence_chan *fctx = chan->fence;  in nv84_fence_context_del()  local
    118  nouveau_bo_wr32(priv->bo, nv84_fence_chid(chan) * 16 / 4, fctx->base.sequence);  in nv84_fence_context_del()
    120  nouveau_vma_del(&fctx->vma);  in nv84_fence_context_del()
    122  nouveau_fence_context_del(&fctx ...  in nv84_fence_context_del()
    131  struct nv84_fence_chan *fctx;  in nv84_fence_context_new()  local
    [more matches omitted]
nv10_fence.c
     61  struct nv10_fence_chan *fctx = chan->fence;  in nv10_fence_context_del()  local
     62  nouveau_fence_context_del(&fctx->base);  in nv10_fence_context_del()
     63  nvif_object_dtor(&fctx->sema);  in nv10_fence_context_del()
     65  nouveau_fence_context_free(&fctx->base);  in nv10_fence_context_del()
     71  struct nv10_fence_chan *fctx;  in nv10_fence_context_new()  local
     73  fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);  in nv10_fence_context_new()
     74  if (!fctx)  in nv10_fence_context_new()
     77  nouveau_fence_context_new(chan, &fctx->base);  in nv10_fence_context_new()
     78  fctx ...  in nv10_fence_context_new()
    [more matches omitted]
nv04_fence.c
     70  struct nv04_fence_chan *fctx = chan->fence;  in nv04_fence_context_del()  local
     71  nouveau_fence_context_del(&fctx->base);  in nv04_fence_context_del()
     73  nouveau_fence_context_free(&fctx->base);  in nv04_fence_context_del()
     79  struct nv04_fence_chan *fctx = kzalloc(sizeof(*fctx), GFP_KERNEL);  in nv04_fence_context_new()  local
     80  if (fctx) {  in nv04_fence_context_new()
     81  nouveau_fence_context_new(chan, &fctx->base);  in nv04_fence_context_new()
     82  fctx->base.emit = nv04_fence_emit;  in nv04_fence_context_new()
     83  fctx->base.sync = nv04_fence_sync;  in nv04_fence_context_new()
     84  fctx ...  in nv04_fence_context_new()
    [more matches omitted]
nv17_fence.c
     41  struct nv10_fence_chan *fctx = chan->fence;  in nv17_fence_sync()  local
     57  PUSH_MTHD(ppush, NV176E, SET_CONTEXT_DMA_SEMAPHORE, fctx->sema.handle,  in nv17_fence_sync()
     65  PUSH_MTHD(npush, NV176E, SET_CONTEXT_DMA_SEMAPHORE, fctx->sema.handle,  in nv17_fence_sync()
     81  struct nv10_fence_chan *fctx;  in nv17_fence_context_new()  local
     86  fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);  in nv17_fence_context_new()
     87  if (!fctx)  in nv17_fence_context_new()
     90  nouveau_fence_context_new(chan, &fctx->base);  in nv17_fence_context_new()
     91  fctx->base.emit = nv10_fence_emit;  in nv17_fence_context_new()
     92  fctx ...  in nv17_fence_context_new()
    [more matches omitted]
nv50_fence.c
     39  struct nv10_fence_chan *fctx;  in nv50_fence_context_new()  local
     45  fctx = chan->fence = kzalloc(sizeof(*fctx), GFP_KERNEL);  in nv50_fence_context_new()
     46  if (!fctx)  in nv50_fence_context_new()
     49  nouveau_fence_context_new(chan, &fctx->base);  in nv50_fence_context_new()
     50  fctx->base.emit = nv10_fence_emit;  in nv50_fence_context_new()
     51  fctx->base.read = nv10_fence_read;  in nv50_fence_context_new()
     52  fctx->base.sync = nv17_fence_sync;  in nv50_fence_context_new()
     62  &fctx->sema);  in nv50_fence_context_new()
gv100_fence.c
     67  struct nv84_fence_chan *fctx;  in gv100_fence_context_new()  local
     74  fctx = chan->fence;  in gv100_fence_context_new()
     75  fctx->base.emit32 = gv100_fence_emit32;  in gv100_fence_context_new()
     76  fctx->base.sync32 = gv100_fence_sync32;  in gv100_fence_context_new()
nvc0_fence.c
     82  struct nv84_fence_chan *fctx = chan->fence;  in nvc0_fence_context_new()  local
     83  fctx->base.emit32 = nvc0_fence_emit32;  in nvc0_fence_context_new()
     84  fctx->base.sync32 = nvc0_fence_sync32;  in nvc0_fence_context_new()
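The nouveau matches above repeat one shape across display generations: each *_fence_context_new() allocates a chipset-specific fence channel that embeds the common nouveau_fence_chan base, initialises the base with nouveau_fence_context_new(), and then fills in the hooks that generation implements (emit/sync on the older parts, emit32/sync32 on nv84 and later). The sketch below is a hypothetical C rendering of that "embedded base plus per-generation hooks" shape; the names are illustrative, not nouveau's types.

    /* Hypothetical sketch of the pattern repeated in the nv04/nv10/nv50/nvc0
     * context-new functions above: allocate a per-generation structure that
     * embeds a common base, initialise the base, then fill in the hooks this
     * generation implements.  Names are illustrative, not nouveau's. */
    #include <stdlib.h>

    struct fence_chan_base {
        int (*emit)(struct fence_chan_base *base, unsigned seqno);
        int (*sync)(struct fence_chan_base *base, unsigned seqno);
    };

    struct nvxx_fence_chan {
        struct fence_chan_base base;   /* common state, like nouveau_fence_chan */
        unsigned long sema_handle;     /* generation-specific extra state */
    };

    static int nvxx_emit(struct fence_chan_base *base, unsigned seqno)
    {
        /* a real backend would write seqno through its semaphore or method */
        (void)base; (void)seqno;
        return 0;
    }

    static int nvxx_sync(struct fence_chan_base *base, unsigned seqno)
    {
        (void)base; (void)seqno;
        return 0;
    }

    /* nvxx_fence_context_new() analogue. */
    struct fence_chan_base *nvxx_fence_context_new(void)
    {
        struct nvxx_fence_chan *fctx = calloc(1, sizeof(*fctx));
        if (!fctx)
            return NULL;
        fctx->base.emit = nvxx_emit;   /* per-generation hooks */
        fctx->base.sync = nvxx_sync;
        return &fctx->base;
    }

Callers only ever hold the base pointer; the wrapping structure is recovered with a container_of-style cast when a backend needs its extra state.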
/linux/drivers/crypto/cavium/nitrox/
nitrox_aead.c
     38  struct flexi_crypto_context *fctx;  in nitrox_aes_gcm_setkey()  local
     46  fctx = nctx->u.fctx;  in nitrox_aes_gcm_setkey()
     47  flags.fu = be64_to_cpu(fctx->flags.f);  in nitrox_aes_gcm_setkey()
     49  fctx->flags.f = cpu_to_be64(flags.fu);  in nitrox_aes_gcm_setkey()
     52  memset(&fctx->crypto, 0, sizeof(fctx->crypto));  in nitrox_aes_gcm_setkey()
     53  memcpy(fctx->crypto.u.key, key, keylen);  in nitrox_aes_gcm_setkey()
     62  struct flexi_crypto_context *fctx = nctx->u.fctx;  in nitrox_aead_setauthsize()  local
    219  struct flexi_crypto_context *fctx = nctx->u.fctx;  in nitrox_aes_gcm_enc()  local
    253  struct flexi_crypto_context *fctx = nctx->u.fctx;  in nitrox_aes_gcm_dec()  local
    347  struct flexi_crypto_context *fctx = nctx->u.fctx;  in nitrox_aead_exit()  local
    363  struct flexi_crypto_context *fctx = nctx->u.fctx;  in nitrox_rfc4106_setkey()  local
    [more matches omitted]
nitrox_skcipher.c
    153  struct flexi_crypto_context *fctx = nctx->u.fctx;  in nitrox_skcipher_exit()  local
    155  memzero_explicit(&fctx->crypto, sizeof(struct crypto_keys));  in nitrox_skcipher_exit()
    156  memzero_explicit(&fctx->auth, sizeof(struct auth_keys));  in nitrox_skcipher_exit()
    171  struct flexi_crypto_context *fctx;  in nitrox_skcipher_setkey()  local
    184  fctx = nctx->u.fctx;  in nitrox_skcipher_setkey()
    185  flags = &fctx->flags;  in nitrox_skcipher_setkey()
    192  memcpy(fctx->crypto.u.key, key, keylen);  in nitrox_skcipher_setkey()
    341  struct flexi_crypto_context *fctx;  in nitrox_aes_xts_setkey()  local
    365  struct flexi_crypto_context *fctx;  in nitrox_aes_ctr_rfc3686_setkey()  local
    [more matches omitted]
nitrox_req.h
    206  struct flexi_crypto_context *fctx;  member
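The nitrox matches follow one key-handling discipline: setkey clears the flexi crypto context and copies the new key material into it, and the exit paths scrub the keys with memzero_explicit() so nothing is left behind in freed memory. A small hypothetical C model of that discipline is sketched below; the types are simplified stand-ins and explicit_zero() only mimics memzero_explicit(), none of this is the nitrox layout.

    /* Hypothetical model of the setkey/exit key hygiene shown above.
     * Simplified stand-in types; explicit_zero() mimics memzero_explicit(). */
    #include <stdint.h>
    #include <string.h>

    #define MODEL_MAX_KEYLEN 64

    struct model_crypto_keys {
        uint8_t key[MODEL_MAX_KEYLEN];
        size_t  keylen;
    };

    struct model_flexi_ctx {
        struct model_crypto_keys crypto;
    };

    /* A memset the compiler is discouraged from optimising away
     * (GCC/Clang barrier; the kernel uses memzero_explicit() instead). */
    static void explicit_zero(void *p, size_t n)
    {
        memset(p, 0, n);
        __asm__ __volatile__("" : : "r"(p) : "memory");
    }

    int model_setkey(struct model_flexi_ctx *fctx, const uint8_t *key, size_t keylen)
    {
        if (keylen > MODEL_MAX_KEYLEN)
            return -1;
        explicit_zero(&fctx->crypto, sizeof(fctx->crypto));  /* drop the old key first */
        memcpy(fctx->crypto.key, key, keylen);
        fctx->crypto.keylen = keylen;
        return 0;
    }

    void model_exit(struct model_flexi_ctx *fctx)
    {
        /* like nitrox_skcipher_exit(): scrub keys before the context goes away */
        explicit_zero(&fctx->crypto, sizeof(fctx->crypto));
    }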
/linux/drivers/crypto/marvell/octeontx/
otx_cptvf_algs.c
    108  if (memcmp(rctx->fctx.hmac.s.hmac_calc,  in validate_hmac_cipher_null()
    109  rctx->fctx.hmac.s.hmac_recv,  in validate_hmac_cipher_null()
    241  struct otx_cpt_fc_ctx *fctx = &rctx->fctx;  in create_ctx_hdr()  local
    273  fctx->enc.enc_ctrl.e.enc_cipher = ctx->cipher_type;  in create_ctx_hdr()
    274  fctx->enc.enc_ctrl.e.aes_key = ctx->key_type;  in create_ctx_hdr()
    275  fctx->enc.enc_ctrl.e.iv_source = OTX_CPT_FROM_CPTR;  in create_ctx_hdr()
    278  memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len * 2);  in create_ctx_hdr()
    280  memcpy(fctx->enc.encr_key, ctx->enc_key, ctx->key_len);  in create_ctx_hdr()
    282  memcpy(fctx ...  in create_ctx_hdr()
    892  struct otx_cpt_fc_ctx *fctx = &rctx->fctx;  in create_aead_ctx_hdr()  local
    [more matches omitted]
otx_cptvf_algs.h
    162  struct otx_cpt_fc_ctx fctx;  member
/linux/security/apparmor/
file.c
    457  static void update_file_ctx(struct aa_file_ctx *fctx, struct aa_label *label,  argument
    463  spin_lock(&fctx->lock);  in update_file_ctx()
    464  old = rcu_dereference_protected(fctx->label,  in update_file_ctx()
    465  lockdep_is_held(&fctx->lock));  in update_file_ctx()
    469  rcu_assign_pointer(fctx->label, l);  in update_file_ctx()
    473  fctx->allow |= request;  in update_file_ctx()
    475  spin_unlock(&fctx->lock);  in update_file_ctx()
    608  struct aa_file_ctx *fctx;  in aa_file_perm()  local
    616  fctx = file_ctx(file);  in aa_file_perm()
    619  flabel = rcu_dereference(fctx ...  in aa_file_perm()
    [more matches omitted]
/linux/fs/
userfaultfd.c
    619  struct userfaultfd_fork_ctx *fctx;  in dup_userfaultfd()  local
    630  list_for_each_entry(fctx, fcs, list)  in dup_userfaultfd()
    631  if (fctx->orig == octx) {  in dup_userfaultfd()
    632  ctx = fctx->new;  in dup_userfaultfd()
    637  fctx = kmalloc(sizeof(*fctx), GFP_KERNEL);  in dup_userfaultfd()
    638  if (!fctx)  in dup_userfaultfd()
    643  kfree(fctx);  in dup_userfaultfd()
    660  fctx->orig = octx;  in dup_userfaultfd()
    661  fctx ...  in dup_userfaultfd()
    669  dup_fctx(struct userfaultfd_fork_ctx *fctx)  argument
    684  struct userfaultfd_fork_ctx *fctx, *n;  in dup_userfaultfd_complete()  local
    695  struct userfaultfd_fork_ctx *fctx, *n;  in dup_userfaultfd_fail()  local
    [more matches omitted]
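The dup_userfaultfd() matches show a find-or-allocate pattern: the fork path first walks the list of already-recorded fork contexts and reuses the entry whose ->orig matches, and only allocates a new orig/new pairing when no match exists. The sketch below is a hypothetical C rendering of that shape using a plain singly linked list instead of the kernel's list_head; the names are illustrative.

    /* Hypothetical sketch of the find-or-allocate pattern in dup_userfaultfd()
     * above: reuse an existing fork-ctx pairing (orig -> new) if one is already
     * on the list, otherwise allocate and link a new one. */
    #include <stdlib.h>

    struct ctx;                        /* stand-in for the per-mm userfaultfd ctx */

    struct fork_ctx {
        struct ctx      *orig;
        struct ctx      *new;
        struct fork_ctx *next;
    };

    /* Return the fork ctx pairing 'octx', creating it if needed; NULL on OOM. */
    struct fork_ctx *fork_ctx_find_or_add(struct fork_ctx **head,
                                          struct ctx *octx, struct ctx *nctx)
    {
        struct fork_ctx *fctx;

        for (fctx = *head; fctx; fctx = fctx->next)   /* list_for_each_entry() analogue */
            if (fctx->orig == octx)
                return fctx;                          /* already recorded for this fork */

        fctx = malloc(sizeof(*fctx));                 /* kmalloc(..., GFP_KERNEL) analogue */
        if (!fctx)
            return NULL;

        fctx->orig = octx;
        fctx->new  = nctx;
        fctx->next = *head;
        *head = fctx;
        return fctx;
    }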
/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
nv04.c
     45  struct nvkm_memory *fctx = device->imem->ramfc;  in nv04_chan_stop()  local
     63  nvkm_kmap(fctx);  in nv04_chan_stop()
     68  u32 cv = (nvkm_ro32(fctx, c->ctxp + data) & ~cm);  in nv04_chan_stop()
     69  nvkm_wo32(fctx, c->ctxp + data, cv | (rv << c->ctxs));  in nv04_chan_stop()
     71  nvkm_done(fctx);  in nv04_chan_stop()
/linux/drivers/crypto/cavium/cpt/
cptvf_algs.h
    113  struct fc_context fctx;  member
/linux/drivers/gpu/drm/msm/adreno/
adreno_gpu.c
    679  ring->memptrs->bv_fence = ring->fctx->completed_fence;  in adreno_hw_init()
    685  if (fence_before(ring->fctx->last_fence, ring->memptrs->fence)) {  in adreno_hw_init()
    686  ring->memptrs->fence = ring->fctx->last_fence;  in adreno_hw_init()
    777  state->ring[i].seqno = gpu->rb[i]->fctx->last_fence;  in adreno_gpu_state_get()
   1051  ring->fctx->last_fence);  in adreno_dump_info()
/linux/drivers/block/
rbd.c
   2499  struct rbd_img_fill_ctx *fctx)  in rbd_img_fill_request_nocopy()  argument
   2504  img_req->data_type = fctx->pos_type;  in rbd_img_fill_request_nocopy()
   2510  fctx->iter = *fctx->pos;  in rbd_img_fill_request_nocopy()
   2517  fctx->set_pos_fn, &fctx->iter);  in rbd_img_fill_request_nocopy()
   2530  * @fctx->pos data buffer.
   2534  * different chunks of @fctx->pos data buffer.
   2536  * @fctx->pos data buffer is assumed to be large enough.
   2541  struct rbd_img_fill_ctx *fctx)  in rbd_img_fill_request()  argument
   2605  struct rbd_img_fill_ctx fctx = { ...  in rbd_img_fill_nodata()  local
   2655  struct rbd_img_fill_ctx fctx = { ...  in __rbd_img_fill_from_bio()  local
   2715  struct rbd_img_fill_ctx fctx = { ...  in __rbd_img_fill_from_bvecs()  local
   [more matches omitted]
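The rbd matches, including the kernel-doc fragments at lines 2530 to 2536, describe a fill context that carries the caller's data position plus a callback used to point successive object requests at different chunks of that data. Below is a small hypothetical user-space model of the callback-and-iterator idea; the types are simplified stand-ins, not the rbd_img_fill_ctx layout.

    /* Hypothetical model of the fill-context idea visible above: the context
     * carries a data position and a set_pos callback that attaches the next
     * chunk of the buffer to each per-object request. */
    #include <stddef.h>
    #include <stdio.h>

    struct data_pos {                 /* stand-in for the bio/bvec position */
        const char *buf;
        size_t      off;
    };

    struct obj_request_model {
        const char *data;
        size_t      len;
    };

    struct img_fill_ctx_model {
        struct data_pos pos;          /* like fctx->pos / fctx->iter */
        /* like fctx->set_pos_fn: attach the next chunk to an object request */
        void (*set_pos_fn)(struct obj_request_model *obj,
                           struct data_pos *it, size_t bytes);
    };

    static void set_pos(struct obj_request_model *obj, struct data_pos *it, size_t bytes)
    {
        obj->data = it->buf + it->off;
        obj->len  = bytes;
        it->off  += bytes;            /* advance the iterator for the next object */
    }

    int main(void)
    {
        static const char payload[] = "0123456789abcdef";
        struct img_fill_ctx_model fctx = { .pos = { payload, 0 }, .set_pos_fn = set_pos };
        struct obj_request_model objs[2];

        /* split the buffer across two object requests, as the fill loop would */
        fctx.set_pos_fn(&objs[0], &fctx.pos, 8);
        fctx.set_pos_fn(&objs[1], &fctx.pos, 8);
        printf("obj0=%.8s obj1=%.8s\n", objs[0].data, objs[1].data);
        return 0;
    }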
/linux/drivers/gpu/drm/nouveau/dispnv04/
crtc.c
   1057  struct nouveau_fence_chan *fctx = chan->fence;  in nv04_finish_page_flip()  local
   1065  if (list_empty(&fctx->flip)) {  in nv04_finish_page_flip()
   1071  s = list_first_entry(&fctx->flip, struct nv04_page_flip_state, head);  in nv04_finish_page_flip()
   1113  struct nouveau_fence_chan *fctx = chan->fence;  in nv04_page_flip_emit()  local
   1122  list_add_tail(&s->head, &fctx->flip);  in nv04_page_flip_emit()
/linux/fs/fuse/
dir.c
    467  struct fuse_secctx *fctx;  in get_security_context()  local
    490  total_len += FUSE_REC_ALIGN(sizeof(*fctx) + namelen +  in get_security_context()
    503  fctx = ptr;  in get_security_context()
    504  fctx->size = lsmctx.len;  in get_security_context()
    505  ptr += sizeof(*fctx);  in get_security_context()