
Searched refs:pt (Results 1 – 25 of 368) sorted by relevance


/linux/tools/perf/util/
intel-pt.c
183 struct intel_pt *pt; member
233 static void intel_pt_dump(struct intel_pt *pt __maybe_unused, in intel_pt_dump()
273 static void intel_pt_dump_event(struct intel_pt *pt, unsigned char *buf, in intel_pt_dump_event() argument
277 intel_pt_dump(pt, buf, len); in intel_pt_dump_event()
293 struct intel_pt *pt = container_of(session->auxtrace, struct intel_pt, in intel_pt_dump_sample() local
297 intel_pt_dump(pt, sample->aux_sample.data, sample->aux_sample.size); in intel_pt_dump_sample()
300 static bool intel_pt_log_events(struct intel_pt *pt, u64 tm) in intel_pt_log_events() argument
302 struct perf_time_interval *range = pt->synth_opts.ptime_range; in intel_pt_log_events()
303 int n = pt->synth_opts.range_num; in intel_pt_log_events()
305 if (pt->synth_opts.log_plus_flags & AUXTRACE_LOG_FLG_ALL_PERF_EVTS) in intel_pt_log_events()
[all …]
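
Aside: intel_pt_dump_sample() above recovers the outer struct intel_pt from a pointer to an embedded member, a pattern that repeats in the ptdma, nouveau and xe hits below. A minimal userspace sketch of the container_of() idiom, with hypothetical struct names (not the kernel's):

    #include <stddef.h>
    #include <stdio.h>

    /* Stand-in for the kernel's container_of(): given a pointer to a
     * member, step back to the start of the enclosing structure. */
    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct auxtrace_like { int kind; };     /* hypothetical embedded member */
    struct intel_pt_like {                  /* hypothetical outer struct */
            int id;
            struct auxtrace_like auxtrace;
    };

    int main(void)
    {
            struct intel_pt_like ipt = { .id = 42 };
            struct auxtrace_like *aux = &ipt.auxtrace;

            /* Same shape as container_of(session->auxtrace,
             * struct intel_pt, auxtrace) in intel_pt_dump_sample(). */
            struct intel_pt_like *outer =
                    container_of(aux, struct intel_pt_like, auxtrace);
            printf("id = %d\n", outer->id); /* prints 42 */
            return 0;
    }
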
/linux/drivers/dma/amd/ptdma/
ptdma-pci.c
38 struct pt_device *pt; in pt_alloc_struct() local
40 pt = devm_kzalloc(dev, sizeof(*pt), GFP_KERNEL); in pt_alloc_struct()
42 if (!pt) in pt_alloc_struct()
44 pt->dev = dev; in pt_alloc_struct()
46 INIT_LIST_HEAD(&pt->cmd); in pt_alloc_struct()
48 return pt; in pt_alloc_struct()
51 static int pt_get_msix_irqs(struct pt_device *pt) in pt_get_msix_irqs() argument
53 struct pt_msix *pt_msix = pt->pt_msix; in pt_get_msix_irqs()
54 struct device *dev = pt->dev; in pt_get_msix_irqs()
66 pt->pt_irq = pt_msix->msix_entry.vector; in pt_get_msix_irqs()
[all …]
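
Aside: pt_alloc_struct() above is the standard device-managed allocation shape: devm_kzalloc() ties the structure's lifetime to the struct device, so no kfree() is needed on error or removal paths. A hedged sketch with a trimmed, hypothetical stand-in for struct pt_device:

    #include <linux/device.h>
    #include <linux/list.h>
    #include <linux/slab.h>

    struct pt_device_like {                 /* hypothetical, fields trimmed */
            struct device *dev;
            struct list_head cmd;
    };

    static struct pt_device_like *pt_alloc_struct_like(struct device *dev)
    {
            struct pt_device_like *pt;

            /* Freed automatically when the device is unbound. */
            pt = devm_kzalloc(dev, sizeof(*pt), GFP_KERNEL);
            if (!pt)
                    return NULL;            /* caller turns this into -ENOMEM */

            pt->dev = dev;
            INIT_LIST_HEAD(&pt->cmd);       /* empty command list, as in the hit */
            return pt;
    }
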
ptdma-dev.c
104 struct pt_device *pt = container_of(cmd_q, struct pt_device, cmd_q); in pt_core_perform_passthru() local
117 pt_core_enable_queue_interrupts(pt); in pt_core_perform_passthru()
119 pt_core_disable_queue_interrupts(pt); in pt_core_perform_passthru()
128 struct pt_cmd_queue *cmd_q = &cmd->pt->cmd_q; in pt_do_cmd_complete()
137 pt_log_error(cmd_q->pt, cmd_q->cmd_error); in pt_do_cmd_complete()
144 void pt_check_status_trans(struct pt_device *pt, struct pt_cmd_queue *cmd_q) in pt_check_status_trans() argument
160 pt_do_cmd_complete((ulong)&pt->tdata); in pt_check_status_trans()
166 struct pt_device *pt = data; in pt_core_irq_handler() local
167 struct pt_cmd_queue *cmd_q = &pt->cmd_q; in pt_core_irq_handler()
169 pt_core_disable_queue_interrupts(pt); in pt_core_irq_handler()
[all …]
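
Aside: pt_core_irq_handler() above masks the queue interrupts on entry, and the completion path re-enables them. A hedged sketch of registering such a handler with the managed API (all names hypothetical; register pokes elided):

    #include <linux/device.h>
    #include <linux/interrupt.h>

    struct pt_device_like { void __iomem *regs; };

    static void queue_irqs_set(struct pt_device_like *pt, bool on)
    {
            /* would write the device's interrupt-enable register here */
    }

    static irqreturn_t pt_irq_handler_like(int irq, void *data)
    {
            struct pt_device_like *pt = data;       /* same cast as the hit */

            queue_irqs_set(pt, false);              /* mask while processing */
            /* ... read status, complete finished commands ... */
            queue_irqs_set(pt, true);
            return IRQ_HANDLED;
    }

    static int pt_setup_irq_like(struct device *dev,
                                 struct pt_device_like *pt, int irq)
    {
            /* devm_ variant: released automatically with the device */
            return devm_request_irq(dev, irq, pt_irq_handler_like, 0,
                                    "pt-like", pt);
    }
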
ptdma-dmaengine.c
60 ae4_log_error(cmd_q->pt, cmd_q->cmd_error); in ae4_check_status_error()
92 struct pt_device *pt = desc->pt; in pt_do_cleanup() local
94 kmem_cache_free(pt->dma_desc_cache, desc); in pt_do_cleanup()
97 static struct pt_cmd_queue *pt_get_cmd_queue(struct pt_device *pt, struct pt_dma_chan *chan) in pt_get_cmd_queue() argument
103 if (pt->ver == AE4_DMA_VERSION) { in pt_get_cmd_queue()
104 ae4 = container_of(pt, struct ae4_device, pt); in pt_get_cmd_queue()
108 cmd_q = &pt->cmd_q; in pt_get_cmd_queue()
164 struct pt_device *pt; in pt_dma_start_desc() local
171 pt = pt_cmd->pt; in pt_dma_start_desc()
173 cmd_q = pt_get_cmd_queue(pt, chan); in pt_dma_start_desc()
[all …]
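
Aside: pt_do_cleanup() above returns descriptors to a dedicated slab cache (pt->dma_desc_cache) rather than kfree(). A hedged sketch of that descriptor-cache pattern, with a hypothetical descriptor type:

    #include <linux/slab.h>
    #include <linux/types.h>

    struct pt_dma_desc_like { u64 src, dst, len; }; /* hypothetical */

    static struct kmem_cache *desc_cache;

    static int desc_cache_init(void)
    {
            /* A fixed-size cache is cheaper than kmalloc() for the
             * hot alloc/free cycle of DMA descriptors. */
            desc_cache = kmem_cache_create("pt_dma_desc_like",
                                           sizeof(struct pt_dma_desc_like),
                                           0, SLAB_HWCACHE_ALIGN, NULL);
            return desc_cache ? 0 : -ENOMEM;
    }

    static struct pt_dma_desc_like *desc_get(void)
    {
            /* GFP_NOWAIT: descriptor submission often runs in atomic context */
            return kmem_cache_zalloc(desc_cache, GFP_NOWAIT);
    }

    static void desc_put(struct pt_dma_desc_like *desc)
    {
            kmem_cache_free(desc_cache, desc);      /* mirrors pt_do_cleanup() */
    }
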
ptdma-debugfs.c
26 struct pt_device *pt = s->private; in pt_debugfs_info_show() local
30 seq_printf(s, "Device name: %s\n", dev_name(pt->dev)); in pt_debugfs_info_show()
32 if (pt->ver == AE4_DMA_VERSION) { in pt_debugfs_info_show()
33 ae4 = container_of(pt, struct ae4_device, pt); in pt_debugfs_info_show()
38 seq_printf(s, " # Cmds: %d\n", pt->cmd_count); in pt_debugfs_info_show()
41 regval = ioread32(pt->io_regs + CMD_PT_VERSION); in pt_debugfs_info_show()
57 struct pt_device *pt = s->private; in pt_debugfs_stats_show() local
59 seq_printf(s, "Total Interrupts Handled: %ld\n", pt->total_interrupts); in pt_debugfs_stats_show()
67 struct pt_device *pt; in pt_debugfs_queue_show() local
75 pt = cmd_q->pt; in pt_debugfs_queue_show()
[all …]
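
Aside: the debugfs hits use the seq_file "single show" pattern, with the device pointer stashed in s->private at file-creation time. A hedged sketch built on DEFINE_SHOW_ATTRIBUTE(), which generates the open/read fops (struct fields hypothetical):

    #include <linux/debugfs.h>
    #include <linux/seq_file.h>

    struct pt_device_like { const char *name; long total_interrupts; };

    static int pt_stats_show(struct seq_file *s, void *unused)
    {
            struct pt_device_like *pt = s->private; /* set at create time */

            seq_printf(s, "Device name: %s\n", pt->name);
            seq_printf(s, "Total Interrupts Handled: %ld\n",
                       pt->total_interrupts);
            return 0;
    }
    DEFINE_SHOW_ATTRIBUTE(pt_stats);                /* emits pt_stats_fops */

    static void pt_debugfs_init_like(struct pt_device_like *pt,
                                     struct dentry *dir)
    {
            /* pt reaches pt_stats_show() via the generated open() */
            debugfs_create_file("stats", 0400, dir, pt, &pt_stats_fops);
    }
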
/linux/sound/i2c/other/
pt2258.c
32 int snd_pt2258_reset(struct snd_pt2258 *pt) in snd_pt2258_reset() argument
39 snd_i2c_lock(pt->i2c_bus); in snd_pt2258_reset()
40 if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 1) != 1) in snd_pt2258_reset()
42 snd_i2c_unlock(pt->i2c_bus); in snd_pt2258_reset()
45 pt->mute = 1; in snd_pt2258_reset()
47 snd_i2c_lock(pt->i2c_bus); in snd_pt2258_reset()
48 if (snd_i2c_sendbytes(pt->i2c_dev, bytes, 1) != 1) in snd_pt2258_reset()
50 snd_i2c_unlock(pt->i2c_bus); in snd_pt2258_reset()
54 pt->volume[i] = 0; in snd_pt2258_reset()
57 snd_i2c_lock(pt->i2c_bus); in snd_pt2258_reset()
[all …]
/linux/drivers/gpu/drm/i915/gt/
intel_ppgtt.c
18 struct i915_page_table *pt; in alloc_pt() local
20 pt = kmalloc(sizeof(*pt), I915_GFP_ALLOW_FAIL); in alloc_pt()
21 if (unlikely(!pt)) in alloc_pt()
24 pt->base = vm->alloc_pt_dma(vm, sz); in alloc_pt()
25 if (IS_ERR(pt->base)) { in alloc_pt()
26 kfree(pt); in alloc_pt()
30 pt->is_compact = false; in alloc_pt()
31 atomic_set(&pt->used, 0); in alloc_pt()
32 return pt; in alloc_pt()
61 pd->pt.base = vm->alloc_pt_dma(vm, I915_GTT_PAGE_SIZE_4K); in alloc_pd()
[all …]
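
Aside: alloc_pt() above straddles the kernel's two error conventions: kmalloc() signals failure with NULL, while vm->alloc_pt_dma() returns an ERR_PTR-encoded errno that must be tested with IS_ERR(). A hedged sketch of that unwind (backing allocator hypothetical):

    #include <linux/err.h>
    #include <linux/slab.h>

    struct pt_like { void *base; };

    /* hypothetical: returns a valid pointer or ERR_PTR(-errno) */
    extern void *backing_alloc(size_t sz);

    static struct pt_like *alloc_pt_like(size_t sz)
    {
            struct pt_like *pt;

            pt = kmalloc(sizeof(*pt), GFP_KERNEL);
            if (unlikely(!pt))
                    return ERR_PTR(-ENOMEM);        /* NULL convention -> errno */

            pt->base = backing_alloc(sz);
            if (IS_ERR(pt->base)) {
                    void *err = pt->base;

                    kfree(pt);                      /* unwind the partial alloc */
                    return err;                     /* keep the original errno */
            }
            return pt;
    }
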
gen6_ppgtt.c
21 const struct i915_page_table *pt) in gen6_write_pde() argument
23 dma_addr_t addr = pt ? px_dma(pt) : px_dma(ppgtt->base.vm.scratch[1]); in gen6_write_pde()
85 struct i915_page_table * const pt = in gen6_ppgtt_clear_range() local
92 GEM_BUG_ON(count > atomic_read(&pt->used)); in gen6_ppgtt_clear_range()
93 if (!atomic_sub_return(count, &pt->used)) in gen6_ppgtt_clear_range()
103 vaddr = px_vaddr(pt); in gen6_ppgtt_clear_range()
153 struct i915_page_table *pt; in gen6_flush_pd() local
161 gen6_for_each_pde(pt, pd, start, end, pde) in gen6_flush_pd()
162 gen6_write_pde(ppgtt, pde, pt); in gen6_flush_pd()
178 struct i915_page_table *pt; in gen6_alloc_va_range() local
[all …]
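
Aside: gen6_ppgtt_clear_range() above drops `count` entries from the table's atomic `used` counter and only tears the table down when atomic_sub_return() reaches zero. A hedged sketch of that last-user-frees idiom:

    #include <linux/atomic.h>

    struct pt_like {
            atomic_t used;          /* number of live PTEs in this table */
    };

    static void pt_release_like(struct pt_like *pt)
    {
            /* would free the table or point the PDE at scratch here */
    }

    static void pt_clear_range_like(struct pt_like *pt, unsigned int count)
    {
            /* atomic_sub_return() yields the post-subtraction value, so
             * exactly one caller sees zero and performs the release. */
            if (!atomic_sub_return(count, &pt->used))
                    pt_release_like(pt);
    }
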
/linux/tools/testing/selftests/bpf/progs/
kfunc_call_fail.c
41 struct prog_test_ref_kfunc *pt; in kfunc_call_test_get_mem_fail_rdonly() local
46 pt = bpf_kfunc_call_test_acquire(&s); in kfunc_call_test_get_mem_fail_rdonly()
47 if (pt) { in kfunc_call_test_get_mem_fail_rdonly()
48 p = bpf_kfunc_call_test_get_rdonly_mem(pt, 2 * sizeof(int)); in kfunc_call_test_get_mem_fail_rdonly()
54 bpf_kfunc_call_test_release(pt); in kfunc_call_test_get_mem_fail_rdonly()
62 struct prog_test_ref_kfunc *pt; in kfunc_call_test_get_mem_fail_use_after_free() local
67 pt = bpf_kfunc_call_test_acquire(&s); in kfunc_call_test_get_mem_fail_use_after_free()
68 if (pt) { in kfunc_call_test_get_mem_fail_use_after_free()
69 p = bpf_kfunc_call_test_get_rdwr_mem(pt, 2 * sizeof(int)); in kfunc_call_test_get_mem_fail_use_after_free()
77 bpf_kfunc_call_test_release(pt); in kfunc_call_test_get_mem_fail_use_after_free()
[all …]
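
Aside: both failing programs above probe the verifier's reference tracking: a pointer from bpf_kfunc_call_test_acquire() must be NULL-checked and becomes invalid at bpf_kfunc_call_test_release(). A hedged sketch of the balanced pattern the verifier accepts (these test kfuncs are exported by the kernel's BPF selftest module; prototypes abbreviated):

    /* BPF program, built against the selftest headers. */
    #include <vmlinux.h>
    #include <bpf/bpf_helpers.h>

    extern struct prog_test_ref_kfunc *
    bpf_kfunc_call_test_acquire(unsigned long *sp) __ksym;
    extern void
    bpf_kfunc_call_test_release(struct prog_test_ref_kfunc *p) __ksym;

    SEC("tc")
    int kfunc_call_test_balanced(struct __sk_buff *skb)
    {
            unsigned long s = 0;
            struct prog_test_ref_kfunc *pt;

            pt = bpf_kfunc_call_test_acquire(&s);
            if (!pt)                /* verifier insists on the NULL check */
                    return 0;
            /* ... use pt while the reference is held ... */
            bpf_kfunc_call_test_release(pt);        /* one release per acquire */
            return 0;
    }

    char _license[] SEC("license") = "GPL";
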
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/mmu/
base.c
34 struct nvkm_mmu_pt *pt; member
42 nvkm_mmu_ptp_put(struct nvkm_mmu *mmu, bool force, struct nvkm_mmu_pt *pt) in nvkm_mmu_ptp_put() argument
44 const int slot = pt->base >> pt->ptp->shift; in nvkm_mmu_ptp_put()
45 struct nvkm_mmu_ptp *ptp = pt->ptp; in nvkm_mmu_ptp_put()
56 nvkm_mmu_ptc_put(mmu, force, &ptp->pt); in nvkm_mmu_ptp_put()
61 kfree(pt); in nvkm_mmu_ptp_put()
67 struct nvkm_mmu_pt *pt; in nvkm_mmu_ptp_get() local
71 if (!(pt = kzalloc(sizeof(*pt), GFP_KERNEL))) in nvkm_mmu_ptp_get()
78 kfree(pt); in nvkm_mmu_ptp_get()
82 ptp->pt = nvkm_mmu_ptc_get(mmu, 0x1000, 0x1000, false); in nvkm_mmu_ptp_get()
[all …]
vmmnv44.c
27 nv44_vmm_pgt_fill(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv44_vmm_pgt_fill() argument
33 tmp[0] = nvkm_ro32(pt->memory, pteo + 0x0); in nv44_vmm_pgt_fill()
34 tmp[1] = nvkm_ro32(pt->memory, pteo + 0x4); in nv44_vmm_pgt_fill()
35 tmp[2] = nvkm_ro32(pt->memory, pteo + 0x8); in nv44_vmm_pgt_fill()
36 tmp[3] = nvkm_ro32(pt->memory, pteo + 0xc); in nv44_vmm_pgt_fill()
66 VMM_WO032(pt, vmm, pteo + 0x0, tmp[0]); in nv44_vmm_pgt_fill()
67 VMM_WO032(pt, vmm, pteo + 0x4, tmp[1]); in nv44_vmm_pgt_fill()
68 VMM_WO032(pt, vmm, pteo + 0x8, tmp[2]); in nv44_vmm_pgt_fill()
69 VMM_WO032(pt, vmm, pteo + 0xc, tmp[3] | 0x40000000); in nv44_vmm_pgt_fill()
73 nv44_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, in nv44_vmm_pgt_pte() argument
[all …]
vmmgp100.c
35 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gp100_vmm_pfn_unmap() argument
40 nvkm_kmap(pt->memory); in gp100_vmm_pfn_unmap()
42 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0); in gp100_vmm_pfn_unmap()
43 u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4); in gp100_vmm_pfn_unmap()
51 nvkm_done(pt->memory); in gp100_vmm_pfn_unmap()
56 struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes) in gp100_vmm_pfn_clear() argument
59 nvkm_kmap(pt->memory); in gp100_vmm_pfn_clear()
61 u32 datalo = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 0); in gp100_vmm_pfn_clear()
62 u32 datahi = nvkm_ro32(pt->memory, pt->base + ptei * 8 + 4); in gp100_vmm_pfn_clear()
65 VMM_WO064(pt, vmm, ptei * 8, data & ~BIT_ULL(0)); in gp100_vmm_pfn_clear()
[all …]
vmmgh100.c
13 gh100_vmm_pgt_pte(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, in gh100_vmm_pgt_pte() argument
19 VMM_WO064(pt, vmm, ptei++ * NV_MMU_VER3_PTE__SIZE, data); in gh100_vmm_pgt_pte()
25 gh100_vmm_pgt_sgl(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, in gh100_vmm_pgt_sgl() argument
28 VMM_MAP_ITER_SGL(vmm, pt, ptei, ptes, map, gh100_vmm_pgt_pte); in gh100_vmm_pgt_sgl()
32 gh100_vmm_pgt_dma(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, in gh100_vmm_pgt_dma() argument
38 nvkm_kmap(pt->memory); in gh100_vmm_pgt_dma()
42 VMM_WO064(pt, vmm, ptei++ * NV_MMU_VER3_PTE__SIZE, data); in gh100_vmm_pgt_dma()
44 nvkm_done(pt->memory); in gh100_vmm_pgt_dma()
48 VMM_MAP_ITER_DMA(vmm, pt, ptei, ptes, map, gh100_vmm_pgt_pte); in gh100_vmm_pgt_dma()
52 gh100_vmm_pgt_mem(struct nvkm_vmm *vmm, struct nvkm_mmu_pt *pt, u32 ptei, u32 ptes, in gh100_vmm_pgt_mem() argument
[all …]
/linux/arch/sparc/lib/
Memcpy_utils.S (the ",pt" in these hits is the SPARC V9 branch-prediction suffix "predict taken", not an identifier)
20 ba,pt %xcc, __restore_asi
24 ba,pt %xcc, __restore_asi
28 ba,pt %xcc, __restore_asi
32 ba,pt %xcc, __restore_asi
36 ba,pt %xcc, __restore_asi
40 ba,pt %xcc, __restore_asi
44 ba,pt %xcc, __restore_asi
48 ba,pt %xcc, __restore_asi
52 ba,pt %xcc, __restore_asi
57 ba,pt %xcc, __restore_asi
[all …]
/linux/drivers/gpu/drm/i915/selftests/
scatterlist.c
41 static noinline int expect_pfn_sg(struct pfn_table *pt, in expect_pfn_sg() argument
50 pfn = pt->start; in expect_pfn_sg()
51 for_each_sg(pt->st.sgl, sg, pt->st.nents, n) { in expect_pfn_sg()
53 unsigned int npages = npages_fn(n, pt->st.nents, rnd); in expect_pfn_sg()
72 if (pfn != pt->end) { in expect_pfn_sg()
74 __func__, who, pt->end, pfn); in expect_pfn_sg()
81 static noinline int expect_pfn_sg_page_iter(struct pfn_table *pt, in expect_pfn_sg_page_iter() argument
88 pfn = pt->start; in expect_pfn_sg_page_iter()
89 for_each_sg_page(pt->st.sgl, &sgiter, pt->st.nents, 0) { in expect_pfn_sg_page_iter()
103 if (pfn != pt->end) { in expect_pfn_sg_page_iter()
[all …]
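
Aside: expect_pfn_sg() above walks a scatterlist with for_each_sg(), checking each entry's page and length. A hedged sketch of building and walking a small table (the page array is assumed to come from elsewhere):

    #include <linux/mm.h>
    #include <linux/scatterlist.h>

    static int walk_pages_like(struct page **pages, unsigned int n)
    {
            struct sg_table st;
            struct scatterlist *sg;
            unsigned int i;
            int ret;

            ret = sg_alloc_table(&st, n, GFP_KERNEL);
            if (ret)
                    return ret;

            /* One PAGE_SIZE entry per page, offset 0. */
            for_each_sg(st.sgl, sg, n, i)
                    sg_set_page(sg, pages[i], PAGE_SIZE, 0);

            /* Walk it back, as expect_pfn_sg() does with pt->st.sgl. */
            for_each_sg(st.sgl, sg, st.nents, i)
                    pr_info("entry %u: pfn %lu len %u\n",
                            i, page_to_pfn(sg_page(sg)), sg->length);

            sg_free_table(&st);
            return 0;
    }
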
/linux/drivers/hv/
mshv_eventfd.c
223 struct mshv_partition *pt = rp->rsmplr_partn; in mshv_irqfd_resampler_shutdown() local
225 mutex_lock(&pt->irqfds_resampler_lock); in mshv_irqfd_resampler_shutdown()
228 synchronize_srcu(&pt->pt_irq_srcu); in mshv_irqfd_resampler_shutdown()
232 mshv_unregister_irq_ack_notifier(pt, &rp->rsmplr_notifier); in mshv_irqfd_resampler_shutdown()
236 mutex_unlock(&pt->irqfds_resampler_lock); in mshv_irqfd_resampler_shutdown()
297 struct mshv_partition *pt = irqfd->irqfd_partn; in mshv_irqfd_wakeup() local
304 idx = srcu_read_lock(&pt->pt_irq_srcu); in mshv_irqfd_wakeup()
314 srcu_read_unlock(&pt->pt_irq_srcu, idx); in mshv_irqfd_wakeup()
323 spin_lock_irqsave(&pt->pt_irqfds_lock, flags); in mshv_irqfd_wakeup()
337 spin_unlock_irqrestore(&pt->pt_irqfds_lock, flags); in mshv_irqfd_wakeup()
[all …]
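
Aside: mshv_irqfd_wakeup() above reads under srcu_read_lock(), while the shutdown path calls synchronize_srcu() to wait out readers before freeing. A hedged sketch of that reader/updater split (structure hypothetical):

    #include <linux/rculist.h>
    #include <linux/srcu.h>

    struct partn_like {
            struct srcu_struct irq_srcu;
            struct list_head notifiers;     /* RCU-protected list */
    };

    static void reader_like(struct partn_like *pt)
    {
            int idx = srcu_read_lock(&pt->irq_srcu); /* sleepable read side */

            /* ... walk pt->notifiers with list_for_each_entry_rcu() ... */
            srcu_read_unlock(&pt->irq_srcu, idx);
    }

    static void unregister_like(struct partn_like *pt, struct list_head *entry)
    {
            list_del_rcu(entry);             /* unpublish first */
            synchronize_srcu(&pt->irq_srcu); /* wait for in-flight readers */
            /* now safe to free the object containing 'entry' */
    }
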
/linux/drivers/dma-buf/
sw_sync.c
154 struct sync_pt *pt = dma_fence_to_sync_pt(fence); in timeline_fence_release() local
159 if (!list_empty(&pt->link)) { in timeline_fence_release()
160 list_del(&pt->link); in timeline_fence_release()
161 rb_erase(&pt->node, &parent->pt_tree); in timeline_fence_release()
178 struct sync_pt *pt = dma_fence_to_sync_pt(fence); in timeline_fence_set_deadline() local
183 if (ktime_before(deadline, pt->deadline)) in timeline_fence_set_deadline()
184 pt->deadline = deadline; in timeline_fence_set_deadline()
186 pt->deadline = deadline; in timeline_fence_set_deadline()
211 struct sync_pt *pt, *next; in sync_timeline_signal() local
219 list_for_each_entry_safe(pt, next, &obj->pt_list, link) { in sync_timeline_signal()
[all …]
/linux/drivers/gpu/drm/gma500/
mmu.c
224 static void psb_mmu_free_pt(struct psb_mmu_pt *pt) in psb_mmu_free_pt() argument
226 __free_page(pt->p); in psb_mmu_free_pt()
227 kfree(pt); in psb_mmu_free_pt()
235 struct psb_mmu_pt *pt; in psb_mmu_free_pagedir() local
248 pt = pd->tables[i]; in psb_mmu_free_pagedir()
249 if (pt) in psb_mmu_free_pagedir()
250 psb_mmu_free_pt(pt); in psb_mmu_free_pagedir()
263 struct psb_mmu_pt *pt = kmalloc(sizeof(*pt), GFP_KERNEL); in psb_mmu_alloc_pt() local
272 if (!pt) in psb_mmu_alloc_pt()
275 pt->p = alloc_page(GFP_DMA32); in psb_mmu_alloc_pt()
[all …]
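
Aside: psb_mmu_alloc_pt()/psb_mmu_free_pt() above pair a kmalloc'd descriptor with one backing page from the DMA32 zone, since this GPU can only address 32-bit physical memory. A hedged sketch of the alloc/free pair:

    #include <linux/gfp.h>
    #include <linux/slab.h>

    struct mmu_pt_like {
            struct page *p;         /* single backing page for the table */
    };

    static struct mmu_pt_like *mmu_pt_alloc_like(void)
    {
            struct mmu_pt_like *pt = kmalloc(sizeof(*pt), GFP_KERNEL);

            if (!pt)
                    return NULL;

            pt->p = alloc_page(GFP_DMA32);  /* must sit below 4 GiB physical */
            if (!pt->p) {
                    kfree(pt);
                    return NULL;
            }
            return pt;
    }

    static void mmu_pt_free_like(struct mmu_pt_like *pt)
    {
            __free_page(pt->p);             /* mirrors psb_mmu_free_pt() */
            kfree(pt);
    }
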
/linux/lib/
nlattr.c
119 void nla_get_range_unsigned(const struct nla_policy *pt, in nla_get_range_unsigned() argument
122 WARN_ON_ONCE(pt->validation_type != NLA_VALIDATE_RANGE_PTR && in nla_get_range_unsigned()
123 (pt->min < 0 || pt->max < 0)); in nla_get_range_unsigned()
127 switch (pt->type) { in nla_get_range_unsigned()
150 switch (pt->validation_type) { in nla_get_range_unsigned()
153 range->min = pt->min; in nla_get_range_unsigned()
154 range->max = pt->max; in nla_get_range_unsigned()
157 *range = *pt->range; in nla_get_range_unsigned()
160 range->min = pt->min; in nla_get_range_unsigned()
163 range->max = pt->max; in nla_get_range_unsigned()
[all …]
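
Aside: nla_get_range_unsigned() above reads back whichever range form a policy declared: an inline min/max pair or a pointer to a shared struct netlink_range_validation. A hedged sketch declaring both forms (attribute names hypothetical):

    #include <net/netlink.h>

    enum { ATTR_UNSPEC, ATTR_PORT, ATTR_MTU, __ATTR_MAX };
    #define ATTR_MAX (__ATTR_MAX - 1)

    /* Shared range object: the NLA_VALIDATE_RANGE_PTR case in the hit. */
    static const struct netlink_range_validation mtu_range = {
            .min = 68,
            .max = 65535,
    };

    static const struct nla_policy my_policy[ATTR_MAX + 1] = {
            /* Inline range: lands in pt->min / pt->max. */
            [ATTR_PORT] = NLA_POLICY_RANGE(NLA_U16, 1, 65535),
            /* Full-width range via pointer: lands in pt->range. */
            [ATTR_MTU]  = NLA_POLICY_FULL_RANGE(NLA_U32, &mtu_range),
    };
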
/linux/drivers/gpu/drm/xe/
xe_lmtt.c
60 struct xe_lmtt_pt *pt; in lmtt_pt_alloc() local
64 pt = kzalloc(struct_size(pt, entries, num_entries), GFP_KERNEL); in lmtt_pt_alloc()
65 if (!pt) { in lmtt_pt_alloc()
86 pt->level = level; in lmtt_pt_alloc()
87 pt->bo = bo; in lmtt_pt_alloc()
88 return pt; in lmtt_pt_alloc()
91 kfree(pt); in lmtt_pt_alloc()
96 static void lmtt_pt_free(struct xe_lmtt_pt *pt) in lmtt_pt_free() argument
98 lmtt_debug(&pt->bo->tile->sriov.pf.lmtt, "level=%u addr=%llx\n", in lmtt_pt_free()
99 pt->level, (u64)xe_bo_main_addr(pt->bo, XE_PAGE_SIZE)); in lmtt_pt_free()
[all …]
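
Aside: lmtt_pt_alloc() above sizes its allocation with struct_size(), i.e. sizeof(struct) plus a flexible trailing array, with overflow checking built in. A hedged sketch (entry type hypothetical):

    #include <linux/overflow.h>
    #include <linux/slab.h>

    struct lmtt_pt_like {
            unsigned int level;
            u64 entries[];          /* flexible array, sized at alloc time */
    };

    static struct lmtt_pt_like *pt_alloc_like(unsigned int level,
                                              unsigned int num_entries)
    {
            struct lmtt_pt_like *pt;

            /* struct_size() saturates to SIZE_MAX on overflow, so a bogus
             * num_entries makes kzalloc() fail instead of under-allocating. */
            pt = kzalloc(struct_size(pt, entries, num_entries), GFP_KERNEL);
            if (!pt)
                    return NULL;

            pt->level = level;
            return pt;
    }
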
/linux/net/netlink/
policy.c
210 int netlink_policy_dump_attr_size_estimate(const struct nla_policy *pt) in netlink_policy_dump_attr_size_estimate() argument
215 switch (pt->type) { in netlink_policy_dump_attr_size_estimate()
256 const struct nla_policy *pt, in __netlink_policy_dump_write_attr() argument
259 int estimate = netlink_policy_dump_attr_size_estimate(pt); in __netlink_policy_dump_write_attr()
267 switch (pt->type) { in __netlink_policy_dump_write_attr()
278 if (pt->type == NLA_NESTED_ARRAY) in __netlink_policy_dump_write_attr()
280 if (state && pt->nested_policy && pt->len && in __netlink_policy_dump_write_attr()
283 pt->nested_policy, in __netlink_policy_dump_write_attr()
284 pt->len)) || in __netlink_policy_dump_write_attr()
286 pt->len))) in __netlink_policy_dump_write_attr()
[all …]
/linux/arch/alpha/kernel/
process.c
286 dump_elf_thread(elf_greg_t *dest, struct pt_regs *pt, struct thread_info *ti) in dump_elf_thread() argument
289 struct switch_stack * sw = ((struct switch_stack *) pt) - 1; in dump_elf_thread()
291 dest[ 0] = pt->r0; in dump_elf_thread()
292 dest[ 1] = pt->r1; in dump_elf_thread()
293 dest[ 2] = pt->r2; in dump_elf_thread()
294 dest[ 3] = pt->r3; in dump_elf_thread()
295 dest[ 4] = pt->r4; in dump_elf_thread()
296 dest[ 5] = pt->r5; in dump_elf_thread()
297 dest[ 6] = pt->r6; in dump_elf_thread()
298 dest[ 7] = pt->r7; in dump_elf_thread()
[all …]
/linux/arch/x86/events/intel/
pt.c
32 static DEFINE_PER_CPU(struct pt, pt_ctx);
108 PMU_FORMAT_ATTR(pt, "config:0" );
420 struct pt *pt = this_cpu_ptr(&pt_ctx); in pt_config_start() local
427 if (READ_ONCE(pt->vmx_on)) in pt_config_start()
428 perf_aux_output_flag(&pt->handle, PERF_AUX_FLAG_PARTIAL); in pt_config_start()
466 struct pt *pt = this_cpu_ptr(&pt_ctx); in pt_config_filters() local
488 if (pt->filters.filter[range].msr_a != filter->msr_a) { in pt_config_filters()
490 pt->filters.filter[range].msr_a = filter->msr_a; in pt_config_filters()
493 if (pt->filters.filter[range].msr_b != filter->msr_b) { in pt_config_filters()
495 pt->filters.filter[range].msr_b = filter->msr_b; in pt_config_filters()
[all …]
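
Aside: the PMU driver above keeps one struct pt per CPU (DEFINE_PER_CPU) and reaches the current CPU's instance with this_cpu_ptr(), which is only safe with preemption disabled, as it is in PMU callbacks. A hedged sketch:

    #include <linux/percpu.h>

    struct pt_like {
            int vmx_on;             /* tracing paused while a guest runs */
    };

    static DEFINE_PER_CPU(struct pt_like, pt_ctx_like);

    static void pt_start_like(void)
    {
            /* Valid in a non-preemptible context (e.g. a PMU callback). */
            struct pt_like *pt = this_cpu_ptr(&pt_ctx_like);

            if (READ_ONCE(pt->vmx_on))
                    return;
            /* ... program the trace hardware for this CPU ... */
    }
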
/linux/net/core/
net-procfs.c
176 struct packet_type *pt = NULL; in ptype_get_idx() local
183 list_for_each_entry_rcu(pt, ptype_list, list) { in ptype_get_idx()
185 return pt; in ptype_get_idx()
190 list_for_each_entry_rcu(pt, &seq_file_net(seq)->ptype_all, list) { in ptype_get_idx()
192 return pt; in ptype_get_idx()
196 list_for_each_entry_rcu(pt, &seq_file_net(seq)->ptype_specific, list) { in ptype_get_idx()
198 return pt; in ptype_get_idx()
203 list_for_each_entry_rcu(pt, &ptype_base[t], list) { in ptype_get_idx()
205 return pt; in ptype_get_idx()
223 struct packet_type *pt; in ptype_seq_next() local
[all …]
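
Aside: ptype_get_idx() above iterates the packet-type lists with list_for_each_entry_rcu(); the seq_file callbacks hold rcu_read_lock() around the walk. A hedged sketch of that lockless-reader shape (list and element hypothetical):

    #include <linux/rculist.h>
    #include <linux/rcupdate.h>

    struct packet_type_like {
            int type;
            struct list_head list;
    };

    static LIST_HEAD(ptype_like_list);

    static int count_type_like(int type)
    {
            struct packet_type_like *pt;
            int n = 0;

            rcu_read_lock();        /* readers never block list updaters */
            list_for_each_entry_rcu(pt, &ptype_like_list, list) {
                    if (pt->type == type)
                            n++;
            }
            rcu_read_unlock();
            return n;
    }
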
/linux/arch/x86/kernel/
vm86_32.c
79 #define AL(regs) (((unsigned char *)&((regs)->pt.ax))[0])
80 #define AH(regs) (((unsigned char *)&((regs)->pt.ax))[1])
81 #define IP(regs) (*(unsigned short *)&((regs)->pt.ip))
82 #define SP(regs) (*(unsigned short *)&((regs)->pt.sp))
111 set_flags(regs->pt.flags, VEFLAGS, X86_EFLAGS_VIF | vm86->veflags_mask); in save_v86_state()
119 unsafe_put_user(regs->pt.bx, &user->regs.ebx, Efault_end); in save_v86_state()
120 unsafe_put_user(regs->pt.cx, &user->regs.ecx, Efault_end); in save_v86_state()
121 unsafe_put_user(regs->pt.dx, &user->regs.edx, Efault_end); in save_v86_state()
122 unsafe_put_user(regs->pt.si, &user->regs.esi, Efault_end); in save_v86_state()
123 unsafe_put_user(regs->pt.di, &user->regs.edi, Efault_end); in save_v86_state()
[all …]
