Lines Matching full:ea (identifier search over the powerpc single-step emulator, arch/powerpc/lib/sstep.c)

53 extern int do_lq(unsigned long ea, unsigned long *regs);
54 extern int do_stq(unsigned long ea, unsigned long val0, unsigned long val1);
55 extern int do_lqarx(unsigned long ea, unsigned long *regs);
56 extern int do_stqcx(unsigned long ea, unsigned long val0, unsigned long val1,
107 unsigned long ea, int nb) in address_ok() argument
111 if (__access_ok(ea, nb)) in address_ok()
113 if (__access_ok(ea, 1)) in address_ok()
117 regs->dar = ea; in address_ok()
128 unsigned long ea; in dform_ea() local
131 ea = (signed short) instr; /* sign-extend */ in dform_ea()
133 ea += regs->gpr[ra]; in dform_ea()
135 return ea; in dform_ea()
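
The dform_ea() lines above are the simplest case: the low 16 bits of the instruction word are the displacement, sign-extended and added to GPR[RA] when RA is nonzero (RA = 0 means a zero base, not GPR[0]). A minimal user-space sketch of that calculation; the names ending in _sketch are illustrative, not the kernel's:

    #include <stdint.h>

    /* Illustrative register file; stands in for the relevant part of pt_regs. */
    struct regs_sketch {
            uint64_t gpr[32];
    };

    /* D-form: EA = (RA ? GPR[RA] : 0) + sign-extended 16-bit displacement. */
    static uint64_t dform_ea_sketch(uint32_t instr, const struct regs_sketch *regs)
    {
            int ra = (instr >> 16) & 0x1f;          /* RA field of the instruction */
            int64_t ea = (int16_t)instr;            /* sign-extend the low 16 bits */

            if (ra)
                    ea += regs->gpr[ra];
            return (uint64_t)ea;
    }
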
146 unsigned long ea; in dsform_ea() local
149 ea = (signed short) (instr & ~3); /* sign-extend */ in dsform_ea()
151 ea += regs->gpr[ra]; in dsform_ea()
153 return ea; in dsform_ea()
163 unsigned long ea; in dqform_ea() local
166 ea = (signed short) (instr & ~0xf); /* sign-extend */ in dqform_ea()
168 ea += regs->gpr[ra]; in dqform_ea()
170 return ea; in dqform_ea()
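
dsform_ea() and dqform_ea() follow the same pattern, except that the low 2 bits (DS-form) or 4 bits (DQ-form) of the immediate field are masked off before sign extension, because those bits encode other information; the displacement is therefore always a multiple of 4 or 16. A sketch reusing struct regs_sketch from above:

    /* DS-form: displacement is the low 16 bits with bits 0-1 cleared. */
    static uint64_t dsform_ea_sketch(uint32_t instr, const struct regs_sketch *regs)
    {
            int ra = (instr >> 16) & 0x1f;
            int64_t ea = (int16_t)(instr & ~3u);    /* multiple of 4 */

            if (ra)
                    ea += regs->gpr[ra];
            return (uint64_t)ea;
    }

    /* DQ-form: as above, but bits 0-3 are cleared, so the displacement is a multiple of 16. */
    static uint64_t dqform_ea_sketch(uint32_t instr, const struct regs_sketch *regs)
    {
            int ra = (instr >> 16) & 0x1f;
            int64_t ea = (int16_t)(instr & ~0xfu);

            if (ra)
                    ea += regs->gpr[ra];
            return (uint64_t)ea;
    }
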
181 unsigned long ea; in xform_ea() local
185 ea = regs->gpr[rb]; in xform_ea()
187 ea += regs->gpr[ra]; in xform_ea()
189 return ea; in xform_ea()
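
xform_ea() is the indexed (register + register) form: the base is GPR[RB], and GPR[RA] is added only when RA is nonzero. A sketch, again reusing struct regs_sketch:

    /* X-form (indexed): EA = (RA ? GPR[RA] : 0) + GPR[RB]. */
    static uint64_t xform_ea_sketch(uint32_t instr, const struct regs_sketch *regs)
    {
            int ra = (instr >> 16) & 0x1f;
            int rb = (instr >> 11) & 0x1f;
            uint64_t ea = regs->gpr[rb];

            if (ra)
                    ea += regs->gpr[ra];
            return ea;
    }
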
202 unsigned long ea, d0, d1, d; in mlsd_8lsd_ea() local
215 ea = (signed int)dd; in mlsd_8lsd_ea()
216 ea = (ea << 2) | (d & 0x3); in mlsd_8lsd_ea()
219 ea += regs->gpr[ra]; in mlsd_8lsd_ea()
221 ; /* Leave ea as is */ in mlsd_8lsd_ea()
223 ea += regs->nip; in mlsd_8lsd_ea()
230 return ea; in mlsd_8lsd_ea()
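
mlsd_8lsd_ea() covers the ISA 3.1 prefixed loads and stores: 18 displacement bits from the prefix word and 16 from the suffix form a 34-bit displacement, which is sign-extended and then either left alone (R=0, RA=0), added to GPR[RA] (R=0, RA nonzero), or added to the instruction address for PC-relative addressing (R=1). A sketch of the same arithmetic; the _sketch names are illustrative and the bit positions follow the prefixed MLS/8LS layout:

    /* Prefixed MLS/8LS D-form: d = (18 prefix bits << 16) | 16 suffix bits, sign-extended
     * from 34 bits. Relies on arithmetic right shift, as common compilers provide. */
    static uint64_t mlsd_8lsd_ea_sketch(uint32_t prefix, uint32_t suffix,
                                        uint64_t nip, const struct regs_sketch *regs)
    {
            int prefix_r = (prefix >> 20) & 1;                      /* R bit of the prefix */
            int ra = (suffix >> 16) & 0x1f;
            uint64_t d = ((uint64_t)(prefix & 0x3ffff) << 16) | (suffix & 0xffff);
            int64_t ea = (int64_t)(d << 30) >> 30;                  /* sign-extend 34 bits */

            if (!prefix_r && ra)
                    ea += regs->gpr[ra];                            /* base + displacement */
            else if (prefix_r)
                    ea += nip;                                      /* PC-relative */
            /* !prefix_r && !ra: leave ea as is; R=1 with RA nonzero is an invalid form */
            return (uint64_t)ea;
    }
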
289 unsigned long ea, int nb, in read_mem_aligned() argument
297 err = __get_user(x, (unsigned char __user *) ea); in read_mem_aligned()
300 err = __get_user(x, (unsigned short __user *) ea); in read_mem_aligned()
303 err = __get_user(x, (unsigned int __user *) ea); in read_mem_aligned()
307 err = __get_user(x, (unsigned long __user *) ea); in read_mem_aligned()
314 regs->dar = ea; in read_mem_aligned()
322 static nokprobe_inline int copy_mem_in(u8 *dest, unsigned long ea, int nb, in copy_mem_in() argument
329 c = max_align(ea); in copy_mem_in()
334 err = __get_user(*dest, (unsigned char __user *) ea); in copy_mem_in()
338 (unsigned short __user *) ea); in copy_mem_in()
342 (unsigned int __user *) ea); in copy_mem_in()
347 (unsigned long __user *) ea); in copy_mem_in()
352 regs->dar = ea; in copy_mem_in()
356 ea += c; in copy_mem_in()
362 unsigned long ea, int nb, in read_mem_unaligned() argument
374 err = copy_mem_in(&u.b[i], ea, nb, regs); in read_mem_unaligned()
381 * Read memory at address ea for nb bytes, return 0 for success
385 static int read_mem(unsigned long *dest, unsigned long ea, int nb, in read_mem() argument
388 if (!address_ok(regs, ea, nb)) in read_mem()
390 if ((ea & (nb - 1)) == 0) in read_mem()
391 return read_mem_aligned(dest, ea, nb, regs); in read_mem()
392 return read_mem_unaligned(dest, ea, nb, regs); in read_mem()
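
read_mem() validates the range with address_ok(), does a single __get_user() when the EA is naturally aligned to the access size, and otherwise falls back to copy_mem_in(), which walks the buffer in the largest naturally aligned chunks it can. A user-space sketch of that chunked walk; memcpy stands in for the fault-handling __get_user() accessors, and the chunk-size helper only approximates the kernel's max_align():

    #include <stdint.h>
    #include <string.h>

    /* Largest chunk (1, 2, 4 or 8 bytes) that keeps an access at ea naturally
     * aligned and within the nb bytes that remain. */
    static int chunk_size_sketch(uintptr_t ea, int nb)
    {
            int c = 8;

            while (c > 1 && ((ea & (c - 1)) || c > nb))
                    c >>= 1;
            return c;
    }

    /* Sketch of the copy_mem_in() loop: copy in aligned pieces so that each
     * piece could be a single __get_user() of that size in the kernel. */
    static int copy_mem_in_sketch(uint8_t *dest, uintptr_t ea, int nb)
    {
            while (nb > 0) {
                    int c = chunk_size_sketch(ea, nb);

                    memcpy(dest, (const void *)ea, c);      /* kernel: __get_user of size c */
                    dest += c;
                    ea += c;
                    nb -= c;
            }
            return 0;
    }

On a fault the kernel versions stop early, record the faulting EA in regs->dar and return -EFAULT, which the sketch does not model.
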
397 unsigned long ea, int nb, in write_mem_aligned() argument
404 err = __put_user(val, (unsigned char __user *) ea); in write_mem_aligned()
407 err = __put_user(val, (unsigned short __user *) ea); in write_mem_aligned()
410 err = __put_user(val, (unsigned int __user *) ea); in write_mem_aligned()
414 err = __put_user(val, (unsigned long __user *) ea); in write_mem_aligned()
419 regs->dar = ea; in write_mem_aligned()
427 static nokprobe_inline int copy_mem_out(u8 *dest, unsigned long ea, int nb, in copy_mem_out() argument
434 c = max_align(ea); in copy_mem_out()
439 err = __put_user(*dest, (unsigned char __user *) ea); in copy_mem_out()
443 (unsigned short __user *) ea); in copy_mem_out()
447 (unsigned int __user *) ea); in copy_mem_out()
452 (unsigned long __user *) ea); in copy_mem_out()
457 regs->dar = ea; in copy_mem_out()
461 ea += c; in copy_mem_out()
467 unsigned long ea, int nb, in write_mem_unaligned() argument
478 return copy_mem_out(&u.b[i], ea, nb, regs); in write_mem_unaligned()
482 * Write memory at address ea for nb bytes, return 0 for success
485 static int write_mem(unsigned long val, unsigned long ea, int nb, in write_mem() argument
488 if (!address_ok(regs, ea, nb)) in write_mem()
490 if ((ea & (nb - 1)) == 0) in write_mem()
491 return write_mem_aligned(val, ea, nb, regs); in write_mem()
492 return write_mem_unaligned(val, ea, nb, regs); in write_mem()
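
write_mem() mirrors read_mem(): range check, a single __put_user() when (ea & (nb - 1)) == 0, otherwise the chunked copy_mem_out() walk. A compressed sketch of that dispatch; address_ok_sketch() is a hypothetical stand-in for the kernel's address_ok(), and the byte handling assumes a little-endian host:

    #include <errno.h>
    #include <stdint.h>
    #include <string.h>

    static int address_ok_sketch(uintptr_t ea, int nb)
    {
            (void)ea; (void)nb;
            return 1;       /* the kernel checks __access_ok() and sets regs->dar on failure */
    }

    /* Sketch of write_mem(): aligned fast path, else store in aligned pieces. */
    static int write_mem_sketch(uint64_t val, uintptr_t ea, int nb)
    {
            const uint8_t *src = (const uint8_t *)&val;     /* low-order bytes first on LE */

            if (!address_ok_sketch(ea, nb))
                    return -EFAULT;
            if ((ea & (nb - 1)) == 0) {
                    memcpy((void *)ea, src, nb);            /* naturally aligned: one store */
                    return 0;
            }
            while (nb > 0) {                                /* unaligned: as copy_mem_out() */
                    int c = 1;

                    while (c < 8 && !(ea & (2 * c - 1)) && 2 * c <= nb)
                            c <<= 1;
                    memcpy((void *)ea, src, c);
                    src += c;
                    ea += c;
                    nb -= c;
            }
            return 0;
    }
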
501 static int do_fp_load(struct instruction_op *op, unsigned long ea, in do_fp_load() argument
515 if (!address_ok(regs, ea, nb)) in do_fp_load()
518 err = copy_mem_in(u.b, ea, nb, regs); in do_fp_load()
552 static int do_fp_store(struct instruction_op *op, unsigned long ea, in do_fp_store() argument
565 if (!address_ok(regs, ea, nb)) in do_fp_store()
592 return copy_mem_out(u.b, ea, nb, regs); in do_fp_store()
599 static nokprobe_inline int do_vec_load(int rn, unsigned long ea, in do_vec_load() argument
609 if (!address_ok(regs, ea & ~0xfUL, 16)) in do_vec_load()
612 ea &= ~(size - 1); in do_vec_load()
613 err = copy_mem_in(&u.b[ea & 0xf], ea, size, regs); in do_vec_load()
617 do_byte_reverse(&u.b[ea & 0xf], size); in do_vec_load()
627 static nokprobe_inline int do_vec_store(int rn, unsigned long ea, in do_vec_store() argument
636 if (!address_ok(regs, ea & ~0xfUL, 16)) in do_vec_store()
639 ea &= ~(size - 1); in do_vec_store()
648 do_byte_reverse(&u.b[ea & 0xf], size); in do_vec_store()
649 return copy_mem_out(&u.b[ea & 0xf], ea, size, regs); in do_vec_store()
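
do_vec_load() and do_vec_store() show the VMX addressing quirk: the permission check ignores the low four bits (address_ok on ea & ~0xfUL for 16 bytes), the access itself is aligned down to its size, and the offset ea & 0xf selects which byte lane of the 16-byte register is touched, with an optional byte reverse when the access is cross-endian. A sketch of the load side; struct vr_sketch is an illustrative 16-byte container, not the kernel's vector state:

    #include <stdint.h>
    #include <string.h>

    struct vr_sketch {
            uint8_t b[16];          /* image of one VMX register */
    };

    /* Sketch of do_vec_load(): align the EA down to the element size, land the
     * data at byte offset (ea & 0xf) of the register image, byte-reverse the
     * element if the emulated access is cross-endian. */
    static void do_vec_load_sketch(struct vr_sketch *vr, uintptr_t ea, int size,
                                   int cross_endian)
    {
            int off;

            ea &= ~((uintptr_t)size - 1);   /* lvx and the element forms ignore low EA bits */
            off = ea & 0xf;
            memcpy(&vr->b[off], (const void *)ea, size);
            if (cross_endian) {
                    for (int i = 0; i < size / 2; i++) {
                            uint8_t t = vr->b[off + i];

                            vr->b[off + i] = vr->b[off + size - 1 - i];
                            vr->b[off + size - 1 - i] = t;
                    }
            }
    }

The store side is symmetric: byte-reverse first if needed, then copy_mem_out() from the same lane.
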
654 static nokprobe_inline int emulate_lq(struct pt_regs *regs, unsigned long ea, in emulate_lq() argument
659 if (!address_ok(regs, ea, 16)) in emulate_lq()
662 if ((ea & 0xf) == 0) { in emulate_lq()
663 err = do_lq(ea, &regs->gpr[reg]); in emulate_lq()
665 err = read_mem(&regs->gpr[reg + IS_LE], ea, 8, regs); in emulate_lq()
667 err = read_mem(&regs->gpr[reg + IS_BE], ea + 8, 8, regs); in emulate_lq()
674 static nokprobe_inline int emulate_stq(struct pt_regs *regs, unsigned long ea, in emulate_stq() argument
680 if (!address_ok(regs, ea, 16)) in emulate_stq()
688 if ((ea & 0xf) == 0) in emulate_stq()
689 return do_stq(ea, vals[0], vals[1]); in emulate_stq()
691 err = write_mem(vals[IS_LE], ea, 8, regs); in emulate_stq()
693 err = write_mem(vals[IS_BE], ea + 8, 8, regs); in emulate_stq()
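
emulate_lq() and emulate_stq() handle the 16-byte GPR-pair accesses: a 16-byte-aligned EA goes through the do_lq()/do_stq() assembly helpers, anything else is split into two 8-byte accesses, and the IS_LE/IS_BE indices decide which register of the pair receives the doubleword at ea versus ea + 8. A sketch of that split; the gpr array and the IS_LE_SKETCH macro are illustrative:

    #include <stdint.h>
    #include <string.h>

    /* 1 on a little-endian build, 0 on big-endian, mirroring the kernel's IS_LE/IS_BE. */
    #define IS_LE_SKETCH (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)

    /* Sketch of the unaligned fallback in emulate_lq(): two doubleword loads into
     * the even/odd GPR pair. 16-byte-aligned EAs use the do_lq() asm helper instead. */
    static void lq_split_sketch(uint64_t gpr[32], uintptr_t ea, int reg)
    {
            /* on LE the doubleword at ea belongs in gpr[reg + 1], on BE in gpr[reg] */
            memcpy(&gpr[reg + IS_LE_SKETCH], (const void *)ea, 8);
            memcpy(&gpr[reg + !IS_LE_SKETCH], (const void *)(ea + 8), 8);
    }
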
860 unsigned long ea, struct pt_regs *regs, in do_vsx_load() argument
868 if (!address_ok(regs, ea, size) || copy_mem_in(mem, ea, size, regs)) in do_vsx_load()
892 unsigned long ea, struct pt_regs *regs, in do_vsx_store() argument
900 if (!address_ok(regs, ea, size)) in do_vsx_store()
920 return copy_mem_out(mem, ea, size, regs); in do_vsx_store()
924 int emulate_dcbz(unsigned long ea, struct pt_regs *regs) in emulate_dcbz() argument
932 ea &= 0xffffffffUL; in emulate_dcbz()
936 ea &= ~(size - 1); in emulate_dcbz()
937 if (!address_ok(regs, ea, size)) in emulate_dcbz()
940 err = __put_user(0, (unsigned long __user *) (ea + i)); in emulate_dcbz()
942 regs->dar = ea; in emulate_dcbz()
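
emulate_dcbz() truncates the EA in 32-bit mode, rounds it down to the cache-block size, validates the whole block with address_ok(), and clears it one long at a time with __put_user() so that a fault can set regs->dar and return -EFAULT. A user-space sketch of the addressing part; the 128-byte block size is an assumption, the kernel takes the real size from the L1 cache geometry:

    #include <stdint.h>
    #include <string.h>

    #define CACHE_BLOCK_SKETCH 128UL        /* assumed; the kernel reads the real block size */

    /* Sketch of emulate_dcbz(): dcbz zeroes the entire cache block containing ea. */
    static void emulate_dcbz_sketch(uintptr_t ea, int is_32bit)
    {
            if (is_32bit)
                    ea &= 0xffffffffUL;             /* 32-bit mode truncates the EA */
            ea &= ~(CACHE_BLOCK_SKETCH - 1);        /* round down to the block boundary */
            /* the kernel stores zero long-by-long via __put_user(); memset stands in */
            memset((void *)ea, 0, CACHE_BLOCK_SKETCH);
    }
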
2066 op->ea = xform_ea(word, regs); in analyse_instr()
2071 op->ea = xform_ea(word, regs); in analyse_instr()
2076 op->ea = xform_ea(word, regs); in analyse_instr()
2082 op->ea = xform_ea(word, regs); in analyse_instr()
2088 op->ea = xform_ea(word, regs); in analyse_instr()
2093 op->ea = xform_ea(word, regs); in analyse_instr()
2112 op->ea = xform_ea(word, regs); in analyse_instr()
2171 * bits of the EA say which field of the VMX register to use. in analyse_instr()
2277 op->ea = ra ? regs->gpr[ra] : 0; in analyse_instr()
2344 op->ea = ra ? regs->gpr[ra] : 0; in analyse_instr()
2386 op->ea = ra ? regs->gpr[ra] : 0; in analyse_instr()
2421 op->ea = ra ? regs->gpr[ra] : 0; in analyse_instr()
2544 op->ea = dform_ea(word, regs); in analyse_instr()
2550 op->ea = dform_ea(word, regs); in analyse_instr()
2556 op->ea = dform_ea(word, regs); in analyse_instr()
2562 op->ea = dform_ea(word, regs); in analyse_instr()
2568 op->ea = dform_ea(word, regs); in analyse_instr()
2574 op->ea = dform_ea(word, regs); in analyse_instr()
2580 op->ea = dform_ea(word, regs); in analyse_instr()
2587 op->ea = dform_ea(word, regs); in analyse_instr()
2592 op->ea = dform_ea(word, regs); in analyse_instr()
2599 op->ea = dform_ea(word, regs); in analyse_instr()
2605 op->ea = dform_ea(word, regs); in analyse_instr()
2611 op->ea = dform_ea(word, regs); in analyse_instr()
2617 op->ea = dform_ea(word, regs); in analyse_instr()
2625 op->ea = dqform_ea(word, regs); in analyse_instr()
2631 op->ea = dsform_ea(word, regs); in analyse_instr()
2656 op->ea = dsform_ea(word, regs); in analyse_instr()
2676 op->ea = dsform_ea(word, regs); in analyse_instr()
2681 op->ea = dqform_ea(word, regs); in analyse_instr()
2691 op->ea = dsform_ea(word, regs); in analyse_instr()
2700 op->ea = dsform_ea(word, regs); in analyse_instr()
2708 op->ea = dqform_ea(word, regs); in analyse_instr()
2721 op->ea = dsform_ea(word, regs); in analyse_instr()
2749 op->ea = mlsd_8lsd_ea(word, suffix, regs); in analyse_instr()
2813 op->ea = mlsd_8lsd_ea(word, suffix, regs); in analyse_instr()
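
Throughout analyse_instr(), each load/store opcode picks the EA helper that matches its instruction format and records the result in op->ea, which emulate_loadstore() consumes later. A compressed sketch of that decode pattern, reusing the EA helpers sketched earlier; struct op_sketch and the two opcodes shown only illustrate the shape of the switch:

    /* Illustrative decode result; the kernel's struct instruction_op carries more fields. */
    struct op_sketch {
            int reg;                /* source/target GPR */
            uint64_t ea;            /* effective address for emulate_loadstore() */
    };

    static void analyse_ea_sketch(uint32_t word, const struct regs_sketch *regs,
                                  struct op_sketch *op)
    {
            op->reg = (word >> 21) & 0x1f;          /* RT/RS field */
            switch (word >> 26) {                   /* primary opcode */
            case 32:                                /* lwz rt,D(ra): D-form */
                    op->ea = dform_ea_sketch(word, regs);
                    break;
            case 58:                                /* ld rt,DS(ra): DS-form */
                    op->ea = dsform_ea_sketch(word, regs);
                    break;
            default:
                    break;
            }
    }
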
2904 static nokprobe_inline int handle_stack_update(unsigned long ea, struct pt_regs *regs) in handle_stack_update() argument
2910 if (ea - STACK_INT_FRAME_SIZE <= current->thread.ksp_limit) { in handle_stack_update()
3055 unsigned long ea; in emulate_loadstore() local
3062 ea = truncate_if_32bit(regs->msr, op->ea); in emulate_loadstore()
3066 if (ea & (size - 1)) in emulate_loadstore()
3068 if (!address_ok(regs, ea, size)) in emulate_loadstore()
3075 __get_user_asmx(val, ea, err, "lbarx"); in emulate_loadstore()
3078 __get_user_asmx(val, ea, err, "lharx"); in emulate_loadstore()
3082 __get_user_asmx(val, ea, err, "lwarx"); in emulate_loadstore()
3086 __get_user_asmx(val, ea, err, "ldarx"); in emulate_loadstore()
3089 err = do_lqarx(ea, &regs->gpr[op->reg]); in emulate_loadstore()
3096 regs->dar = ea; in emulate_loadstore()
3104 if (ea & (size - 1)) in emulate_loadstore()
3106 if (!address_ok(regs, ea, size)) in emulate_loadstore()
3112 __put_user_asmx(op->val, ea, err, "stbcx.", cr); in emulate_loadstore()
3115 __put_user_asmx(op->val, ea, err, "sthcx.", cr); in emulate_loadstore()
3119 __put_user_asmx(op->val, ea, err, "stwcx.", cr); in emulate_loadstore()
3123 __put_user_asmx(op->val, ea, err, "stdcx.", cr); in emulate_loadstore()
3126 err = do_stqcx(ea, regs->gpr[op->reg], in emulate_loadstore()
3138 regs->dar = ea; in emulate_loadstore()
3144 err = emulate_lq(regs, ea, op->reg, cross_endian); in emulate_loadstore()
3148 err = read_mem(&regs->gpr[op->reg], ea, size, regs); in emulate_loadstore()
3167 err = do_fp_load(op, ea, regs, cross_endian); in emulate_loadstore()
3174 err = do_vec_load(op->reg, ea, size, regs, cross_endian); in emulate_loadstore()
3189 err = do_vsx_load(op, ea, regs, cross_endian); in emulate_loadstore()
3194 if (!address_ok(regs, ea, size)) in emulate_loadstore()
3203 err = copy_mem_in((u8 *) &v32, ea, nb, regs); in emulate_loadstore()
3209 ea += 4; in emulate_loadstore()
3218 err = emulate_stq(regs, ea, op->reg, cross_endian); in emulate_loadstore()
3225 ea >= regs->gpr[1] - STACK_INT_FRAME_SIZE) { in emulate_loadstore()
3226 err = handle_stack_update(ea, regs); in emulate_loadstore()
3231 err = write_mem(op->val, ea, size, regs); in emulate_loadstore()
3238 err = do_fp_store(op, ea, regs, cross_endian); in emulate_loadstore()
3245 err = do_vec_store(op->reg, ea, size, regs, cross_endian); in emulate_loadstore()
3260 err = do_vsx_store(op, ea, regs, cross_endian); in emulate_loadstore()
3265 if (!address_ok(regs, ea, size)) in emulate_loadstore()
3276 err = copy_mem_out((u8 *) &v32, ea, nb, regs); in emulate_loadstore()
3279 ea += 4; in emulate_loadstore()
3293 regs->gpr[op->update_reg] = op->ea; in emulate_loadstore()
3311 unsigned long ea; in emulate_step() local
3333 ea = truncate_if_32bit(regs->msr, op.ea); in emulate_step()
3334 if (!address_ok(regs, ea, 8)) in emulate_step()
3338 __cacheop_user_asmx(ea, err, "dcbst"); in emulate_step()
3341 __cacheop_user_asmx(ea, err, "dcbf"); in emulate_step()
3345 prefetchw((void *) ea); in emulate_step()
3349 prefetch((void *) ea); in emulate_step()
3352 __cacheop_user_asmx(ea, err, "icbi"); in emulate_step()
3355 err = emulate_dcbz(ea, regs); in emulate_step()
3359 regs->dar = ea; in emulate_step()
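
The CACHEOP path in emulate_step() first truncates the EA for a 32-bit task, checks an 8-byte window with address_ok(), and then issues the matching dcbst/dcbf/icbi/dcbz (dcbt and dcbtst become plain prefetches), setting regs->dar if the cache op faults. The truncation is the same truncate_if_32bit() used at the top of emulate_loadstore(); a one-line sketch:

    #include <stdint.h>

    /* Sketch of truncate_if_32bit(): with the MSR 64-bit mode bit (MSR[SF]) clear,
     * effective addresses wrap at 4 GB, so the upper half is discarded. */
    static uint64_t truncate_if_32bit_sketch(int msr_is_64bit, uint64_t ea)
    {
            return msr_is_64bit ? ea : (ea & 0xffffffffUL);
    }
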