Lines Matching full:ea
57 extern int do_lq(unsigned long ea, unsigned long *regs);
58 extern int do_stq(unsigned long ea, unsigned long val0, unsigned long val1);
59 extern int do_lqarx(unsigned long ea, unsigned long *regs);
60 extern int do_stqcx(unsigned long ea, unsigned long val0, unsigned long val1,
111 unsigned long ea, int nb) in address_ok() argument
115 if (__access_ok(ea, nb)) in address_ok()
117 if (__access_ok(ea, 1)) in address_ok()
121 regs->dar = ea; in address_ok()
132 unsigned long ea; in dform_ea() local
135 ea = (signed short) instr; /* sign-extend */ in dform_ea()
137 ea += regs->gpr[ra]; in dform_ea()
139 return ea; in dform_ea()
150 unsigned long ea; in dsform_ea() local
153 ea = (signed short) (instr & ~3); /* sign-extend */ in dsform_ea()
155 ea += regs->gpr[ra]; in dsform_ea()
157 return ea; in dsform_ea()
167 unsigned long ea; in dqform_ea() local
170 ea = (signed short) (instr & ~0xf); /* sign-extend */ in dqform_ea()
172 ea += regs->gpr[ra]; in dqform_ea()
174 return ea; in dqform_ea()
185 unsigned long ea; in xform_ea() local
189 ea = regs->gpr[rb]; in xform_ea()
191 ea += regs->gpr[ra]; in xform_ea()
193 return ea; in xform_ea()
206 unsigned long ea, d0, d1, d; in mlsd_8lsd_ea() local
219 ea = (signed int)dd; in mlsd_8lsd_ea()
220 ea = (ea << 2) | (d & 0x3); in mlsd_8lsd_ea()
223 ea += regs->gpr[ra]; in mlsd_8lsd_ea()
225 ; /* Leave ea as is */ in mlsd_8lsd_ea()
227 ea += regs->nip; in mlsd_8lsd_ea()
234 return ea; in mlsd_8lsd_ea()
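
The helpers above compute the effective address for the different instruction forms: D-form adds a sign-extended 16-bit displacement to GPR[RA] (or to 0 when RA is 0), DS- and DQ-form clear the low 2 or 4 displacement bits first, X-form adds GPR[RB] instead of an immediate, and mlsd_8lsd_ea() handles the prefixed forms, including PC-relative addressing. A minimal standalone sketch of the D-form and X-form cases (field extraction only; the 32-bit truncation for MSR[SF]=0 happens later in truncate_if_32bit(), and gpr[] here merely stands in for regs->gpr):

#include <stdint.h>

/* Sketch only: nothing here is the kernel API. */
static uint64_t dform_ea_sketch(uint32_t instr, const uint64_t *gpr)
{
        int ra = (instr >> 16) & 0x1f;            /* RA field, bits 11-15 */
        int64_t d = (int16_t)(instr & 0xffff);    /* EXTS(D) */
        uint64_t ea = (uint64_t)d;

        if (ra)                                   /* RA == 0 means "no base" */
                ea += gpr[ra];
        return ea;
}

static uint64_t xform_ea_sketch(uint32_t instr, const uint64_t *gpr)
{
        int ra = (instr >> 16) & 0x1f;
        int rb = (instr >> 11) & 0x1f;            /* RB field, bits 16-20 */
        uint64_t ea = gpr[rb];

        if (ra)
                ea += gpr[ra];
        return ea;
}

The DS and DQ forms differ only in clearing the low displacement bits (instr & ~3, instr & ~0xf) before sign-extending, exactly as the dsform_ea()/dqform_ea() lines above show.
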
306 unsigned long ea, int nb, in read_mem_aligned() argument
314 err = __get_user(x, (unsigned char __user *) ea); in read_mem_aligned()
317 err = __get_user(x, (unsigned short __user *) ea); in read_mem_aligned()
320 err = __get_user(x, (unsigned int __user *) ea); in read_mem_aligned()
324 err = __get_user(x, (unsigned long __user *) ea); in read_mem_aligned()
331 regs->dar = ea; in read_mem_aligned()
339 static nokprobe_inline int copy_mem_in(u8 *dest, unsigned long ea, int nb, in copy_mem_in() argument
346 c = max_align(ea); in copy_mem_in()
351 err = __get_user(*dest, (unsigned char __user *) ea); in copy_mem_in()
355 (unsigned short __user *) ea); in copy_mem_in()
359 (unsigned int __user *) ea); in copy_mem_in()
364 (unsigned long __user *) ea); in copy_mem_in()
369 regs->dar = ea; in copy_mem_in()
373 ea += c; in copy_mem_in()
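
copy_mem_in() (and its mirror copy_mem_out() below) walks the buffer in the largest chunks the current alignment allows: max_align(ea) isolates the lowest set bit of the address, so an unaligned access degrades to 1/2/4-byte pieces instead of always going byte by byte. A user-space sketch of that chunking, with memcpy standing in for the per-chunk __get_user calls (the names are illustrative, not the kernel helpers):

#include <stdint.h>
#include <string.h>

/* Largest power-of-two chunk the address allows, capped at 8 bytes. */
static int max_chunk(uint64_t x)
{
        uint64_t a = x & -x;            /* isolate the lowest set bit */

        return (a == 0 || a > 8) ? 8 : (int)a;
}

/* Copy nb bytes in alignment-sized pieces, as copy_mem_in() does. */
static void copy_in_chunks(uint8_t *dest, const uint8_t *src, int nb)
{
        while (nb > 0) {
                int c = max_chunk((uint64_t)(uintptr_t)src);

                if (c > nb)
                        c = max_chunk((uint64_t)nb);
                memcpy(dest, src, (size_t)c);   /* __get_user in the kernel */
                dest += c;
                src += c;
                nb -= c;
        }
}
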
379 unsigned long ea, int nb, in read_mem_unaligned() argument
391 err = copy_mem_in(&u.b[i], ea, nb, regs); in read_mem_unaligned()
398 * Read memory at address ea for nb bytes, return 0 for success
402 static int read_mem(unsigned long *dest, unsigned long ea, int nb, in read_mem() argument
405 if (!address_ok(regs, ea, nb)) in read_mem()
407 if ((ea & (nb - 1)) == 0) in read_mem()
408 return read_mem_aligned(dest, ea, nb, regs); in read_mem()
409 return read_mem_unaligned(dest, ea, nb, regs); in read_mem()
414 unsigned long ea, int nb, in write_mem_aligned() argument
421 err = __put_user(val, (unsigned char __user *) ea); in write_mem_aligned()
424 err = __put_user(val, (unsigned short __user *) ea); in write_mem_aligned()
427 err = __put_user(val, (unsigned int __user *) ea); in write_mem_aligned()
431 err = __put_user(val, (unsigned long __user *) ea); in write_mem_aligned()
436 regs->dar = ea; in write_mem_aligned()
444 static nokprobe_inline int copy_mem_out(u8 *dest, unsigned long ea, int nb, in copy_mem_out() argument
451 c = max_align(ea); in copy_mem_out()
456 err = __put_user(*dest, (unsigned char __user *) ea); in copy_mem_out()
460 (unsigned short __user *) ea); in copy_mem_out()
464 (unsigned int __user *) ea); in copy_mem_out()
469 (unsigned long __user *) ea); in copy_mem_out()
474 regs->dar = ea; in copy_mem_out()
478 ea += c; in copy_mem_out()
484 unsigned long ea, int nb, in write_mem_unaligned() argument
495 return copy_mem_out(&u.b[i], ea, nb, regs); in write_mem_unaligned()
499 * Write memory at address ea for nb bytes, return 0 for success
502 static int write_mem(unsigned long val, unsigned long ea, int nb, in write_mem() argument
505 if (!address_ok(regs, ea, nb)) in write_mem()
507 if ((ea & (nb - 1)) == 0) in write_mem()
508 return write_mem_aligned(val, ea, nb, regs); in write_mem()
509 return write_mem_unaligned(val, ea, nb, regs); in write_mem()
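
Both read_mem() and write_mem() follow the same pattern: validate the range with address_ok(), take the single-access fast path when the EA is naturally aligned, and fall back to the chunked copy otherwise. The alignment test relies on nb being a power of two; a sketch of that predicate:

#include <stdbool.h>
#include <stdint.h>

/* True when an nb-byte access at ea is naturally aligned.
 * nb must be a power of two, so nb - 1 is a mask of the low address
 * bits that have to be zero (e.g. ea & 7 for an 8-byte access). */
static bool naturally_aligned(uint64_t ea, int nb)
{
        return (ea & (uint64_t)(nb - 1)) == 0;
}
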
518 static int do_fp_load(struct instruction_op *op, unsigned long ea, in do_fp_load() argument
532 if (!address_ok(regs, ea, nb)) in do_fp_load()
535 err = copy_mem_in(u.b, ea, nb, regs); in do_fp_load()
569 static int do_fp_store(struct instruction_op *op, unsigned long ea, in do_fp_store() argument
582 if (!address_ok(regs, ea, nb)) in do_fp_store()
609 return copy_mem_out(u.b, ea, nb, regs); in do_fp_store()
616 static nokprobe_inline int do_vec_load(int rn, unsigned long ea, in do_vec_load() argument
626 if (!address_ok(regs, ea & ~0xfUL, 16)) in do_vec_load()
629 ea &= ~(size - 1); in do_vec_load()
630 err = copy_mem_in(&u.b[ea & 0xf], ea, size, regs); in do_vec_load()
634 do_byte_reverse(&u.b[ea & 0xf], size); in do_vec_load()
644 static nokprobe_inline int do_vec_store(int rn, unsigned long ea, in do_vec_store() argument
653 if (!address_ok(regs, ea & ~0xfUL, 16)) in do_vec_store()
656 ea &= ~(size - 1); in do_vec_store()
665 do_byte_reverse(&u.b[ea & 0xf], size); in do_vec_store()
666 return copy_mem_out(&u.b[ea & 0xf], ea, size, regs); in do_vec_store()
671 static nokprobe_inline int emulate_lq(struct pt_regs *regs, unsigned long ea, in emulate_lq() argument
676 if (!address_ok(regs, ea, 16)) in emulate_lq()
679 if ((ea & 0xf) == 0) { in emulate_lq()
680 err = do_lq(ea, &regs->gpr[reg]); in emulate_lq()
682 err = read_mem(&regs->gpr[reg + IS_LE], ea, 8, regs); in emulate_lq()
684 err = read_mem(&regs->gpr[reg + IS_BE], ea + 8, 8, regs); in emulate_lq()
691 static nokprobe_inline int emulate_stq(struct pt_regs *regs, unsigned long ea, in emulate_stq() argument
697 if (!address_ok(regs, ea, 16)) in emulate_stq()
705 if ((ea & 0xf) == 0) in emulate_stq()
706 return do_stq(ea, vals[0], vals[1]); in emulate_stq()
708 err = write_mem(vals[IS_LE], ea, 8, regs); in emulate_stq()
710 err = write_mem(vals[IS_BE], ea + 8, 8, regs); in emulate_stq()
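
When the EA is 16-byte aligned, emulate_lq()/emulate_stq() use the real lq/stq instructions via do_lq()/do_stq(); otherwise the quadword is split into two 8-byte accesses, and the IS_LE/IS_BE offsets decide which half of the register pair gets the doubleword at ea. A host-endianness sketch of the load-side fallback (illustrative names, plain memcpy instead of read_mem()):

#include <stdint.h>
#include <string.h>

#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#define IS_LE_SK 1
#define IS_BE_SK 0
#else
#define IS_LE_SK 0
#define IS_BE_SK 1
#endif

/* Unaligned lq fallback: the doubleword at ea goes to gpr[reg] on a
 * big-endian kernel and to gpr[reg + 1] on a little-endian one,
 * mirroring the IS_LE/IS_BE placement in emulate_lq() above. */
static void lq_unaligned_sketch(uint64_t gpr[32], int reg, const uint8_t *ea)
{
        memcpy(&gpr[reg + IS_LE_SK], ea, 8);
        memcpy(&gpr[reg + IS_BE_SK], ea + 8, 8);
}
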
895 unsigned long ea, struct pt_regs *regs, in do_vsx_load() argument
904 if (!address_ok(regs, ea, size) || copy_mem_in(mem, ea, size, regs)) in do_vsx_load()
942 unsigned long ea, struct pt_regs *regs, in do_vsx_store() argument
951 if (!address_ok(regs, ea, size)) in do_vsx_store()
985 return copy_mem_out(mem, ea, size, regs); in do_vsx_store()
989 int emulate_dcbz(unsigned long ea, struct pt_regs *regs) in emulate_dcbz() argument
997 ea &= 0xffffffffUL; in emulate_dcbz()
1001 ea &= ~(size - 1); in emulate_dcbz()
1002 if (!address_ok(regs, ea, size)) in emulate_dcbz()
1005 err = __put_user(0, (unsigned long __user *) (ea + i)); in emulate_dcbz()
1007 regs->dar = ea; in emulate_dcbz()
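
emulate_dcbz() rounds the EA down to a cache-block boundary and zeroes the whole block (the ea &= 0xffffffffUL line is the truncation applied when MSR is not 64-bit). A trivial user-space equivalent, assuming a 128-byte block; the kernel takes the real size from the CPU's L1 cache geometry rather than hard-coding it:

#include <stdint.h>
#include <string.h>

static void dcbz_sketch(void *ea)
{
        const uintptr_t block = 128;            /* assumed L1 block size */
        uintptr_t p = (uintptr_t)ea & ~(block - 1);

        memset((void *)p, 0, block);            /* zero the whole block */
}
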
2161 op->ea = xform_ea(word, regs); in analyse_instr()
2166 op->ea = xform_ea(word, regs); in analyse_instr()
2171 op->ea = xform_ea(word, regs); in analyse_instr()
2177 op->ea = xform_ea(word, regs); in analyse_instr()
2183 op->ea = xform_ea(word, regs); in analyse_instr()
2188 op->ea = xform_ea(word, regs); in analyse_instr()
2207 op->ea = xform_ea(word, regs); in analyse_instr()
2266 * bits of the EA say which field of the VMX register to use. in analyse_instr()
2372 op->ea = ra ? regs->gpr[ra] : 0; in analyse_instr()
2439 op->ea = ra ? regs->gpr[ra] : 0; in analyse_instr()
2485 op->ea = ra ? regs->gpr[ra] : 0; in analyse_instr()
2534 op->ea = ra ? regs->gpr[ra] : 0; in analyse_instr()
2680 op->ea = dform_ea(word, regs); in analyse_instr()
2686 op->ea = dform_ea(word, regs); in analyse_instr()
2692 op->ea = dform_ea(word, regs); in analyse_instr()
2698 op->ea = dform_ea(word, regs); in analyse_instr()
2704 op->ea = dform_ea(word, regs); in analyse_instr()
2710 op->ea = dform_ea(word, regs); in analyse_instr()
2716 op->ea = dform_ea(word, regs); in analyse_instr()
2723 op->ea = dform_ea(word, regs); in analyse_instr()
2728 op->ea = dform_ea(word, regs); in analyse_instr()
2735 op->ea = dform_ea(word, regs); in analyse_instr()
2741 op->ea = dform_ea(word, regs); in analyse_instr()
2747 op->ea = dform_ea(word, regs); in analyse_instr()
2753 op->ea = dform_ea(word, regs); in analyse_instr()
2761 op->ea = dqform_ea(word, regs); in analyse_instr()
2767 op->ea = dsform_ea(word, regs); in analyse_instr()
2796 op->ea = dsform_ea(word, regs); in analyse_instr()
2815 op->ea = dqform_ea(word, regs); in analyse_instr()
2832 op->ea = dsform_ea(word, regs); in analyse_instr()
2839 op->ea = dqform_ea(word, regs); in analyse_instr()
2851 op->ea = dsform_ea(word, regs); in analyse_instr()
2862 op->ea = dsform_ea(word, regs); in analyse_instr()
2872 op->ea = dqform_ea(word, regs); in analyse_instr()
2885 op->ea = dsform_ea(word, regs); in analyse_instr()
2916 op->ea = mlsd_8lsd_ea(word, suffix, regs); in analyse_instr()
2996 op->ea = mlsd_8lsd_ea(word, suffix, regs); in analyse_instr()
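
All of the op->ea assignments above sit in analyse_instr(), which only decodes: it fills a struct instruction_op (type, target register, size, EA) and leaves the actual memory access to emulate_loadstore()/emulate_step(). A toy decoder in that style, covering just D-form lwz (opcode 32); the field names mirror the kernel's struct, but the constants and helper are illustrative:

#include <stdint.h>

struct op_sketch {
        int type;               /* LOAD, STORE, ...      */
        int reg;                /* target/source GPR     */
        int size;               /* access width in bytes */
        uint64_t ea;            /* effective address     */
};

#define LOAD_SK 1

/* Decode-only step: no memory is touched here, mirroring analyse_instr(). */
static int analyse_sketch(struct op_sketch *op, uint32_t instr,
                          const uint64_t *gpr)
{
        if ((instr >> 26) == 32) {               /* lwz RT,D(RA) */
                int ra = (instr >> 16) & 0x1f;
                int64_t d = (int16_t)(instr & 0xffff);

                op->type = LOAD_SK;
                op->reg = (instr >> 21) & 0x1f;  /* RT */
                op->size = 4;
                op->ea = (uint64_t)d + (ra ? gpr[ra] : 0);
                return 1;                        /* decoded a load */
        }
        return 0;                                /* not handled by this sketch */
}
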
3105 static nokprobe_inline int handle_stack_update(unsigned long ea, struct pt_regs *regs) in handle_stack_update() argument
3247 unsigned long ea; in emulate_loadstore() local
3254 ea = truncate_if_32bit(regs->msr, op->ea); in emulate_loadstore()
3258 if (ea & (size - 1)) in emulate_loadstore()
3260 if (!address_ok(regs, ea, size)) in emulate_loadstore()
3267 __get_user_asmx(val, ea, err, "lbarx"); in emulate_loadstore()
3270 __get_user_asmx(val, ea, err, "lharx"); in emulate_loadstore()
3274 __get_user_asmx(val, ea, err, "lwarx"); in emulate_loadstore()
3278 __get_user_asmx(val, ea, err, "ldarx"); in emulate_loadstore()
3281 err = do_lqarx(ea, &regs->gpr[op->reg]); in emulate_loadstore()
3288 regs->dar = ea; in emulate_loadstore()
3296 if (ea & (size - 1)) in emulate_loadstore()
3298 if (!address_ok(regs, ea, size)) in emulate_loadstore()
3304 __put_user_asmx(op->val, ea, err, "stbcx.", cr); in emulate_loadstore()
3307 __put_user_asmx(op->val, ea, err, "sthcx.", cr); in emulate_loadstore()
3311 __put_user_asmx(op->val, ea, err, "stwcx.", cr); in emulate_loadstore()
3315 __put_user_asmx(op->val, ea, err, "stdcx.", cr); in emulate_loadstore()
3318 err = do_stqcx(ea, regs->gpr[op->reg], in emulate_loadstore()
3330 regs->dar = ea; in emulate_loadstore()
3336 err = emulate_lq(regs, ea, op->reg, cross_endian); in emulate_loadstore()
3340 err = read_mem(&regs->gpr[op->reg], ea, size, regs); in emulate_loadstore()
3359 err = do_fp_load(op, ea, regs, cross_endian); in emulate_loadstore()
3366 err = do_vec_load(op->reg, ea, size, regs, cross_endian); in emulate_loadstore()
3381 err = do_vsx_load(op, ea, regs, cross_endian); in emulate_loadstore()
3386 if (!address_ok(regs, ea, size)) in emulate_loadstore()
3395 err = copy_mem_in((u8 *) &v32, ea, nb, regs); in emulate_loadstore()
3401 ea += 4; in emulate_loadstore()
3410 err = emulate_stq(regs, ea, op->reg, cross_endian); in emulate_loadstore()
3417 ea >= regs->gpr[1] - STACK_INT_FRAME_SIZE) { in emulate_loadstore()
3418 err = handle_stack_update(ea, regs); in emulate_loadstore()
3423 err = write_mem(op->val, ea, size, regs); in emulate_loadstore()
3430 err = do_fp_store(op, ea, regs, cross_endian); in emulate_loadstore()
3437 err = do_vec_store(op->reg, ea, size, regs, cross_endian); in emulate_loadstore()
3452 err = do_vsx_store(op, ea, regs, cross_endian); in emulate_loadstore()
3457 if (!address_ok(regs, ea, size)) in emulate_loadstore()
3468 err = copy_mem_out((u8 *) &v32, ea, nb, regs); in emulate_loadstore()
3471 ea += 4; in emulate_loadstore()
3485 regs->gpr[op->update_reg] = op->ea; in emulate_loadstore()
3503 unsigned long ea; in emulate_step() local
3525 ea = truncate_if_32bit(regs->msr, op.ea); in emulate_step()
3526 if (!address_ok(regs, ea, 8)) in emulate_step()
3530 __cacheop_user_asmx(ea, err, "dcbst"); in emulate_step()
3533 __cacheop_user_asmx(ea, err, "dcbf"); in emulate_step()
3537 prefetchw((void *) ea); in emulate_step()
3541 prefetch((void *) ea); in emulate_step()
3544 __cacheop_user_asmx(ea, err, "icbi"); in emulate_step()
3547 err = emulate_dcbz(ea, regs); in emulate_step()
3551 regs->dar = ea; in emulate_step()
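
The tail of emulate_step() handles the cache-management ops: dcbst, dcbf and icbi are executed for real through __cacheop_user_asmx, dcbt/dcbtst become plain prefetch()/prefetchw() hints, and dcbz goes through emulate_dcbz() above. The hint side can be shown portably with the compiler builtin (a sketch, not the kernel code):

/* dcbt = touch for read, dcbtst = touch for store; both are only hints,
 * so a prefetch (or nothing at all) is a valid emulation. */
static void dcbt_sketch(const void *ea)
{
        __builtin_prefetch(ea, 0);      /* read hint, like prefetch()   */
}

static void dcbtst_sketch(const void *ea)
{
        __builtin_prefetch(ea, 1);      /* write hint, like prefetchw() */
}
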