Diffstat (limited to 'arch/powerpc/lib/sstep.c')
 arch/powerpc/lib/sstep.c | 38 ++++++++++++++++++++++++++++++++++++--
 1 file changed, 36 insertions(+), 2 deletions(-)
diff --git a/arch/powerpc/lib/sstep.c b/arch/powerpc/lib/sstep.c
index c4d6a02a9d1a..0de41f29e0b0 100644
--- a/arch/powerpc/lib/sstep.c
+++ b/arch/powerpc/lib/sstep.c
@@ -476,7 +476,7 @@ static nokprobe_inline int do_vec_load(int rn, unsigned long ea,
return -EFAULT;
/* align to multiple of size */
ea &= ~(size - 1);
- err = copy_mem_in(u.b, ea, size);
+ err = copy_mem_in(&u.b[ea & 0xf], ea, size);
if (err)
return err;
@@ -508,7 +508,7 @@ static nokprobe_inline int do_vec_store(int rn, unsigned long ea,
else
u.v = current->thread.vr_state.vr[rn];
preempt_enable();
- return copy_mem_out(u.b, ea, size);
+ return copy_mem_out(&u.b[ea & 0xf], ea, size);
}
#endif /* CONFIG_ALTIVEC */
@@ -1807,12 +1807,46 @@ int analyse_instr(struct instruction_op *op, const struct pt_regs *regs,
break;
#ifdef CONFIG_ALTIVEC
+ /*
+ * Note: for the load/store vector element instructions,
+ * bits of the EA say which field of the VMX register to use.
+ */
+ case 7: /* lvebx */
+ op->type = MKOP(LOAD_VMX, 0, 1);
+ op->element_size = 1;
+ break;
+
+ case 39: /* lvehx */
+ op->type = MKOP(LOAD_VMX, 0, 2);
+ op->element_size = 2;
+ break;
+
+ case 71: /* lvewx */
+ op->type = MKOP(LOAD_VMX, 0, 4);
+ op->element_size = 4;
+ break;
+
case 103: /* lvx */
case 359: /* lvxl */
op->type = MKOP(LOAD_VMX, 0, 16);
op->element_size = 16;
break;
+ case 135: /* stvebx */
+ op->type = MKOP(STORE_VMX, 0, 1);
+ op->element_size = 1;
+ break;
+
+ case 167: /* stvehx */
+ op->type = MKOP(STORE_VMX, 0, 2);
+ op->element_size = 2;
+ break;
+
+ case 199: /* stvewx */
+ op->type = MKOP(STORE_VMX, 0, 4);
+ op->element_size = 4;
+ break;
+
case 231: /* stvx */
case 487: /* stvxl */
op->type = MKOP(STORE_VMX, 0, 16);
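
For illustration only (not part of the patch): a minimal userspace sketch of the addressing trick used above. For the vector element loads and stores, the EA is first aligned to the element size, and its low four bits then select which bytes of the 16-byte VMX register image are accessed, which is what the new &u.b[ea & 0xf] indexing implements. The union, helper function and values below are hypothetical names chosen for the sketch; the real emulation code also applies endianness fix-ups that are omitted here.

/*
 * Standalone sketch, not kernel code: place an element into a
 * 16-byte vector register image using the low EA bits as the
 * byte offset, mirroring &u.b[ea & 0xf] in the patch above.
 */
#include <stdio.h>
#include <string.h>

union vreg {
	unsigned char b[16];
	unsigned int w[4];
};

/* Copy 'size' bytes from 'src' into the element of 'v' addressed by 'ea'. */
static void vec_element_store(union vreg *v, unsigned long ea,
			      const void *src, int size)
{
	ea &= ~(unsigned long)(size - 1);	/* align EA to the element size */
	memcpy(&v->b[ea & 0xf], src, size);	/* low 4 bits pick the element */
}

int main(void)
{
	union vreg v = { .b = { 0 } };
	unsigned int val = 0xdeadbeef;

	/* EA 0x1008 with a 4-byte element lands in bytes 8..11 of the image. */
	vec_element_store(&v, 0x1008, &val, 4);

	for (int i = 0; i < 16; i++)
		printf("%02x ", v.b[i]);
	printf("\n");
	return 0;
}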