--- traps_misaligned.c (f19c3b4239f5bfb69aacbaf75d4277c095e7aa7d)
+++ traps_misaligned.c (7c83232161f609bbc452a1255f823f41afc411dd)
 // SPDX-License-Identifier: GPL-2.0-only
 /*
  * Copyright (C) 2020 Western Digital Corporation or its affiliates.
  */
 #include <linux/kernel.h>
 #include <linux/init.h>
 #include <linux/mm.h>
 #include <linux/module.h>
 #include <linux/irq.h>
 #include <linux/stringify.h>
 
 #include <asm/processor.h>
 #include <asm/ptrace.h>
 #include <asm/csr.h>
+#include <asm/entry-common.h>
 
 #define INSN_MATCH_LB 0x3
 #define INSN_MASK_LB 0x707f
 #define INSN_MATCH_LH 0x1003
 #define INSN_MASK_LH 0x707f
 #define INSN_MATCH_LW 0x2003
 #define INSN_MASK_LW 0x707f
 #define INSN_MATCH_LD 0x3003

--- 123 unchanged lines hidden ---

                  (s32)(((insn) >> 7) & 0x1f))
 #define MASK_FUNCT3 0x7000
 
 #define GET_PRECISION(insn) (((insn) >> 25) & 3)
 #define GET_RM(insn) (((insn) >> 12) & 7)
 #define PRECISION_S 0
 #define PRECISION_D 1
 
-static inline u8 load_u8(const u8 *addr)
+#ifdef CONFIG_RISCV_M_MODE
+static inline int load_u8(struct pt_regs *regs, const u8 *addr, u8 *r_val)
 {
         u8 val;
 
         asm volatile("lbu %0, %1" : "=&r" (val) : "m" (*addr));
+        *r_val = val;
 
-        return val;
+        return 0;
 }
 
-static inline void store_u8(u8 *addr, u8 val)
+static inline int store_u8(struct pt_regs *regs, u8 *addr, u8 val)
 {
         asm volatile ("sb %0, %1\n" : : "r" (val), "m" (*addr));
+
+        return 0;
 }
 
-static inline ulong get_insn(ulong mepc)
+static inline int get_insn(struct pt_regs *regs, ulong mepc, ulong *r_insn)
 {
         register ulong __mepc asm ("a2") = mepc;
         ulong val, rvc_mask = 3, tmp;
 
         asm ("and %[tmp], %[addr], 2\n"
              "bnez %[tmp], 1f\n"
 #if defined(CONFIG_64BIT)
              __stringify(LWU) " %[insn], (%[addr])\n"

--- 12 unchanged lines hidden ---

              "lhu %[tmp], 2(%[addr])\n"
              "sll %[tmp], %[tmp], 16\n"
              "add %[insn], %[insn], %[tmp]\n"
              "2:"
         : [insn] "=&r" (val), [tmp] "=&r" (tmp)
         : [addr] "r" (__mepc), [rvc_mask] "r" (rvc_mask),
           [xlen_minus_16] "i" (XLEN_MINUS_16));
 
-        return val;
+        *r_insn = val;
+
+        return 0;
 }
+#else
+static inline int load_u8(struct pt_regs *regs, const u8 *addr, u8 *r_val)
+{
+        if (user_mode(regs)) {
+                return __get_user(*r_val, addr);
+        } else {
+                *r_val = *addr;
+                return 0;
+        }
+}
+
+static inline int store_u8(struct pt_regs *regs, u8 *addr, u8 val)
+{
+        if (user_mode(regs)) {
+                return __put_user(val, addr);
+        } else {
+                *addr = val;
+                return 0;
+        }
+}
+
+#define __read_insn(regs, insn, insn_addr) \
+({ \
+        int __ret; \
+ \
+        if (user_mode(regs)) { \
+                __ret = __get_user(insn, insn_addr); \
+        } else { \
+                insn = *insn_addr; \
+                __ret = 0; \
+        } \
+ \
+        __ret; \
+})
+
+static inline int get_insn(struct pt_regs *regs, ulong epc, ulong *r_insn)
+{
+        ulong insn = 0;
+
+        if (epc & 0x2) {
+                ulong tmp = 0;
+                u16 __user *insn_addr = (u16 __user *)epc;
+
+                if (__read_insn(regs, insn, insn_addr))
+                        return -EFAULT;
+                /*
+                 * __get_user() uses a regular "lw", which sign-extends the
+                 * loaded value; clear the higher-order bits so they do not
+                 * leak into the "or" with the upper 16-bit half below.
+                 */
+                insn &= GENMASK(15, 0);
+                if ((insn & __INSN_LENGTH_MASK) != __INSN_LENGTH_32) {
+                        *r_insn = insn;
+                        return 0;
+                }
+                insn_addr++;
+                if (__read_insn(regs, tmp, insn_addr))
+                        return -EFAULT;
+                *r_insn = (tmp << 16) | insn;
+
+                return 0;
+        } else {
+                u32 __user *insn_addr = (u32 __user *)epc;
+
+                if (__read_insn(regs, insn, insn_addr))
+                        return -EFAULT;
+                if ((insn & __INSN_LENGTH_MASK) == __INSN_LENGTH_32) {
+                        *r_insn = insn;
+                        return 0;
+                }
+                insn &= GENMASK(15, 0);
+                *r_insn = insn;
+
+                return 0;
+        }
+}
+#endif
 
 union reg_data {
         u8 data_bytes[8];
         ulong data_ulong;
         u64 data_u64;
 };
 
 int handle_misaligned_load(struct pt_regs *regs)
 {
         union reg_data val;
         unsigned long epc = regs->epc;
-        unsigned long insn = get_insn(epc);
-        unsigned long addr = csr_read(mtval);
+        unsigned long insn;
+        unsigned long addr = regs->badaddr;
         int i, fp = 0, shift = 0, len = 0;
 
+        if (get_insn(regs, epc, &insn))
+                return -1;
+
         regs->epc = 0;
 
         if ((insn & INSN_MASK_LW) == INSN_MATCH_LW) {
                 len = 4;
                 shift = 8 * (sizeof(unsigned long) - len);
 #if defined(CONFIG_64BIT)
         } else if ((insn & INSN_MASK_LD) == INSN_MATCH_LD) {
                 len = 8;

--- 47 unchanged lines hidden ---

                 len = 4;
 #endif
         } else {
                 regs->epc = epc;
                 return -1;
         }
 
         val.data_u64 = 0;
-        for (i = 0; i < len; i++)
-                val.data_bytes[i] = load_u8((void *)(addr + i));
+        for (i = 0; i < len; i++) {
+                if (load_u8(regs, (void *)(addr + i), &val.data_bytes[i]))
+                        return -1;
+        }
 
         if (fp)
                 return -1;
         SET_RD(insn, regs, val.data_ulong << shift >> shift);
 
         regs->epc = epc + INSN_LEN(insn);
 
         return 0;
 }
 
 int handle_misaligned_store(struct pt_regs *regs)
 {
         union reg_data val;
         unsigned long epc = regs->epc;
-        unsigned long insn = get_insn(epc);
-        unsigned long addr = csr_read(mtval);
+        unsigned long insn;
+        unsigned long addr = regs->badaddr;
         int i, len = 0;
 
+        if (get_insn(regs, epc, &insn))
+                return -1;
+
         regs->epc = 0;
 
         val.data_ulong = GET_RS2(insn, regs);
 
         if ((insn & INSN_MASK_SW) == INSN_MATCH_SW) {
                 len = 4;
 #if defined(CONFIG_64BIT)
         } else if ((insn & INSN_MASK_SD) == INSN_MATCH_SD) {

--- 17 unchanged lines hidden ---

                    ((insn >> SH_RD) & 0x1f)) {
                 len = 4;
                 val.data_ulong = GET_RS2C(insn, regs);
         } else {
                 regs->epc = epc;
                 return -1;
         }
 
-        for (i = 0; i < len; i++)
-                store_u8((void *)(addr + i), val.data_bytes[i]);
+        for (i = 0; i < len; i++) {
+                if (store_u8(regs, (void *)(addr + i), val.data_bytes[i]))
+                        return -1;
+        }
 
         regs->epc = epc + INSN_LEN(insn);
 
         return 0;
 }
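
A note on the S-mode get_insn() added above: because the faulting program counter may be only 2-byte aligned, the instruction is fetched as one or two halfword reads and the halves are stitched back together, with the low two opcode bits deciding whether a second halfword is needed at all. The standalone sketch below mirrors that reassembly logic outside the kernel; the INSN_LEN_* constants, the code[] byte buffer and the read_u16()/fetch_insn() helpers are illustrative stand-ins rather than kernel APIs, so treat this as a sketch of the technique, not the kernel's implementation.

/*
 * Standalone illustration (not kernel code): reassemble a possibly
 * misaligned RISC-V instruction from halfword reads, the way the
 * S-mode get_insn() above does when epc is only 2-byte aligned.
 */
#include <stdint.h>
#include <stdio.h>

/* RISC-V encoding: a 32-bit instruction has both low opcode bits set. */
#define INSN_LEN_MASK   0x3u
#define INSN_LEN_32     0x3u

/* Hypothetical stand-in for the trapped program's code memory. */
static const uint8_t code[] = {
        0x01, 0x00,             /* c.nop (16-bit, low bits != 0b11) */
        0x13, 0x05, 0x40, 0x06, /* addi a0, zero, 100 (32-bit)      */
};

static uint16_t read_u16(size_t off)
{
        /* Byte-wise read, so the access itself is never misaligned. */
        return (uint16_t)code[off] | ((uint16_t)code[off + 1] << 8);
}

/* Returns the instruction at 'off' and reports its length in bytes. */
static uint32_t fetch_insn(size_t off, unsigned int *len)
{
        uint32_t insn = read_u16(off);  /* low halfword first */

        if ((insn & INSN_LEN_MASK) != INSN_LEN_32) {
                *len = 2;               /* compressed instruction */
                return insn;
        }
        /* 32-bit instruction: pull in the upper halfword and combine. */
        insn |= (uint32_t)read_u16(off + 2) << 16;
        *len = 4;
        return insn;
}

int main(void)
{
        unsigned int len;
        size_t off = 0;

        while (off < sizeof(code)) {
                uint32_t insn = fetch_insn(off, &len);

                printf("offset %zu: insn 0x%08x (%u bytes)\n", off, insn, len);
                off += len;
        }
        return 0;
}

Running it prints the compressed c.nop followed by the 32-bit addi; that same low-bits length check is what handle_misaligned_load() and handle_misaligned_store() rely on when they advance epc by INSN_LEN(insn).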