/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Preliminary support for HW exception handling for Microblaze
 *
 * Copyright (C) 2008-2009 Michal Simek <monstr@monstr.eu>
 * Copyright (C) 2008-2009 PetaLogix
 * Copyright (C) 2005 John Williams <jwilliams@itee.uq.edu.au>
 */

#ifndef _ASM_MICROBLAZE_EXCEPTIONS_H
#define _ASM_MICROBLAZE_EXCEPTIONS_H

#ifdef __KERNEL__

#ifndef CONFIG_MMU
#define EX_HANDLER_STACK_SIZ	(4*19)
#endif

#ifndef __ASSEMBLY__

/* Macros to enable and disable HW exceptions in the MSR */
/* Define MSR enable bit for HW exceptions */
#define HWEX_MSR_BIT (1 << 8)

#if CONFIG_XILINX_MICROBLAZE0_USE_MSR_INSTR
#define __enable_hw_exceptions()				\
	__asm__ __volatile__ ("	msrset	r0, %0;			\
				nop;"				\
			:					\
			: "i" (HWEX_MSR_BIT)			\
			: "memory")

#define __disable_hw_exceptions()				\
	__asm__ __volatile__ ("	msrclr	r0, %0;			\
				nop;"				\
			:					\
			: "i" (HWEX_MSR_BIT)			\
			: "memory")
#else /* !CONFIG_XILINX_MICROBLAZE0_USE_MSR_INSTR */
#define __enable_hw_exceptions()				\
	__asm__ __volatile__ ("					\
			mfs	r12, rmsr;			\
			nop;					\
			ori	r12, r12, %0;			\
			mts	rmsr, r12;			\
			nop;"					\
			:					\
			: "i" (HWEX_MSR_BIT)			\
			: "memory", "r12")

#define __disable_hw_exceptions()				\
	__asm__ __volatile__ ("					\
			mfs	r12, rmsr;			\
			nop;					\
			andi	r12, r12, ~%0;			\
			mts	rmsr, r12;			\
			nop;"					\
			:					\
			: "i" (HWEX_MSR_BIT)			\
			: "memory", "r12")
#endif /* CONFIG_XILINX_MICROBLAZE0_USE_MSR_INSTR */

asmlinkage void full_exception(struct pt_regs *regs, unsigned int type,
							int fsr, int addr);

asmlinkage void sw_exception(struct pt_regs *regs);
void bad_page_fault(struct pt_regs *regs, unsigned long address, int sig);

void die(const char *str, struct pt_regs *fp, long err);
void _exception(int signr, struct pt_regs *regs, int code, unsigned long addr);

#endif /* __ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_MICROBLAZE_EXCEPTIONS_H */
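
/*
 * Usage sketch (illustrative only; the function name below is hypothetical
 * and not part of this header or its callers): toggling hardware exceptions
 * from C code amounts to setting or clearing HWEX_MSR_BIT (MSR bit 8, the
 * MicroBlaze hardware exception enable bit) through whichever macro variant
 * was selected above.
 *
 *	static void example_toggle_hw_exceptions(void)
 *	{
 *		__enable_hw_exceptions();
 *		__disable_hw_exceptions();
 *	}
 */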