xref: /kvm-unit-tests/lib/arm/asm/sysreg.h (revision 0cc3a351b925928827baa4b69cf0e46ff5837083)
1 #ifndef _ASMARM_SYSREG_H_
2 #define _ASMARM_SYSREG_H_
3 /*
4  * From the Linux kernel arch/arm/include/asm/cp15.h
5  *
6  * This work is licensed under the terms of the GNU GPL, version 2.
7  */
8 
9 /*
10  * CR1 bits (CP#15 CR1)
11  */
#define CR_M	(1 << 0)	/* MMU enable				*/
#define CR_A	(1 << 1)	/* Alignment abort enable		*/
#define CR_C	(1 << 2)	/* Dcache enable			*/
#define CR_W	(1 << 3)	/* Write buffer enable			*/
#define CR_P	(1 << 4)	/* 32-bit exception handler		*/
#define CR_D	(1 << 5)	/* 32-bit data address range		*/
#define CR_L	(1 << 6)	/* Implementation defined		*/
#define CR_B	(1 << 7)	/* Big endian				*/
#define CR_S	(1 << 8)	/* System MMU protection		*/
#define CR_R	(1 << 9)	/* ROM MMU protection			*/
#define CR_F	(1 << 10)	/* Implementation defined		*/
#define CR_Z	(1 << 11)	/* Implementation defined		*/
#define CR_I	(1 << 12)	/* Icache enable			*/
#define CR_V	(1 << 13)	/* Vectors relocated to 0xffff0000	*/
#define CR_RR	(1 << 14)	/* Round Robin cache replacement	*/
#define CR_L4	(1 << 15)	/* LDR pc can set T bit			*/
#define CR_DT	(1 << 16)	/* NOTE(review): legacy/deprecated bit,
				   RAO/SBOP on newer cores -- confirm
				   against the ARM ARM SCTLR section */
#define CR_HA	(1 << 17)	/* Hardware management of Access Flag	*/
#define CR_IT	(1 << 18)	/* NOTE(review): legacy/deprecated bit,
				   RAO/SBOP on newer cores -- confirm	*/
#define CR_ST	(1 << 19)	/* NOTE(review): legacy bit, no comment
				   upstream either -- confirm		*/
#define CR_FI	(1 << 21)	/* Fast interrupt (lower latency mode)	*/
#define CR_U	(1 << 22)	/* Unaligned access operation		*/
#define CR_XP	(1 << 23)	/* Extended page tables			*/
#define CR_VE	(1 << 24)	/* Vectored interrupts			*/
#define CR_EE	(1 << 25)	/* Exception (Big) Endian		*/
#define CR_TRE	(1 << 28)	/* TEX remap enable			*/
#define CR_AFE	(1 << 29)	/* Access flag enable			*/
#define CR_TE	(1 << 30)	/* Thumb exception enable		*/
40 
41 #ifndef __ASSEMBLER__
42 #include <libcflat.h>
43 
/*
 * Accessor descriptors for coprocessor registers.  Each macro expands
 * to the 4-tuple (read mnemonic, write mnemonic, stringized operand
 * list, C type) that read_sysreg()/write_sysreg() below split back into
 * individual arguments.  xstr() stringizes the operand list for use in
 * the inline asm template (presumably provided by libcflat.h -- it is
 * not defined in this header).
 *
 * The 32-bit forms use MRC/MCR with a single register operand (%0);
 * the 64-bit forms use MRRC/MCRR, where %Q0/%R0 are GCC ARM operand
 * modifiers selecting the least/most significant half of a 64-bit
 * operand.
 */
#define __ACCESS_CP15(CRn, Op1, CRm, Op2)			\
	"mrc", "mcr", xstr(p15, Op1, %0, CRn, CRm, Op2), u32
#define __ACCESS_CP15_64(Op1, CRm)					\
	"mrrc", "mcrr", xstr(p15, Op1, %Q0, %R0, CRm), u64

#define __ACCESS_CP14(CRn, Op1, CRm, Op2)	\
	"mrc", "mcr", xstr(p14, Op1, %0, CRn, CRm, Op2), u32
#define __ACCESS_CP14_64(Op1, CRm)		\
	"mrrc", "mcrr", xstr(p14, Op1, %Q0, %R0, CRm), u64
53 
/*
 * read_sysreg(ACCESSOR) / write_sysreg(v, ACCESSOR) take one of the
 * __ACCESS_CP1x tuples above as their variadic argument.  The extra
 * level of indirection through __read_sysreg/__write_sysreg forces the
 * tuple to be macro-expanded first, so its four elements land in the
 * separate (r, w, c, t) parameters.
 *
 * The read path uses only the read mnemonic r (w is intentionally
 * unused), and the write path uses only w (r unused); both receive the
 * full tuple because the accessor supplies read and write forms
 * together.  The statement expression yields the value read, typed t;
 * writes cast v to t before handing it to the asm.
 */
#define __read_sysreg(r, w, c, t) ({				\
			t __val;				\
			asm volatile(r " " c : "=r" (__val));	\
			__val;					\
		})
#define read_sysreg(...)                 __read_sysreg(__VA_ARGS__)

#define __write_sysreg(v, r, w, c, t)   asm volatile(w " " c : : "r" ((t)(v)))
#define write_sysreg(v, ...)            __write_sysreg(v, __VA_ARGS__)
63 #endif /* !__ASSEMBLER__ */
64 
65 #endif /* _ASMARM_SYSREG_H_ */
66