/*
 * Ripped off from arch/arm64/include/asm/sysreg.h
 *
 * Copyright (C) 2016, Red Hat Inc, Andrew Jones <drjones@redhat.com>
 *
 * This work is licensed under the terms of the GNU LGPL, version 2.
 */
#ifndef _ASMARM64_SYSREG_H_
#define _ASMARM64_SYSREG_H_

/* Build the op0/op1/CRn/CRm/op2 encoding of a system register. */
#define sys_reg(op0, op1, crn, crm, op2) \
	((((op0)&3)<<19)|((op1)<<16)|((crn)<<12)|((crm)<<8)|((op2)<<5))

#ifdef __ASSEMBLY__
	.irp	num,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30
	.equ	.L__reg_num_x\num, \num
	.endr
	.equ	.L__reg_num_xzr, 31

	/*
	 * mrs_s/msr_s hand-assemble the MRS/MSR instruction words so that
	 * system registers the assembler does not know by name can be
	 * accessed via their sys_reg() encoding.
	 */
	.macro	mrs_s, rt, sreg
	.inst	0xd5200000|(\sreg)|(.L__reg_num_\rt)
	.endm

	.macro	msr_s, sreg, rt
	.inst	0xd5000000|(\sreg)|(.L__reg_num_\rt)
	.endm
#else
#include <libcflat.h>

#define read_sysreg(r) ({					\
	u64 __val;						\
	asm volatile("mrs %0, " xstr(r) : "=r" (__val));	\
	__val;							\
})

#define write_sysreg(v, r) do {					\
	u64 __val = (u64)(v);					\
	asm volatile("msr " xstr(r) ", %x0" : : "rZ" (__val));	\
} while (0)

/* Make the mrs_s/msr_s macros above available to inline assembly as well. */
asm(
"	.irp	num,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30\n"
"	.equ	.L__reg_num_x\\num, \\num\n"
"	.endr\n"
"	.equ	.L__reg_num_xzr, 31\n"
"\n"
"	.macro	mrs_s, rt, sreg\n"
"	.inst	0xd5200000|(\\sreg)|(.L__reg_num_\\rt)\n"
"	.endm\n"
"\n"
"	.macro	msr_s, sreg, rt\n"
"	.inst	0xd5000000|(\\sreg)|(.L__reg_num_\\rt)\n"
"	.endm\n"
);
#endif /* __ASSEMBLY__ */
#endif /* _ASMARM64_SYSREG_H_ */
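
/*
 * Usage sketch, assuming code running at a privilege level that may access
 * the registers below (the register choices are examples only): registers
 * the assembler knows by name go straight through read_sysreg() and
 * write_sysreg(), while unnamed registers can be reached from inline
 * assembly through the mrs_s/msr_s macros and their sys_reg() encoding,
 * e.g. ICC_PMR_EL1 (op0=3, op1=0, CRn=4, CRm=6, op2=0):
 *
 *	u64 mpidr = read_sysreg(mpidr_el1);
 *	write_sysreg(mpidr & 0xff, tpidr_el1);
 *
 *	#define ICC_PMR_EL1	sys_reg(3, 0, 4, 6, 0)
 *	u64 pmr;
 *	asm volatile("mrs_s %0, " xstr(ICC_PMR_EL1) : "=r" (pmr));
 *	asm volatile("msr_s " xstr(ICC_PMR_EL1) ", %x0" : : "rZ" (pmr));
 */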