Searched refs:SH_RD (Results 1 – 3 of 3) sorted by relevance
268  insn = RVC_RS2S(insn) << SH_RD;        in handle_scalar_misaligned_load()
270  ((insn >> SH_RD) & 0x1f)) {            in handle_scalar_misaligned_load()
277  insn = RVC_RS2S(insn) << SH_RD;        in handle_scalar_misaligned_load()
279  ((insn >> SH_RD) & 0x1f)) {            in handle_scalar_misaligned_load()
285  insn = RVC_RS2S(insn) << SH_RD;        in handle_scalar_misaligned_load()
293  insn = RVC_RS2S(insn) << SH_RD;        in handle_scalar_misaligned_load()
300  insn = RVC_RS2S(insn) << SH_RD;        in handle_scalar_misaligned_load()
304  insn = RVC_RS2S(insn) << SH_RD;        in handle_scalar_misaligned_load()
137  if ((insn >> SH_RD) & MASK_RX)         in kvm_riscv_vcpu_csr_return()
435  insn = RVC_RS2S(insn) << SH_RD;        in kvm_riscv_vcpu_mmio_load()
437  ((insn >> SH_RD) & 0x1f)) {            in kvm_riscv_vcpu_mmio_load()
444  insn = RVC_RS2S(insn) << SH_RD;        in kvm_riscv_vcpu_mmio_load()
446  ((insn >> SH_RD) & 0x1f)) {            in kvm_riscv_vcpu_mmio_load()
555  ((insn >> SH_RD) & 0x1f)) {            in kvm_riscv_vcpu_mmio_store()
563  ((insn >> SH_RD) & 0x1f)) {            in kvm_riscv_vcpu_mmio_store()
412  #define SET_RD(insn, regs, val) (*REG_PTR(insn, SH_RD, regs) = (val))
417  #define SH_RD 7                        macro
452  #define RVC_RS1S(insn) (8 + RV_X(insn, SH_RD, 3))