@@ -132,22 +132,28 @@
   asm volatile ("csrr %0, " #reg : "=r"(__tmp)); \
   __tmp; })
 
-#define write_csr(reg, val) \
-  asm volatile ("csrw " #reg ", %0" :: "r"(val))
+#define write_csr(reg, val) ({ \
+  if (__builtin_constant_p(val) && (unsigned long)(val) < 32) \
+    asm volatile ("csrw " #reg ", %0" :: "i"(val)); \
+  else \
+    asm volatile ("csrw " #reg ", %0" :: "r"(val)); })
 
-#define swap_csr(reg, val) ({ long __tmp; \
-  asm volatile ("csrrw %0, " #reg ", %1" : "=r"(__tmp) : "r"(val)); \
+#define swap_csr(reg, val) ({ unsigned long __tmp; \
+  if (__builtin_constant_p(val) && (unsigned long)(val) < 32) \
+    asm volatile ("csrrw %0, " #reg ", %1" : "=r"(__tmp) : "i"(val)); \
+  else \
+    asm volatile ("csrrw %0, " #reg ", %1" : "=r"(__tmp) : "r"(val)); \
   __tmp; })
 
 #define set_csr(reg, bit) ({ unsigned long __tmp; \
-  if (__builtin_constant_p(bit) && (bit) < 32) \
+  if (__builtin_constant_p(bit) && (unsigned long)(bit) < 32) \
     asm volatile ("csrrs %0, " #reg ", %1" : "=r"(__tmp) : "i"(bit)); \
   else \
     asm volatile ("csrrs %0, " #reg ", %1" : "=r"(__tmp) : "r"(bit)); \
   __tmp; })
 
 #define clear_csr(reg, bit) ({ unsigned long __tmp; \
-  if (__builtin_constant_p(bit) && (bit) < 32) \
+  if (__builtin_constant_p(bit) && (unsigned long)(bit) < 32) \
     asm volatile ("csrrc %0, " #reg ", %1" : "=r"(__tmp) : "i"(bit)); \
   else \
     asm volatile ("csrrc %0, " #reg ", %1" : "=r"(__tmp) : "r"(bit)); \
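
Usage sketch (not part of the change above): with the updated macros, a compile-time constant operand below 32 goes through the "i" constraint, so the assembler can emit the CSR-immediate forms (csrwi/csrrwi/csrrsi/csrrci, which take a 5-bit immediate), while non-constant or larger values still go through a register. The MSTATUS_MIE mask below is assumed to be the usual 0x8 bit from encoding.h, and the helper names are purely illustrative.

    #define MSTATUS_MIE 0x00000008  /* assumed MIE bit mask from encoding.h */

    /* Constant mask < 32: set_csr/clear_csr take the "i" (immediate) path. */
    static inline void enable_machine_interrupts(void)
    {
      set_csr(mstatus, MSTATUS_MIE);
    }

    static inline void disable_machine_interrupts(void)
    {
      clear_csr(mstatus, MSTATUS_MIE);
    }

    /* Non-constant operand: __builtin_constant_p is false, so write_csr
     * falls back to the "r" (register) form. */
    static inline unsigned long save_and_clear_mie(void)
    {
      unsigned long old = read_csr(mstatus);
      write_csr(mstatus, old & ~MSTATUS_MIE);
      return old;
    }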