
Searched full:volatile (Results 1 – 25 of 2107) sorted by relevance


/linux/tools/perf/arch/x86/tests/
insn-x86-dat-src.c
21 asm volatile("rdtsc"); /* Start here */ in main()
25 asm volatile("vcvtph2ps %xmm3,%ymm5"); in main()
31 asm volatile("cmovno %rax,%rbx"); in main()
32 asm volatile("cmovno 0x12345678(%rax),%rcx"); in main()
33 asm volatile("cmovno 0x12345678(%rax),%cx"); in main()
35 asm volatile("cmove %rax,%rbx"); in main()
36 asm volatile("cmove 0x12345678(%rax),%rcx"); in main()
37 asm volatile("cmove 0x12345678(%rax),%cx"); in main()
39 asm volatile("seto 0x12345678(%rax)"); in main()
40 asm volatile("setno 0x12345678(%rax)"); in main()
[all …]
/linux/drivers/video/fbdev/kyro/
STG4000Reg.h
76 volatile u32 Thread0Enable; /* 0x0000 */
77 volatile u32 Thread1Enable; /* 0x0004 */
78 volatile u32 Thread0Recover; /* 0x0008 */
79 volatile u32 Thread1Recover; /* 0x000C */
80 volatile u32 Thread0Step; /* 0x0010 */
81 volatile u32 Thread1Step; /* 0x0014 */
82 volatile u32 VideoInStatus; /* 0x0018 */
83 volatile u32 Core2InSignStart; /* 0x001C */
84 volatile u32 Core1ResetVector; /* 0x0020 */
85 volatile u32 Core1ROMOffset; /* 0x0024 */
[all …]
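The STG4000 header above is a memory-mapped register overlay: every register is a volatile struct member, so each access compiles into a real load or store to the device rather than being cached, merged, or dropped by the optimizer. A minimal sketch of how such an overlay is used, under the assumption of an already-mapped base pointer (the stg_enable_thread0() helper and the mapping step are illustrative, not taken from the kyro driver):

    #include <linux/types.h>

    /* Sketch only: register names mirror the snippet above. */
    struct stg4000_core_regs {
            volatile u32 Thread0Enable;     /* 0x0000 */
            volatile u32 Thread1Enable;     /* 0x0004 */
            volatile u32 Thread0Recover;    /* 0x0008 */
    };

    /* Illustrative helper: volatile forces a real 32-bit store to 0x0000. */
    static void stg_enable_thread0(struct stg4000_core_regs *core)
    {
            core->Thread0Enable = 1;
    }

Modern kernel code would usually reach the same registers through readl()/writel() on an __iomem pointer, but the volatile-struct overlay gives the same guarantee that every access really touches the hardware.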
/linux/lib/raid6/
sse2.c
48 asm volatile("movdqa %0,%%xmm0" : : "m" (raid6_sse_constants.x1d[0])); in raid6_sse21_gen_syndrome()
49 asm volatile("pxor %xmm5,%xmm5"); /* Zero temp */ in raid6_sse21_gen_syndrome()
52 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse21_gen_syndrome()
53 asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse21_gen_syndrome()
54 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
55 asm volatile("movdqa %xmm2,%xmm4"); /* Q[0] */ in raid6_sse21_gen_syndrome()
56 asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
58 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse21_gen_syndrome()
59 asm volatile("pcmpgtb %xmm4,%xmm5"); in raid6_sse21_gen_syndrome()
60 asm volatile("paddb %xmm4,%xmm4"); in raid6_sse21_gen_syndrome()
[all …]
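In the RAID-6 routines above, the volatile qualifier on each asm statement is what keeps GCC from deleting or reordering them: the instructions have no outputs the compiler can see (all state lives in the SSE registers), so without volatile the optimizer could drop or rearrange the chain. A small sketch of the same idiom, with an illustrative helper name and the same "m" constraint style:

    /* Sketch only: XOR 16 aligned bytes at p into %xmm0.
     * The cast in the "m" constraint tells GCC exactly which 16 bytes
     * the instruction reads, and volatile keeps the statement alive. */
    static inline void xor16_into_xmm0(const void *p)
    {
            asm volatile("pxor %0,%%xmm0"
                         : /* no compiler-visible outputs */
                         : "m" (*(const unsigned char (*)[16])p));
    }

As in the raid6 code, the source operand must be 16-byte aligned for the non-unaligned SSE forms.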
avx2.c
46 asm volatile("vmovdqa %0,%%ymm0" : : "m" (raid6_avx2_constants.x1d[0])); in raid6_avx21_gen_syndrome()
47 asm volatile("vpxor %ymm3,%ymm3,%ymm3"); /* Zero temp */ in raid6_avx21_gen_syndrome()
50 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_avx21_gen_syndrome()
51 asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d]));/* P[0] */ in raid6_avx21_gen_syndrome()
52 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
53 asm volatile("vmovdqa %ymm2,%ymm4");/* Q[0] */ in raid6_avx21_gen_syndrome()
54 asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
56 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_avx21_gen_syndrome()
57 asm volatile("vpcmpgtb %ymm4,%ymm3,%ymm5"); in raid6_avx21_gen_syndrome()
58 asm volatile("vpaddb %ymm4,%ymm4,%ymm4"); in raid6_avx21_gen_syndrome()
[all …]
recov_ssse3.c
54 asm volatile("movdqa %0,%%xmm7" : : "m" (x0f[0])); in raid6_2data_recov_ssse3()
57 asm volatile("movdqa %0,%%xmm6" : : "m" (qmul[0])); in raid6_2data_recov_ssse3()
58 asm volatile("movdqa %0,%%xmm14" : : "m" (pbmul[0])); in raid6_2data_recov_ssse3()
59 asm volatile("movdqa %0,%%xmm15" : : "m" (pbmul[16])); in raid6_2data_recov_ssse3()
67 asm volatile("movdqa %0,%%xmm1" : : "m" (q[0])); in raid6_2data_recov_ssse3()
68 asm volatile("movdqa %0,%%xmm9" : : "m" (q[16])); in raid6_2data_recov_ssse3()
69 asm volatile("movdqa %0,%%xmm0" : : "m" (p[0])); in raid6_2data_recov_ssse3()
70 asm volatile("movdqa %0,%%xmm8" : : "m" (p[16])); in raid6_2data_recov_ssse3()
71 asm volatile("pxor %0,%%xmm1" : : "m" (dq[0])); in raid6_2data_recov_ssse3()
72 asm volatile("pxor %0,%%xmm9" : : "m" (dq[16])); in raid6_2data_recov_ssse3()
[all …]
recov_loongarch_simd.c
69 asm volatile("vld $vr20, %0" : : "m" (qmul[0])); in raid6_2data_recov_lsx()
70 asm volatile("vld $vr21, %0" : : "m" (qmul[16])); in raid6_2data_recov_lsx()
71 asm volatile("vld $vr22, %0" : : "m" (pbmul[0])); in raid6_2data_recov_lsx()
72 asm volatile("vld $vr23, %0" : : "m" (pbmul[16])); in raid6_2data_recov_lsx()
76 asm volatile("vld $vr4, %0" : : "m" (q[0])); in raid6_2data_recov_lsx()
77 asm volatile("vld $vr5, %0" : : "m" (q[16])); in raid6_2data_recov_lsx()
78 asm volatile("vld $vr6, %0" : : "m" (q[32])); in raid6_2data_recov_lsx()
79 asm volatile("vld $vr7, %0" : : "m" (q[48])); in raid6_2data_recov_lsx()
81 asm volatile("vld $vr8, %0" : : "m" (dq[0])); in raid6_2data_recov_lsx()
82 asm volatile("vld $vr9, %0" : : "m" (dq[16])); in raid6_2data_recov_lsx()
[all …]
recov_avx2.c
53 asm volatile("vpbroadcastb %0, %%ymm7" : : "m" (x0f)); in raid6_2data_recov_avx2()
57 asm volatile("vmovdqa %0, %%ymm1" : : "m" (q[0])); in raid6_2data_recov_avx2()
58 asm volatile("vmovdqa %0, %%ymm9" : : "m" (q[32])); in raid6_2data_recov_avx2()
59 asm volatile("vmovdqa %0, %%ymm0" : : "m" (p[0])); in raid6_2data_recov_avx2()
60 asm volatile("vmovdqa %0, %%ymm8" : : "m" (p[32])); in raid6_2data_recov_avx2()
61 asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0])); in raid6_2data_recov_avx2()
62 asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32])); in raid6_2data_recov_avx2()
63 asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (dp[0])); in raid6_2data_recov_avx2()
64 asm volatile("vpxor %0, %%ymm8, %%ymm8" : : "m" (dp[32])); in raid6_2data_recov_avx2()
73 asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (qmul[0])); in raid6_2data_recov_avx2()
[all …]
loongarch_simd.c
54 asm volatile("vld $vr0, %0" : : "m"(dptr[z0][d+0*NSIZE])); in raid6_lsx_gen_syndrome()
55 asm volatile("vld $vr1, %0" : : "m"(dptr[z0][d+1*NSIZE])); in raid6_lsx_gen_syndrome()
56 asm volatile("vld $vr2, %0" : : "m"(dptr[z0][d+2*NSIZE])); in raid6_lsx_gen_syndrome()
57 asm volatile("vld $vr3, %0" : : "m"(dptr[z0][d+3*NSIZE])); in raid6_lsx_gen_syndrome()
58 asm volatile("vori.b $vr4, $vr0, 0"); in raid6_lsx_gen_syndrome()
59 asm volatile("vori.b $vr5, $vr1, 0"); in raid6_lsx_gen_syndrome()
60 asm volatile("vori.b $vr6, $vr2, 0"); in raid6_lsx_gen_syndrome()
61 asm volatile("vori.b $vr7, $vr3, 0"); in raid6_lsx_gen_syndrome()
64 asm volatile("vld $vr8, %0" : : "m"(dptr[z][d+0*NSIZE])); in raid6_lsx_gen_syndrome()
65 asm volatile("vld $vr9, %0" : : "m"(dptr[z][d+1*NSIZE])); in raid6_lsx_gen_syndrome()
[all …]
sse1.c
52 asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d)); in raid6_sse11_gen_syndrome()
53 asm volatile("pxor %mm5,%mm5"); /* Zero temp */ in raid6_sse11_gen_syndrome()
56 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse11_gen_syndrome()
57 asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse11_gen_syndrome()
58 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
59 asm volatile("movq %mm2,%mm4"); /* Q[0] */ in raid6_sse11_gen_syndrome()
60 asm volatile("movq %0,%%mm6" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
62 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse11_gen_syndrome()
63 asm volatile("pcmpgtb %mm4,%mm5"); in raid6_sse11_gen_syndrome()
64 asm volatile("paddb %mm4,%mm4"); in raid6_sse11_gen_syndrome()
[all …]
mmx.c
47 asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d)); in raid6_mmx1_gen_syndrome()
48 asm volatile("pxor %mm5,%mm5"); /* Zero temp */ in raid6_mmx1_gen_syndrome()
51 asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_mmx1_gen_syndrome()
52 asm volatile("movq %mm2,%mm4"); /* Q[0] */ in raid6_mmx1_gen_syndrome()
54 asm volatile("movq %0,%%mm6" : : "m" (dptr[z][d])); in raid6_mmx1_gen_syndrome()
55 asm volatile("pcmpgtb %mm4,%mm5"); in raid6_mmx1_gen_syndrome()
56 asm volatile("paddb %mm4,%mm4"); in raid6_mmx1_gen_syndrome()
57 asm volatile("pand %mm0,%mm5"); in raid6_mmx1_gen_syndrome()
58 asm volatile("pxor %mm5,%mm4"); in raid6_mmx1_gen_syndrome()
59 asm volatile("pxor %mm5,%mm5"); in raid6_mmx1_gen_syndrome()
[all …]
/linux/tools/sched_ext/include/scx/
enums.autogen.bpf.h
7 const volatile u64 __SCX_OPS_NAME_LEN __weak;
10 const volatile u64 __SCX_SLICE_DFL __weak;
13 const volatile u64 __SCX_SLICE_INF __weak;
16 const volatile u64 __SCX_RQ_ONLINE __weak;
19 const volatile u64 __SCX_RQ_CAN_STOP_TICK __weak;
22 const volatile u64 __SCX_RQ_BAL_PENDING __weak;
25 const volatile u64 __SCX_RQ_BAL_KEEP __weak;
28 const volatile u64 __SCX_RQ_BYPASSING __weak;
31 const volatile u64 __SCX_RQ_CLK_VALID __weak;
34 const volatile u64 __SCX_RQ_IN_WAKEUP __weak;
[all …]
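These const volatile globals are BPF .rodata constants: const places them in the read-only data section, and volatile stops clang from constant-folding the placeholder value, because the loader patches in the real value before the program is verified. A hedged sketch of the pattern with an illustrative variable name (the __SCX_* definitions above are emitted automatically by the sched_ext enum generator):

    /* BPF program side, sketch only: my_slice_ns is an illustrative name.
     * The 0 is a placeholder; userspace fills in the real value at load. */
    const volatile u64 my_slice_ns = 0;

With a libbpf skeleton, userspace would typically assign skel->rodata->my_slice_ns before the skeleton's load step, so the verifier and JIT see the final value as a true constant.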
/linux/arch/m68k/include/asm/
mvme147hw.h
10 volatile u_long dma_tadr;
11 volatile u_long dma_dadr;
12 volatile u_long dma_bcr;
13 volatile u_long dma_hr;
14 volatile u_short t1_preload;
15 volatile u_short t1_count;
16 volatile u_short t2_preload;
17 volatile u_short t2_count;
18 volatile u_char t1_int_cntrl;
19 volatile u_char t1_cntrl;
[all …]
/linux/arch/mips/include/asm/ip32/
mace.h
24 volatile unsigned int error_addr;
25 volatile unsigned int error;
49 volatile unsigned int control;
70 volatile unsigned int rev;
72 volatile unsigned int config_addr;
74 volatile unsigned char b[4];
75 volatile unsigned short w[2];
76 volatile unsigned int l;
98 volatile u64 mac_ctrl;
99 volatile unsigned long int_stat;
[all …]
/linux/arch/loongarch/include/asm/
loongson.h
18 (*(volatile u32 *)((char *)TO_UNCACHE(LOONGSON_REG_BASE) + (x)))
43 static inline void xconf_writel(u32 val, volatile void __iomem *addr) in xconf_writel()
45 asm volatile ( in xconf_writel()
53 static inline void xconf_writeq(u64 val64, volatile void __iomem *addr) in xconf_writeq()
55 asm volatile ( in xconf_writeq()
76 #define LS7A_DMA_CFG (volatile void *)TO_UNCACHE(LS7A_CHIPCFG_REG_BASE + 0x041c)
80 #define LS7A_INT_MASK_REG (volatile void *)TO_UNCACHE(LS7A_PCH_REG_BASE + 0x020)
81 #define LS7A_INT_EDGE_REG (volatile void *)TO_UNCACHE(LS7A_PCH_REG_BASE + 0x060)
82 #define LS7A_INT_CLEAR_REG (volatile void *)TO_UNCACHE(LS7A_PCH_REG_BASE + 0x080)
83 #define LS7A_INT_HTMSI_EN_REG (volatile void *)TO_UNCACHE(LS7A_PCH_REG_BASE + 0x040)
[all …]
/linux/arch/parisc/include/asm/
hardware.h
43 volatile uint32_t nothing; /* reg 0 */
44 volatile uint32_t io_eim;
45 volatile uint32_t io_dc_adata;
46 volatile uint32_t io_ii_cdata;
47 volatile uint32_t io_dma_link; /* reg 4 */
48 volatile uint32_t io_dma_command;
49 volatile uint32_t io_dma_address;
50 volatile uint32_t io_dma_count;
51 volatile uint32_t io_flex; /* reg 8 */
52 volatile uint32_t io_spa_address;
[all …]
/linux/include/video/
gbe.h
12 volatile uint32_t ctrlstat; /* general control */
13 volatile uint32_t dotclock; /* dot clock PLL control */
14 volatile uint32_t i2c; /* crt I2C control */
15 volatile uint32_t sysclk; /* system clock PLL control */
16 volatile uint32_t i2cfp; /* flat panel I2C control */
17 volatile uint32_t id; /* device id/chip revision */
18 volatile uint32_t config; /* power on configuration [1] */
19 volatile uint32_t bist; /* internal bist status [1] */
21 volatile uint32_t vt_xy; /* current dot coords */
22 volatile uint32_t vt_xymax; /* maximum dot coords */
[all …]
/linux/arch/alpha/include/asm/
io_trivial.h
12 return __kernel_ldbu(*(const volatile u8 __force *)a); in IO_CONCAT()
18 return __kernel_ldwu(*(const volatile u16 __force *)a); in IO_CONCAT()
24 __kernel_stb(b, *(volatile u8 __force *)a); in IO_CONCAT()
30 __kernel_stw(b, *(volatile u16 __force *)a); in IO_CONCAT()
38 return *(const volatile u32 __force *)a; in IO_CONCAT()
44 *(volatile u32 __force *)a = b; in IO_CONCAT()
50 return *(const volatile u64 __force *)a; in IO_CONCAT()
56 *(volatile u64 __force *)a = b; in IO_CONCAT()
62 IO_CONCAT(__IO_PREFIX,readb)(const volatile void __iomem *a) in IO_CONCAT()
64 return __kernel_ldbu(*(const volatile u8 __force *)a); in IO_CONCAT()
[all …]
io.h
55 static inline unsigned long virt_to_phys(volatile void *address) in virt_to_phys()
65 static inline unsigned long virt_to_phys(volatile void *address) in virt_to_phys()
102 static inline unsigned long __deprecated isa_virt_to_bus(volatile void *address) in isa_virt_to_bus()
150 REMAP1(u8, readb, const volatile) in REMAP1()
151 REMAP1(u16, readw, const volatile) in REMAP1()
152 REMAP1(u32, readl, const volatile) in REMAP1()
153 REMAP1(u64, readq, const volatile) in REMAP1()
159 REMAP2(u8, writeb, volatile) in REMAP1()
160 REMAP2(u16, writew, volatile) in REMAP1()
161 REMAP2(u32, writel, volatile) in REMAP1()
[all …]
/linux/tools/testing/selftests/bpf/progs/
set_global_vars.c
18 const volatile __s64 var_s64 = -1;
19 const volatile __u64 var_u64 = 0;
20 const volatile i32 var_s32 = -1;
21 const volatile __u32 var_u32 = 0;
22 const volatile __s16 var_s16 = -1;
23 const volatile __u16 var_u16 = 0;
24 const volatile __s8 var_s8 = -1;
25 const volatile u8 var_u8 = 0;
26 const volatile enum Enum var_ea = EA1;
27 const volatile enum Enumu64 var_eb = EB1;
[all …]
/linux/arch/mips/include/asm/sgi/
mc.h
18 volatile u32 cpuctrl0; /* CPU control register 0, readwrite */
39 volatile u32 cpuctrl1; /* CPU control register 1, readwrite */
49 volatile u32 watchdogt; /* Watchdog reg rdonly, write clears */
52 volatile u32 systemid; /* MC system ID register, readonly */
57 volatile u32 divider; /* Divider reg for RPSS */
68 volatile u32 rcntpre; /* Preload refresh counter */
71 volatile u32 rcounter; /* Readonly refresh counter */
74 volatile u32 giopar; /* Parameter word for GIO64 */
93 volatile u32 cputp; /* CPU bus arb time period */
96 volatile u32 lbursttp; /* Time period for long bursts */
[all …]
ioc.h
26 volatile u8 ctrl1;
28 volatile u8 data1;
30 volatile u8 ctrl2;
32 volatile u8 data2;
37 volatile u8 data;
39 volatile u8 command;
44 volatile u8 istat0; /* Interrupt status zero */
54 volatile u8 imask0; /* Interrupt mask zero */
56 volatile u8 istat1; /* Interrupt status one */
66 volatile u8 imask1; /* Interrupt mask one */
[all …]
/linux/tools/testing/selftests/arm64/abi/
hwcap.c
44 asm volatile(".inst 0x4e284800" : : : ); in aes_sigill()
50 asm volatile(".inst 0xb82003ff" : : : ); in atomics_sigill()
62 asm volatile(".inst 0x1ac14800" : : : ); in crc32_sigill()
68 asm volatile(".inst 0xdac01c00" : : : "x0"); in cssc_sigill()
74 asm volatile(".inst 0x2ec03c00"); in f8cvt_sigill()
80 asm volatile(".inst 0xe40fc00"); in f8dp2_sigill()
86 asm volatile(".inst 0xe00fc00"); in f8dp4_sigill()
92 asm volatile(".inst 0xec0fc00"); in f8fma_sigill()
98 asm volatile(".inst 0x6e00ec00"); in f8mm4_sigill()
104 asm volatile(".inst 0x6e80ec00"); in f8mm8_sigill()
[all …]
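Each helper above executes a single hand-encoded instruction via .inst; the selftest installs a SIGILL handler and calls the helper, so a trap means the corresponding feature is not available, while a normal return means it is. asm volatile guarantees the otherwise side-effect-free encoding is actually emitted and executed. A sketch of the probe shape, reusing the CRC32 encoding quoted above (the surrounding signal handling lives elsewhere in hwcap.c):

    /* Sketch only: probe a CPU feature by executing one raw encoding.
     * If the instruction is unsupported it raises SIGILL. */
    static void crc32_probe(void)
    {
            asm volatile(".inst 0x1ac14800" : : : );
    }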
/linux/arch/sparc/include/asm/
io_32.h
19 void iounmap(volatile void __iomem *addr);
23 static inline void _memset_io(volatile void __iomem *dst, in _memset_io()
26 volatile void __iomem *d = dst; in _memset_io()
34 static inline void _memcpy_fromio(void *dst, const volatile void __iomem *src, in _memcpy_fromio()
46 static inline void _memcpy_toio(volatile void __iomem *dst, const void *src, in _memcpy_toio()
50 volatile void __iomem *d = dst; in _memcpy_toio()
65 static inline u8 sbus_readb(const volatile void __iomem *addr) in sbus_readb()
67 return *(__force volatile u8 *)addr; in sbus_readb()
70 static inline u16 sbus_readw(const volatile void __iomem *addr) in sbus_readw()
72 return *(__force volatile u16 *)addr; in sbus_readw()
[all …]
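The sbus_readb()/sbus_readw() helpers above are the accessor form of the same idea: the __iomem cookie is cast to a volatile pointer for exactly one dereference, so every call performs a real device load. A minimal sketch of that shape (my_readb is an illustrative name; __force silences sparse's address-space warning, as in the original):

    /* Sketch only: one-shot volatile dereference behind an accessor. */
    static inline u8 my_readb(const volatile void __iomem *addr)
    {
            return *(__force const volatile u8 *)addr;
    }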
/linux/Documentation/process/
volatile-considered-harmful.rst
4 Why the "volatile" type class should not be used
7 C programmers have often taken volatile to mean that the variable could be
10 being used. In other words, they have been known to treat volatile types
11 as a sort of easy atomic variable, which they are not. The use of volatile in
14 The key point to understand with regard to volatile is that its purpose is
21 Like volatile, the kernel primitives which make concurrent access to data
24 need to use volatile as well. If volatile is still necessary, there is
26 code, volatile can only serve to slow things down.
45 If shared_data were declared volatile, the locking would still be
49 volatile. When dealing with shared data, proper locking makes volatile
[all …]
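The document's argument is that the locking primitives already provide the needed guarantees: the compiler must assume spin_lock()/spin_unlock() can touch memory, so it re-reads the shared data inside the critical section without any volatile qualifier, and marking the data volatile would only defeat optimization. A short sketch of the situation the document describes (names are illustrative and echo the document's own example):

    #include <linux/spinlock.h>

    /* Sketch only: shared data protected by a lock needs no volatile. */
    static DEFINE_SPINLOCK(the_lock);
    static int shared_data;                 /* deliberately not volatile */

    static void set_shared(int v)
    {
            spin_lock(&the_lock);           /* acts as a barrier */
            shared_data = v;
            spin_unlock(&the_lock);         /* acts as a barrier */
    }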
/linux/Documentation/translations/it_IT/process/
volatile-considered-harmful.rst
3 :Original: :ref:`Documentation/process/volatile-considered-harmful.rst <volatile_considered_harmful…
8 Why the "volatile" keyword should not be used
13 will at times be tempted to use *volatile* in the kernel for the
15 *volatile* as an easy-to-use atomic variable, but that is not the case.
16 The use of *volatile* in the kernel is almost never correct; this document
19 The key point to understand about *volatile* is that its purpose is to
27 Like *volatile*, the kernel primitives that make access to data safe
30 there will be no need to use *volatile*. If it seems that *volatile* is
32 In a properly written piece of kernel code, *volatile* can only serve to
52 If the shared data had been declared *volatile*, the
[all …]
