
Searched full:volatile (Results 1 – 25 of 2168) sorted by relevance


/linux/tools/perf/arch/x86/tests/
insn-x86-dat-src.c
21 asm volatile("rdtsc"); /* Start here */ in main()
25 asm volatile("vcvtph2ps %xmm3,%ymm5"); in main()
31 asm volatile("cmovno %rax,%rbx"); in main()
32 asm volatile("cmovno 0x12345678(%rax),%rcx"); in main()
33 asm volatile("cmovno 0x12345678(%rax),%cx"); in main()
35 asm volatile("cmove %rax,%rbx"); in main()
36 asm volatile("cmove 0x12345678(%rax),%rcx"); in main()
37 asm volatile("cmove 0x12345678(%rax),%cx"); in main()
39 asm volatile("seto 0x12345678(%rax)"); in main()
40 asm volatile("setno 0x12345678(%rax)"); in main()
[all …]
/linux/drivers/video/fbdev/kyro/
STG4000Reg.h
76 volatile u32 Thread0Enable; /* 0x0000 */
77 volatile u32 Thread1Enable; /* 0x0004 */
78 volatile u32 Thread0Recover; /* 0x0008 */
79 volatile u32 Thread1Recover; /* 0x000C */
80 volatile u32 Thread0Step; /* 0x0010 */
81 volatile u32 Thread1Step; /* 0x0014 */
82 volatile u32 VideoInStatus; /* 0x0018 */
83 volatile u32 Core2InSignStart; /* 0x001C */
84 volatile u32 Core1ResetVector; /* 0x0020 */
85 volatile u32 Core1ROMOffset; /* 0x0024 */
[all …]
/linux/lib/raid6/
sse2.c
48 asm volatile("movdqa %0,%%xmm0" : : "m" (raid6_sse_constants.x1d[0])); in raid6_sse21_gen_syndrome()
49 asm volatile("pxor %xmm5,%xmm5"); /* Zero temp */ in raid6_sse21_gen_syndrome()
52 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse21_gen_syndrome()
53 asm volatile("movdqa %0,%%xmm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse21_gen_syndrome()
54 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
55 asm volatile("movdqa %xmm2,%xmm4"); /* Q[0] */ in raid6_sse21_gen_syndrome()
56 asm volatile("movdqa %0,%%xmm6" : : "m" (dptr[z0-1][d])); in raid6_sse21_gen_syndrome()
58 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse21_gen_syndrome()
59 asm volatile("pcmpgtb %xmm4,%xmm5"); in raid6_sse21_gen_syndrome()
60 asm volatile("paddb %xmm4,%xmm4"); in raid6_sse21_gen_syndrome()
[all …]
avx2.c
46 asm volatile("vmovdqa %0,%%ymm0" : : "m" (raid6_avx2_constants.x1d[0])); in raid6_avx21_gen_syndrome()
47 asm volatile("vpxor %ymm3,%ymm3,%ymm3"); /* Zero temp */ in raid6_avx21_gen_syndrome()
50 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_avx21_gen_syndrome()
51 asm volatile("vmovdqa %0,%%ymm2" : : "m" (dptr[z0][d]));/* P[0] */ in raid6_avx21_gen_syndrome()
52 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
53 asm volatile("vmovdqa %ymm2,%ymm4");/* Q[0] */ in raid6_avx21_gen_syndrome()
54 asm volatile("vmovdqa %0,%%ymm6" : : "m" (dptr[z0-1][d])); in raid6_avx21_gen_syndrome()
56 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_avx21_gen_syndrome()
57 asm volatile("vpcmpgtb %ymm4,%ymm3,%ymm5"); in raid6_avx21_gen_syndrome()
58 asm volatile("vpaddb %ymm4,%ymm4,%ymm4"); in raid6_avx21_gen_syndrome()
[all …]
recov_ssse3.c
54 asm volatile("movdqa %0,%%xmm7" : : "m" (x0f[0])); in raid6_2data_recov_ssse3()
57 asm volatile("movdqa %0,%%xmm6" : : "m" (qmul[0])); in raid6_2data_recov_ssse3()
58 asm volatile("movdqa %0,%%xmm14" : : "m" (pbmul[0])); in raid6_2data_recov_ssse3()
59 asm volatile("movdqa %0,%%xmm15" : : "m" (pbmul[16])); in raid6_2data_recov_ssse3()
67 asm volatile("movdqa %0,%%xmm1" : : "m" (q[0])); in raid6_2data_recov_ssse3()
68 asm volatile("movdqa %0,%%xmm9" : : "m" (q[16])); in raid6_2data_recov_ssse3()
69 asm volatile("movdqa %0,%%xmm0" : : "m" (p[0])); in raid6_2data_recov_ssse3()
70 asm volatile("movdqa %0,%%xmm8" : : "m" (p[16])); in raid6_2data_recov_ssse3()
71 asm volatile("pxor %0,%%xmm1" : : "m" (dq[0])); in raid6_2data_recov_ssse3()
72 asm volatile("pxor %0,%%xmm9" : : "m" (dq[16])); in raid6_2data_recov_ssse3()
[all …]
recov_loongarch_simd.c
69 asm volatile("vld $vr20, %0" : : "m" (qmul[0])); in raid6_2data_recov_lsx()
70 asm volatile("vld $vr21, %0" : : "m" (qmul[16])); in raid6_2data_recov_lsx()
71 asm volatile("vld $vr22, %0" : : "m" (pbmul[0])); in raid6_2data_recov_lsx()
72 asm volatile("vld $vr23, %0" : : "m" (pbmul[16])); in raid6_2data_recov_lsx()
76 asm volatile("vld $vr4, %0" : : "m" (q[0])); in raid6_2data_recov_lsx()
77 asm volatile("vld $vr5, %0" : : "m" (q[16])); in raid6_2data_recov_lsx()
78 asm volatile("vld $vr6, %0" : : "m" (q[32])); in raid6_2data_recov_lsx()
79 asm volatile("vld $vr7, %0" : : "m" (q[48])); in raid6_2data_recov_lsx()
81 asm volatile("vld $vr8, %0" : : "m" (dq[0])); in raid6_2data_recov_lsx()
82 asm volatile("vld $vr9, %0" : : "m" (dq[16])); in raid6_2data_recov_lsx()
[all …]
recov_avx2.c
53 asm volatile("vpbroadcastb %0, %%ymm7" : : "m" (x0f)); in raid6_2data_recov_avx2()
57 asm volatile("vmovdqa %0, %%ymm1" : : "m" (q[0])); in raid6_2data_recov_avx2()
58 asm volatile("vmovdqa %0, %%ymm9" : : "m" (q[32])); in raid6_2data_recov_avx2()
59 asm volatile("vmovdqa %0, %%ymm0" : : "m" (p[0])); in raid6_2data_recov_avx2()
60 asm volatile("vmovdqa %0, %%ymm8" : : "m" (p[32])); in raid6_2data_recov_avx2()
61 asm volatile("vpxor %0, %%ymm1, %%ymm1" : : "m" (dq[0])); in raid6_2data_recov_avx2()
62 asm volatile("vpxor %0, %%ymm9, %%ymm9" : : "m" (dq[32])); in raid6_2data_recov_avx2()
63 asm volatile("vpxor %0, %%ymm0, %%ymm0" : : "m" (dp[0])); in raid6_2data_recov_avx2()
64 asm volatile("vpxor %0, %%ymm8, %%ymm8" : : "m" (dp[32])); in raid6_2data_recov_avx2()
73 asm volatile("vbroadcasti128 %0, %%ymm4" : : "m" (qmul[0])); in raid6_2data_recov_avx2()
[all …]
loongarch_simd.c
54 asm volatile("vld $vr0, %0" : : "m"(dptr[z0][d+0*NSIZE])); in raid6_lsx_gen_syndrome()
55 asm volatile("vld $vr1, %0" : : "m"(dptr[z0][d+1*NSIZE])); in raid6_lsx_gen_syndrome()
56 asm volatile("vld $vr2, %0" : : "m"(dptr[z0][d+2*NSIZE])); in raid6_lsx_gen_syndrome()
57 asm volatile("vld $vr3, %0" : : "m"(dptr[z0][d+3*NSIZE])); in raid6_lsx_gen_syndrome()
58 asm volatile("vori.b $vr4, $vr0, 0"); in raid6_lsx_gen_syndrome()
59 asm volatile("vori.b $vr5, $vr1, 0"); in raid6_lsx_gen_syndrome()
60 asm volatile("vori.b $vr6, $vr2, 0"); in raid6_lsx_gen_syndrome()
61 asm volatile("vori.b $vr7, $vr3, 0"); in raid6_lsx_gen_syndrome()
64 asm volatile("vld $vr8, %0" : : "m"(dptr[z][d+0*NSIZE])); in raid6_lsx_gen_syndrome()
65 asm volatile("vld $vr9, %0" : : "m"(dptr[z][d+1*NSIZE])); in raid6_lsx_gen_syndrome()
[all …]
sse1.c
52 asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d)); in raid6_sse11_gen_syndrome()
53 asm volatile("pxor %mm5,%mm5"); /* Zero temp */ in raid6_sse11_gen_syndrome()
56 asm volatile("prefetchnta %0" : : "m" (dptr[z0][d])); in raid6_sse11_gen_syndrome()
57 asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_sse11_gen_syndrome()
58 asm volatile("prefetchnta %0" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
59 asm volatile("movq %mm2,%mm4"); /* Q[0] */ in raid6_sse11_gen_syndrome()
60 asm volatile("movq %0,%%mm6" : : "m" (dptr[z0-1][d])); in raid6_sse11_gen_syndrome()
62 asm volatile("prefetchnta %0" : : "m" (dptr[z][d])); in raid6_sse11_gen_syndrome()
63 asm volatile("pcmpgtb %mm4,%mm5"); in raid6_sse11_gen_syndrome()
64 asm volatile("paddb %mm4,%mm4"); in raid6_sse11_gen_syndrome()
[all …]
mmx.c
47 asm volatile("movq %0,%%mm0" : : "m" (raid6_mmx_constants.x1d)); in raid6_mmx1_gen_syndrome()
48 asm volatile("pxor %mm5,%mm5"); /* Zero temp */ in raid6_mmx1_gen_syndrome()
51 asm volatile("movq %0,%%mm2" : : "m" (dptr[z0][d])); /* P[0] */ in raid6_mmx1_gen_syndrome()
52 asm volatile("movq %mm2,%mm4"); /* Q[0] */ in raid6_mmx1_gen_syndrome()
54 asm volatile("movq %0,%%mm6" : : "m" (dptr[z][d])); in raid6_mmx1_gen_syndrome()
55 asm volatile("pcmpgtb %mm4,%mm5"); in raid6_mmx1_gen_syndrome()
56 asm volatile("paddb %mm4,%mm4"); in raid6_mmx1_gen_syndrome()
57 asm volatile("pand %mm0,%mm5"); in raid6_mmx1_gen_syndrome()
58 asm volatile("pxor %mm5,%mm4"); in raid6_mmx1_gen_syndrome()
59 asm volatile("pxor %mm5,%mm5"); in raid6_mmx1_gen_syndrome()
[all …]
/linux/arch/m68k/include/asm/
mvme147hw.h
23 #define m147_rtc ((MK48T02 * volatile)0xfffe07f8)
27 volatile u_long dma_tadr;
28 volatile u_long dma_dadr;
29 volatile u_long dma_bcr;
30 volatile u_long dma_hr;
31 volatile u_short t1_preload;
32 volatile u_short t1_count;
33 volatile u_short t2_preload;
34 volatile u_short t2_count;
35 volatile u_char t1_int_cntrl;
[all …]
/linux/arch/mips/include/asm/ip32/
mace.h
24 volatile unsigned int error_addr;
25 volatile unsigned int error;
49 volatile unsigned int control;
70 volatile unsigned int rev;
72 volatile unsigned int config_addr;
74 volatile unsigned char b[4];
75 volatile unsigned short w[2];
76 volatile unsigned int l;
98 volatile u64 mac_ctrl;
99 volatile unsigned long int_stat;
[all …]
/linux/arch/loongarch/include/asm/
loongson.h
18 (*(volatile u32 *)((char *)TO_UNCACHE(LOONGSON_REG_BASE) + (x)))
43 static inline void xconf_writel(u32 val, volatile void __iomem *addr) in xconf_writel()
45 asm volatile ( in xconf_writel()
53 static inline void xconf_writeq(u64 val64, volatile void __iomem *addr) in xconf_writeq()
55 asm volatile ( in xconf_writeq()
76 #define LS7A_DMA_CFG (volatile void *)TO_UNCACHE(LS7A_CHIPCFG_REG_BASE + 0x041c)
80 #define LS7A_INT_MASK_REG (volatile void *)TO_UNCACHE(LS7A_PCH_REG_BASE + 0x020)
81 #define LS7A_INT_EDGE_REG (volatile void *)TO_UNCACHE(LS7A_PCH_REG_BASE + 0x060)
82 #define LS7A_INT_CLEAR_REG (volatile void *)TO_UNCACHE(LS7A_PCH_REG_BASE + 0x080)
83 #define LS7A_INT_HTMSI_EN_REG (volatile void *)TO_UNCACHE(LS7A_PCH_REG_BASE + 0x040)
[all …]
/linux/include/video/
gbe.h
12 volatile uint32_t ctrlstat; /* general control */
13 volatile uint32_t dotclock; /* dot clock PLL control */
14 volatile uint32_t i2c; /* crt I2C control */
15 volatile uint32_t sysclk; /* system clock PLL control */
16 volatile uint32_t i2cfp; /* flat panel I2C control */
17 volatile uint32_t id; /* device id/chip revision */
18 volatile uint32_t config; /* power on configuration [1] */
19 volatile uint32_t bist; /* internal bist status [1] */
21 volatile uint32_t vt_xy; /* current dot coords */
22 volatile uint32_t vt_xymax; /* maximum dot coords */
[all …]
/linux/arch/parisc/include/asm/
hardware.h
43 volatile uint32_t nothing; /* reg 0 */
44 volatile uint32_t io_eim;
45 volatile uint32_t io_dc_adata;
46 volatile uint32_t io_ii_cdata;
47 volatile uint32_t io_dma_link; /* reg 4 */
48 volatile uint32_t io_dma_command;
49 volatile uint32_t io_dma_address;
50 volatile uint32_t io_dma_count;
51 volatile uint32_t io_flex; /* reg 8 */
52 volatile uint32_t io_spa_address;
[all …]
/linux/arch/powerpc/kernel/
io.c
27 void _insb(const volatile u8 __iomem *port, void *buf, long count) in _insb()
34 asm volatile("sync"); in _insb()
36 tmp = *(const volatile u8 __force *)port; in _insb()
40 asm volatile("twi 0,%0,0; isync" : : "r" (tmp)); in _insb()
44 void _outsb(volatile u8 __iomem *port, const void *buf, long count) in _outsb()
50 asm volatile("sync"); in _outsb()
52 *(volatile u8 __force *)port = *tbuf++; in _outsb()
54 asm volatile("sync"); in _outsb()
58 void _insw_ns(const volatile u16 __iomem *port, void *buf, long count) in _insw_ns()
65 asm volatile("sync"); in _insw_ns()
[all …]
/linux/arch/alpha/include/asm/
io_trivial.h
12 return __kernel_ldbu(*(const volatile u8 __force *)a); in IO_CONCAT()
18 return __kernel_ldwu(*(const volatile u16 __force *)a); in IO_CONCAT()
24 __kernel_stb(b, *(volatile u8 __force *)a); in IO_CONCAT()
30 __kernel_stw(b, *(volatile u16 __force *)a); in IO_CONCAT()
38 return *(const volatile u32 __force *)a; in IO_CONCAT()
44 *(volatile u32 __force *)a = b; in IO_CONCAT()
50 return *(const volatile u64 __force *)a; in IO_CONCAT()
56 *(volatile u64 __force *)a = b; in IO_CONCAT()
62 IO_CONCAT(__IO_PREFIX,readb)(const volatile void __iomem *a) in IO_CONCAT()
64 return __kernel_ldbu(*(const volatile u8 __force *)a); in IO_CONCAT()
[all …]
io.h
59 static inline unsigned long virt_to_phys(volatile void *address) in virt_to_phys()
69 static inline unsigned long virt_to_phys(volatile void *address) in virt_to_phys()
107 static inline unsigned long __deprecated isa_virt_to_bus(volatile void *address) in isa_virt_to_bus()
155 REMAP1(u8, readb, const volatile) in REMAP1()
156 REMAP1(u16, readw, const volatile) in REMAP1()
157 REMAP1(u32, readl, const volatile) in REMAP1()
158 REMAP1(u64, readq, const volatile) in REMAP1()
164 REMAP2(u8, writeb, volatile) in REMAP1()
165 REMAP2(u16, writew, volatile) in REMAP1()
166 REMAP2(u32, writel, volatile) in REMAP1()
[all …]
/linux/arch/mips/include/asm/sgi/
mc.h
18 volatile u32 cpuctrl0; /* CPU control register 0, readwrite */
39 volatile u32 cpuctrl1; /* CPU control register 1, readwrite */
49 volatile u32 watchdogt; /* Watchdog reg rdonly, write clears */
52 volatile u32 systemid; /* MC system ID register, readonly */
57 volatile u32 divider; /* Divider reg for RPSS */
68 volatile u32 rcntpre; /* Preload refresh counter */
71 volatile u32 rcounter; /* Readonly refresh counter */
74 volatile u32 giopar; /* Parameter word for GIO64 */
93 volatile u32 cputp; /* CPU bus arb time period */
96 volatile u32 lbursttp; /* Time period for long bursts */
[all …]
ioc.h
26 volatile u8 ctrl1;
28 volatile u8 data1;
30 volatile u8 ctrl2;
32 volatile u8 data2;
37 volatile u8 data;
39 volatile u8 command;
44 volatile u8 istat0; /* Interrupt status zero */
54 volatile u8 imask0; /* Interrupt mask zero */
56 volatile u8 istat1; /* Interrupt status one */
66 volatile u8 imask1; /* Interrupt mask one */
[all …]
/linux/arch/x86/include/asm/
kexec.h
80 asm volatile("movl %%ebx,%0" : "=m"(newregs->bx)); in crash_setup_regs()
81 asm volatile("movl %%ecx,%0" : "=m"(newregs->cx)); in crash_setup_regs()
82 asm volatile("movl %%edx,%0" : "=m"(newregs->dx)); in crash_setup_regs()
83 asm volatile("movl %%esi,%0" : "=m"(newregs->si)); in crash_setup_regs()
84 asm volatile("movl %%edi,%0" : "=m"(newregs->di)); in crash_setup_regs()
85 asm volatile("movl %%ebp,%0" : "=m"(newregs->bp)); in crash_setup_regs()
86 asm volatile("movl %%eax,%0" : "=m"(newregs->ax)); in crash_setup_regs()
87 asm volatile("movl %%esp,%0" : "=m"(newregs->sp)); in crash_setup_regs()
88 asm volatile("movl %%ss, %%eax;" :"=a"(newregs->ss)); in crash_setup_regs()
89 asm volatile("movl %%cs, %%eax;" :"=a"(newregs->cs)); in crash_setup_regs()
[all …]
/linux/tools/testing/selftests/arm64/abi/
hwcap.c
40 asm volatile(".inst 0x4e284800" : : : ); in aes_sigill()
46 asm volatile(".inst 0xb82003ff" : : : ); in atomics_sigill()
52 asm volatile(".inst 0x1ac14800" : : : ); in crc32_sigill()
58 asm volatile(".inst 0xdac01c00" : : : "x0"); in cssc_sigill()
64 asm volatile(".inst 0x2ec03c00"); in f8cvt_sigill()
70 asm volatile(".inst 0xe40fc00"); in f8dp2_sigill()
76 asm volatile(".inst 0xe00fc00"); in f8dp4_sigill()
82 asm volatile(".inst 0xec0fc00"); in f8fma_sigill()
88 asm volatile(".inst 0x2ec01c00"); in faminmax_sigill()
93 asm volatile("fmov s0, #1"); in fp_sigill()
[all …]
/linux/Documentation/process/
volatile-considered-harmful.rst
4 Why the "volatile" type class should not be used
7 C programmers have often taken volatile to mean that the variable could be
10 being used. In other words, they have been known to treat volatile types
11 as a sort of easy atomic variable, which they are not. The use of volatile in
14 The key point to understand with regard to volatile is that its purpose is
21 Like volatile, the kernel primitives which make concurrent access to data
24 need to use volatile as well. If volatile is still necessary, there is
26 code, volatile can only serve to slow things down.
45 If shared_data were declared volatile, the locking would still be
49 volatile. When dealing with shared data, proper locking makes volatile
[all …]
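
The documentation excerpt above makes the point that shared data protected by a lock does not need to be volatile: the locking primitive already acts as the required compiler and memory barrier, and volatile by itself provides no atomicity. A minimal, self-contained userspace sketch of that principle follows; it is not taken from the kernel tree, and the counter/worker names and the pthread mutex standing in for the kernel's spin_lock are illustrative assumptions.

/* Two threads increment a shared counter; the mutex, not volatile, makes it safe. */
#include <pthread.h>
#include <stdio.h>

static long counter;                 /* shared data: plain, not volatile */
static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;

static void *worker(void *arg)
{
	int i;

	(void)arg;
	for (i = 0; i < 100000; i++) {
		pthread_mutex_lock(&lock);    /* mutual exclusion + memory barrier */
		counter++;                    /* read-modify-write cannot be lost */
		pthread_mutex_unlock(&lock);
	}
	return NULL;
}

int main(void)
{
	pthread_t t1, t2;

	pthread_create(&t1, NULL, worker, NULL);
	pthread_create(&t2, NULL, worker, NULL);
	pthread_join(t1, NULL);
	pthread_join(t2, NULL);
	printf("counter = %ld\n", counter);   /* prints 200000 every run */
	return 0;
}

Declaring counter volatile here would not make counter++ atomic; it would only prevent the compiler from optimizing accesses that the lock already serializes, which is exactly the slowdown the document warns about.
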
/linux/arch/sparc/include/asm/
io_32.h
19 void iounmap(volatile void __iomem *addr);
23 static inline void _memset_io(volatile void __iomem *dst, in _memset_io()
26 volatile void __iomem *d = dst; in _memset_io()
34 static inline void _memcpy_fromio(void *dst, const volatile void __iomem *src, in _memcpy_fromio()
46 static inline void _memcpy_toio(volatile void __iomem *dst, const void *src, in _memcpy_toio()
50 volatile void __iomem *d = dst; in _memcpy_toio()
65 static inline u8 sbus_readb(const volatile void __iomem *addr) in sbus_readb()
67 return *(__force volatile u8 *)addr; in sbus_readb()
70 static inline u16 sbus_readw(const volatile void __iomem *addr) in sbus_readw()
72 return *(__force volatile u16 *)addr; in sbus_readw()
[all …]
/linux/tools/testing/selftests/bpf/progs/
verifier_sdiv.c
19 asm volatile (" \ in sdiv32_non_zero_imm_1()
31 asm volatile (" \ in sdiv32_non_zero_imm_2()
43 asm volatile (" \ in sdiv32_non_zero_imm_3()
55 asm volatile (" \ in sdiv32_non_zero_imm_4()
67 asm volatile (" \ in sdiv32_non_zero_imm_5()
79 asm volatile (" \ in sdiv32_non_zero_imm_6()
91 asm volatile (" \ in sdiv32_non_zero_imm_7()
103 asm volatile (" \ in sdiv32_non_zero_imm_8()
115 asm volatile (" \ in sdiv32_non_zero_reg_1()
128 asm volatile (" \ in sdiv32_non_zero_reg_2()
[all …]
