#if __LINUX_ARM_ARCH__ >= 6 && defined(CONFIG_CPU_32v6K)
	.macro	bitop, instr
	mov	r2, #1
	and	r3, r0, #7		@ Get bit offset
	add	r1, r1, r0, lsr #3	@ Get byte offset
	mov	r3, r2, lsl r3		@ create mask
1:	ldrexb	r2, [r1]		@ load-exclusive the byte
	\instr	r2, r2, r3		@ apply the operation
	strexb	r0, r2, [r1]		@ try to store it back
	cmp	r0, #0			@ retry if the exclusive store failed
	bne	1b
	mov	pc, lr
	.endm

	.macro	testop, instr, store
	and	r3, r0, #7		@ Get bit offset
	mov	r2, #1
	add	r1, r1, r0, lsr #3	@ Get byte offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb
1:	ldrexb	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strexb	ip, r2, [r1]
	cmp	ip, #0			@ retry if the exclusive store failed
	bne	1b
	smp_dmb
	cmp	r0, #0
	movne	r0, #1			@ normalise old bit value to 0/1
2:	mov	pc, lr
	.endm
#else
	.macro	bitop, instr
	and	r2, r0, #7		@ Get bit offset
	mov	r3, #1
	mov	r3, r3, lsl r2		@ create mask
	save_and_disable_irqs ip
	ldrb	r2, [r1, r0, lsr #3]
	\instr	r2, r2, r3
	strb	r2, [r1, r0, lsr #3]
	restore_irqs ip
	mov	pc, lr
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, instr, store
	add	r1, r1, r0, lsr #3	@ Get byte offset
	and	r3, r0, #7		@ Get bit offset
	mov	r0, #1
	save_and_disable_irqs ip
	ldrb	r2, [r1]
	tst	r2, r0, lsl r3		@ test old bit value
	\instr	r2, r2, r0, lsl r3	@ apply the operation
	\store	r2, [r1]
	moveq	r0, #0			@ return 0 if the bit was clear
	restore_irqs ip
	mov	pc, lr
	.endm
#endif
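
/*
 * Usage sketch (illustrative, not part of this file): the bit-ops entry
 * points are assumed to expand these macros roughly as below, with the
 * bit number in r0 and the base pointer in r1. The entry-point names and
 * exact instruction pairings here are assumptions; check the callers
 * (e.g. setbit.S, testsetbit.S) for the authoritative expansions.
 *
 *	ENTRY(_set_bit_le)
 *	bitop	orr			@ set_bit: OR the mask into the byte
 *
 *	ENTRY(_test_and_set_bit_le)
 *	testop	orreq, streqb		@ set the bit only if it was clear
 *
 *	ENTRY(_test_and_clear_bit_le)
 *	testop	bicne, strneb		@ clear the bit only if it was set
 *
 * The conditional \instr/\store pair is what the docstring's note refers
 * to: on the non-exclusive path the store can be skipped when the bit
 * already has the desired value, avoiding a needless cache-line dirty.
 * On the ldrexb/strexb path the \store argument is unused, since the
 * store must always be the strexb that closes the exclusive section.
 */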