/* SPDX-License-Identifier: GPL-2.0-only */
/* Copyright(c) 2016-2020 Intel Corporation. All rights reserved. */

#include <linux/linkage.h>
#include <asm/asm.h>

#ifndef CONFIG_UML

#ifdef CONFIG_X86_MCE

/*
 * copy_mc_fragile - copy memory with indication if an exception / fault happened
 *
 * The 'fragile' version is opted into by platform quirks and takes
 * pains to avoid unrecoverable corner cases like 'fast-string'
 * instruction sequences, and consuming poison across a cacheline
 * boundary. The non-fragile version is equivalent to memcpy()
 * regardless of CPU machine-check-recovery capability.
 */
SYM_FUNC_START(copy_mc_fragile)
	cmpl $8, %edx
	/* Less than 8 bytes? Go to byte copy loop */
	jb .L_no_whole_words

	/* Check for bad alignment of source */
	testl $7, %esi
	/* Already aligned */
	jz .L_8byte_aligned

	/* Copy one byte at a time until source is 8-byte aligned */
	movl %esi, %ecx
	andl $7, %ecx
	subl $8, %ecx
	negl %ecx
	subl %ecx, %edx
.L_read_leading_bytes:
	movb (%rsi), %al
.L_write_leading_bytes:
	movb %al, (%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz .L_read_leading_bytes

.L_8byte_aligned:
	movl %edx, %ecx
	andl $7, %edx
	shrl $3, %ecx
	jz .L_no_whole_words

.L_read_words:
	movq (%rsi), %r8
.L_write_words:
	movq %r8, (%rdi)
	addq $8, %rsi
	addq $8, %rdi
	decl %ecx
	jnz .L_read_words

	/* Any trailing bytes? */
.L_no_whole_words:
	andl %edx, %edx
	jz .L_done_memcpy_trap

	/* Copy trailing bytes */
	movl %edx, %ecx
.L_read_trailing_bytes:
	movb (%rsi), %al
.L_write_trailing_bytes:
	movb %al, (%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz .L_read_trailing_bytes

	/* Copy successful. Return zero */
.L_done_memcpy_trap:
	xorl %eax, %eax
.L_done:
	ret
SYM_FUNC_END(copy_mc_fragile)

	.section .fixup, "ax"
	/*
	 * Return number of bytes not copied for any failure. Note that
	 * there is no "tail" handling since the source buffer is 8-byte
	 * aligned and poison is cacheline aligned.
	 */
.E_read_words:
	shll $3, %ecx
.E_leading_bytes:
	addl %edx, %ecx
.E_trailing_bytes:
	mov %ecx, %eax
	jmp .L_done

	/*
	 * For write fault handling, given the destination is unaligned,
	 * we handle faults on multi-byte writes with a byte-by-byte
	 * copy up to the write-protected page.
	 */
.E_write_words:
	shll $3, %ecx
	addl %edx, %ecx
	movl %ecx, %edx
	jmp copy_mc_fragile_handle_tail

	.previous

	_ASM_EXTABLE_TYPE(.L_read_leading_bytes, .E_leading_bytes, EX_TYPE_DEFAULT_MCE_SAFE)
	_ASM_EXTABLE_TYPE(.L_read_words, .E_read_words, EX_TYPE_DEFAULT_MCE_SAFE)
	_ASM_EXTABLE_TYPE(.L_read_trailing_bytes, .E_trailing_bytes, EX_TYPE_DEFAULT_MCE_SAFE)
	_ASM_EXTABLE(.L_write_leading_bytes, .E_leading_bytes)
	_ASM_EXTABLE(.L_write_words, .E_write_words)
	_ASM_EXTABLE(.L_write_trailing_bytes, .E_trailing_bytes)
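
	/*
	 * Illustrative worked example of the fixup arithmetic above (a
	 * comment only, not part of the build): for a 29-byte copy whose
	 * source address has src & 7 == 5, the prologue copies 8 - 5 = 3
	 * leading bytes, leaving %edx = 26, which splits into %ecx = 3
	 * whole words and %edx = 2 trailing bytes. If poison is consumed
	 * while reading the second word (%ecx == 2 at the fault),
	 * .E_read_words returns (2 << 3) + 2 = 18, i.e. the 29 bytes
	 * requested minus the 11 bytes already copied.
	 */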
#endif /* CONFIG_X86_MCE */

/*
 * copy_mc_enhanced_fast_string - memory copy with exception handling
 *
 * Fast string copy + fault / exception handling. If the CPU does
 * support machine check exception recovery, but does not support
 * recovering from fast-string exceptions, then this CPU needs to be
 * added to the copy_mc_fragile_key set of quirks. Otherwise, absent any
 * machine check recovery support, this version should be no slower than
 * standard memcpy.
 */
SYM_FUNC_START(copy_mc_enhanced_fast_string)
	movq %rdi, %rax
	movq %rdx, %rcx
.L_copy:
	rep movsb
	/* Copy successful. Return zero */
	xorl %eax, %eax
	ret
SYM_FUNC_END(copy_mc_enhanced_fast_string)

	.section .fixup, "ax"
.E_copy:
	/*
	 * On fault, %rcx is updated such that the copy instruction could
	 * optionally be restarted at the fault position, i.e. it
	 * contains 'bytes remaining'. A non-zero return indicates an
	 * error to copy_mc_generic() users, or indicates a short
	 * transfer to user-copy routines.
	 */
	movq %rcx, %rax
	ret

	.previous

	_ASM_EXTABLE_TYPE(.L_copy, .E_copy, EX_TYPE_DEFAULT_MCE_SAFE)
#endif /* !CONFIG_UML */
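
/*
 * Caller-side sketch (illustrative comment only, not part of this
 * file): the C wrapper in arch/x86/lib/copy_mc.c selects between the
 * routines above roughly as follows, keying off the
 * copy_mc_fragile_key quirk and ERMS support. Helper names and exact
 * conditions here are an approximation, not a verbatim copy of that
 * file.
 *
 *	unsigned long __must_check
 *	copy_mc_to_kernel(void *dst, const void *src, unsigned int len)
 *	{
 *		if (copy_mc_fragile_enabled)
 *			return copy_mc_fragile(dst, src, len);
 *		if (static_cpu_has(X86_FEATURE_ERMS))
 *			return copy_mc_enhanced_fast_string(dst, src, len);
 *		memcpy(dst, src, len);
 *		return 0;
 *	}
 */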