// SPDX-License-Identifier: GPL-2.0
/*
 * This file contains core generic KASAN code.
 *
 * Copyright (c) 2014 Samsung Electronics Co., Ltd.
 * Author: Andrey Ryabinin <ryabinin.a.a@gmail.com>
 *
 * Some code borrowed from https://github.com/xairy/kasan-prototype by
 *        Andrey Konovalov <andreyknvl@gmail.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt
#define DISABLE_BRANCH_PROFILING

#include <linux/export.h>
#include <linux/interrupt.h>
#include <linux/init.h>
#include <linux/kasan.h>
#include <linux/kernel.h>
#include <linux/kmemleak.h>
#include <linux/linkage.h>
#include <linux/memblock.h>
#include <linux/memory.h>
#include <linux/mm.h>
#include <linux/module.h>
#include <linux/printk.h>
#include <linux/sched.h>
#include <linux/sched/task_stack.h>
#include <linux/slab.h>
#include <linux/stacktrace.h>
#include <linux/string.h>
#include <linux/types.h>
#include <linux/vmalloc.h>
#include <linux/bug.h>

#include "kasan.h"
#include "../slab.h"

/*
 * All functions below are always inlined so that the compiler can perform
 * better optimizations in each of __asan_loadX/__asan_storeX depending on
 * the memory access size X.
 */

static __always_inline bool memory_is_poisoned_1(unsigned long addr)
{
	s8 shadow_value = *(s8 *)kasan_mem_to_shadow((void *)addr);

	if (unlikely(shadow_value)) {
		s8 last_accessible_byte = addr & KASAN_SHADOW_MASK;
		return unlikely(last_accessible_byte >= shadow_value);
	}

	return false;
}

static __always_inline bool memory_is_poisoned_2_4_8(unsigned long addr,
						unsigned long size)
{
	u8 *shadow_addr = (u8 *)kasan_mem_to_shadow((void *)addr);

	/*
	 * The access crosses an 8-byte (shadow granule) boundary. Such an
	 * access maps into 2 shadow bytes, so we need to check them both.
	 */
	if (unlikely(((addr + size - 1) & KASAN_SHADOW_MASK) < size - 1))
		return *shadow_addr || memory_is_poisoned_1(addr + size - 1);

	return memory_is_poisoned_1(addr + size - 1);
}
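
/*
 * Rough illustration of the shadow encoding the checks above rely on,
 * assuming the generic KASAN layout where one shadow byte tracks
 * KASAN_SHADOW_SCALE_SIZE (8) bytes of memory:
 *
 *	shadow == 0		all 8 bytes of the granule are accessible
 *	shadow == N (1..7)	only the first N bytes are accessible
 *	shadow < 0		the whole granule is poisoned (redzone, freed, ...)
 *
 * For a 1-byte access, (addr & KASAN_SHADOW_MASK) is the offset of the
 * accessed byte within its granule. E.g. a 13-byte object has shadow
 * [0, 5] (8 + 5 accessible bytes), so in the second granule the
 * "offset >= shadow_value" test in memory_is_poisoned_1() accepts
 * offsets 0..4 and rejects offsets 5..7.
 */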

static __always_inline bool memory_is_poisoned_16(unsigned long addr)
{
	u16 *shadow_addr = (u16 *)kasan_mem_to_shadow((void *)addr);

	/* An unaligned 16-byte access maps into 3 shadow bytes. */
	if (unlikely(!IS_ALIGNED(addr, KASAN_SHADOW_SCALE_SIZE)))
		return *shadow_addr || memory_is_poisoned_1(addr + 15);

	return *shadow_addr;
}

static __always_inline unsigned long bytes_is_nonzero(const u8 *start,
					size_t size)
{
	while (size) {
		if (unlikely(*start))
			return (unsigned long)start;
		start++;
		size--;
	}

	return 0;
}

static __always_inline unsigned long memory_is_nonzero(const void *start,
						const void *end)
{
	unsigned int words;
	unsigned long ret;
	unsigned int prefix = (unsigned long)start % 8;

	if (end - start <= 16)
		return bytes_is_nonzero(start, end - start);

	if (prefix) {
		prefix = 8 - prefix;
		ret = bytes_is_nonzero(start, prefix);
		if (unlikely(ret))
			return ret;
		start += prefix;
	}

	words = (end - start) / 8;
	while (words) {
		if (unlikely(*(u64 *)start))
			return bytes_is_nonzero(start, 8);
		start += 8;
		words--;
	}

	return bytes_is_nonzero(start, (end - start) % 8);
}

static __always_inline bool memory_is_poisoned_n(unsigned long addr,
						size_t size)
{
	unsigned long ret;

	ret = memory_is_nonzero(kasan_mem_to_shadow((void *)addr),
			kasan_mem_to_shadow((void *)addr + size - 1) + 1);

	if (unlikely(ret)) {
		unsigned long last_byte = addr + size - 1;
		s8 *last_shadow = (s8 *)kasan_mem_to_shadow((void *)last_byte);

		if (unlikely(ret != (unsigned long)last_shadow ||
			((long)(last_byte & KASAN_SHADOW_MASK) >= *last_shadow)))
			return true;
	}
	return false;
}

static __always_inline bool memory_is_poisoned(unsigned long addr, size_t size)
{
	if (__builtin_constant_p(size)) {
		switch (size) {
		case 1:
			return memory_is_poisoned_1(addr);
		case 2:
		case 4:
		case 8:
			return memory_is_poisoned_2_4_8(addr, size);
		case 16:
			return memory_is_poisoned_16(addr);
		default:
			BUILD_BUG();
		}
	}

	return memory_is_poisoned_n(addr, size);
}

static __always_inline bool check_memory_region_inline(unsigned long addr,
						size_t size, bool write,
						unsigned long ret_ip)
{
	if (unlikely(size == 0))
		return true;

	if (unlikely(addr + size < addr))
		return !kasan_report(addr, size, write, ret_ip);

	if (unlikely((void *)addr <
		kasan_shadow_to_mem((void *)KASAN_SHADOW_START))) {
		return !kasan_report(addr, size, write, ret_ip);
	}

	if (likely(!memory_is_poisoned(addr, size)))
		return true;

	return !kasan_report(addr, size, write, ret_ip);
}

bool check_memory_region(unsigned long addr, size_t size, bool write,
				unsigned long ret_ip)
{
	return check_memory_region_inline(addr, size, write, ret_ip);
}

void kasan_cache_shrink(struct kmem_cache *cache)
{
	quarantine_remove_cache(cache);
}

void kasan_cache_shutdown(struct kmem_cache *cache)
{
	if (!__kmem_cache_empty(cache))
		quarantine_remove_cache(cache);
}

static void register_global(struct kasan_global *global)
{
	size_t aligned_size = round_up(global->size, KASAN_SHADOW_SCALE_SIZE);

	kasan_unpoison_shadow(global->beg, global->size);

	kasan_poison_shadow(global->beg + aligned_size,
		global->size_with_redzone - aligned_size,
		KASAN_GLOBAL_REDZONE);
}

void __asan_register_globals(struct kasan_global *globals, size_t size)
{
	int i;

	for (i = 0; i < size; i++)
		register_global(&globals[i]);
}
EXPORT_SYMBOL(__asan_register_globals);
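
/*
 * Sketch of how the globals hooks are used, assuming the usual compiler
 * behaviour: each instrumented global is allocated with a trailing redzone
 * (size_with_redzone bytes in total) and compiler-emitted constructor code
 * passes an array of struct kasan_global to __asan_register_globals().
 * For a hypothetical 10-byte global, register_global() leaves shadow
 * [0, 2] for the object itself (8 + 2 accessible bytes) and fills the
 * remaining redzone's shadow with KASAN_GLOBAL_REDZONE.
 */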

void __asan_unregister_globals(struct kasan_global *globals, size_t size)
{
}
EXPORT_SYMBOL(__asan_unregister_globals);

#define DEFINE_ASAN_LOAD_STORE(size)					\
	void __asan_load##size(unsigned long addr)			\
	{								\
		check_memory_region_inline(addr, size, false, _RET_IP_);\
	}								\
	EXPORT_SYMBOL(__asan_load##size);				\
	__alias(__asan_load##size)					\
	void __asan_load##size##_noabort(unsigned long);		\
	EXPORT_SYMBOL(__asan_load##size##_noabort);			\
	void __asan_store##size(unsigned long addr)			\
	{								\
		check_memory_region_inline(addr, size, true, _RET_IP_);	\
	}								\
	EXPORT_SYMBOL(__asan_store##size);				\
	__alias(__asan_store##size)					\
	void __asan_store##size##_noabort(unsigned long);		\
	EXPORT_SYMBOL(__asan_store##size##_noabort)

DEFINE_ASAN_LOAD_STORE(1);
DEFINE_ASAN_LOAD_STORE(2);
DEFINE_ASAN_LOAD_STORE(4);
DEFINE_ASAN_LOAD_STORE(8);
DEFINE_ASAN_LOAD_STORE(16);

void __asan_loadN(unsigned long addr, size_t size)
{
	check_memory_region(addr, size, false, _RET_IP_);
}
EXPORT_SYMBOL(__asan_loadN);

__alias(__asan_loadN)
void __asan_loadN_noabort(unsigned long, size_t);
EXPORT_SYMBOL(__asan_loadN_noabort);

void __asan_storeN(unsigned long addr, size_t size)
{
	check_memory_region(addr, size, true, _RET_IP_);
}
EXPORT_SYMBOL(__asan_storeN);

__alias(__asan_storeN)
void __asan_storeN_noabort(unsigned long, size_t);
EXPORT_SYMBOL(__asan_storeN_noabort);

/* to shut up compiler complaints */
void __asan_handle_no_return(void) {}
EXPORT_SYMBOL(__asan_handle_no_return);

/* Emitted by compiler to poison alloca()ed objects. */
void __asan_alloca_poison(unsigned long addr, size_t size)
{
	size_t rounded_up_size = round_up(size, KASAN_SHADOW_SCALE_SIZE);
	size_t padding_size = round_up(size, KASAN_ALLOCA_REDZONE_SIZE) -
			rounded_up_size;
	size_t rounded_down_size = round_down(size, KASAN_SHADOW_SCALE_SIZE);

	const void *left_redzone = (const void *)(addr -
			KASAN_ALLOCA_REDZONE_SIZE);
	const void *right_redzone = (const void *)(addr + rounded_up_size);

	WARN_ON(!IS_ALIGNED(addr, KASAN_ALLOCA_REDZONE_SIZE));

	kasan_unpoison_shadow((const void *)(addr + rounded_down_size),
			      size - rounded_down_size);
	kasan_poison_shadow(left_redzone, KASAN_ALLOCA_REDZONE_SIZE,
			KASAN_ALLOCA_LEFT);
	kasan_poison_shadow(right_redzone,
			padding_size + KASAN_ALLOCA_REDZONE_SIZE,
			KASAN_ALLOCA_RIGHT);
}
EXPORT_SYMBOL(__asan_alloca_poison);

/* Emitted by compiler to unpoison alloca()ed areas when the stack unwinds. */
void __asan_allocas_unpoison(const void *stack_top, const void *stack_bottom)
{
	if (unlikely(!stack_top || stack_top > stack_bottom))
		return;

	kasan_unpoison_shadow(stack_top, stack_bottom - stack_top);
}
EXPORT_SYMBOL(__asan_allocas_unpoison);

/* Emitted by the compiler to [un]poison local variables. */
#define DEFINE_ASAN_SET_SHADOW(byte)					\
	void __asan_set_shadow_##byte(const void *addr, size_t size)	\
	{								\
		__memset((void *)addr, 0x##byte, size);			\
	}								\
	EXPORT_SYMBOL(__asan_set_shadow_##byte)

DEFINE_ASAN_SET_SHADOW(00);
DEFINE_ASAN_SET_SHADOW(f1);
DEFINE_ASAN_SET_SHADOW(f2);
DEFINE_ASAN_SET_SHADOW(f3);
DEFINE_ASAN_SET_SHADOW(f5);
DEFINE_ASAN_SET_SHADOW(f8);
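
/*
 * Rough sketch of the compiler-generated instrumentation that ends up
 * calling the hooks in this file (illustrative and compiler-dependent):
 *
 *	u64 x = *p;	becomes roughly		__asan_load8((unsigned long)p);
 *						x = *p;
 *
 * while function prologues/epilogues call the __asan_set_shadow_xx()
 * helpers to mark stack redzones directly in shadow memory (f1/f2/f3 for
 * the left/middle/right redzones, 00 to clear), which is why those
 * helpers simply __memset() the shadow without performing any checks.
 */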