Lines matching full:size

53  * depending on memory access size X.
69 unsigned long size) in memory_is_poisoned_2_4_8()
74 * Access crosses 8(shadow size)-byte boundary. Such access maps in memory_is_poisoned_2_4_8()
77 if (unlikely((((unsigned long)addr + size - 1) & KASAN_GRANULE_MASK) < size - 1)) in memory_is_poisoned_2_4_8()
78 return *shadow_addr || memory_is_poisoned_1(addr + size - 1); in memory_is_poisoned_2_4_8()
80 return memory_is_poisoned_1(addr + size - 1); in memory_is_poisoned_2_4_8()
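
These fragments are generic KASAN's fast path for 2-, 4- and 8-byte accesses: one shadow byte covers an 8-byte granule, so a second shadow byte matters only when the access straddles a granule boundary. A minimal userspace sketch of that boundary test, assuming the generic-mode granule size of 8 bytes (KASAN_GRANULE_MASK == 0x7); crosses_granule() is an illustrative name, not a kernel function:

    #include <stdio.h>

    #define KASAN_GRANULE_MASK 0x7UL

    /* same expression as on line 77: does [addr, addr+size) span two
     * 8-byte granules, i.e. two shadow bytes? */
    static int crosses_granule(unsigned long addr, unsigned long size)
    {
        return ((addr + size - 1) & KASAN_GRANULE_MASK) < size - 1;
    }

    int main(void)
    {
        printf("%d\n", crosses_granule(0x1000, 8)); /* 0: one granule */
        printf("%d\n", crosses_granule(0x1004, 8)); /* 1: straddles two */
        return 0;
    }
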
95 size_t size) in bytes_is_nonzero()
97 while (size) { in bytes_is_nonzero()
101 size--; in bytes_is_nonzero()
136 static __always_inline bool memory_is_poisoned_n(const void *addr, size_t size) in memory_is_poisoned_n()
141 kasan_mem_to_shadow(addr + size - 1) + 1); in memory_is_poisoned_n()
144 const void *last_byte = addr + size - 1; in memory_is_poisoned_n()
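
memory_is_poisoned_n() covers arbitrary sizes: it scans the shadow for the whole range and then re-checks the final byte, because the last granule may be only partially addressable. A self-contained sketch of that shadow encoding, assuming the usual generic-KASAN scheme (shadow 0 = granule fully addressable, 1..7 = that many leading bytes addressable, negative = poisoned); range_is_poisoned() and the constants are illustrative, not the kernel's code:

    #include <stdio.h>

    #define GRANULE_SHIFT 3
    #define GRANULE_MASK  0x7UL

    static int range_is_poisoned(const signed char *shadow,
                                 unsigned long addr, unsigned long size)
    {
        unsigned long last = addr + size - 1;
        unsigned long g, first_g = addr >> GRANULE_SHIFT;
        unsigned long last_g = last >> GRANULE_SHIFT;

        /* every fully covered granule must be fully addressable */
        for (g = first_g; g < last_g; g++)
            if (shadow[g])
                return 1;

        /* the final granule may be partial: the last byte's in-granule
         * offset must fall below the shadow value */
        if (shadow[last_g])
            return (signed char)(last & GRANULE_MASK) >= shadow[last_g];

        return 0;
    }

    int main(void)
    {
        /* a 13-byte object: granule 0 fully valid, granule 1 has 5 valid bytes */
        signed char shadow[2] = { 0, 5 };

        printf("%d\n", range_is_poisoned(shadow, 0, 13)); /* 0: in bounds */
        printf("%d\n", range_is_poisoned(shadow, 0, 14)); /* 1: 1 byte OOB */
        return 0;
    }
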
155 static __always_inline bool memory_is_poisoned(const void *addr, size_t size) in memory_is_poisoned()
157 if (__builtin_constant_p(size)) { in memory_is_poisoned()
158 switch (size) { in memory_is_poisoned()
164 return memory_is_poisoned_2_4_8(addr, size); in memory_is_poisoned()
172 return memory_is_poisoned_n(addr, size); in memory_is_poisoned()
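
The dispatcher leans on __builtin_constant_p() so that accesses whose size is a compile-time constant resolve to the specialized 1/2/4/8/16-byte helpers, while everything else falls through to memory_is_poisoned_n(). The same pattern in isolation (a GCC/Clang builtin; copy_checked() is a made-up example, not kernel code):

    #include <stdio.h>
    #include <string.h>

    static inline void copy_checked(void *dst, const void *src, size_t n)
    {
        /* folded at compile time: constant sizes take the fast path */
        if (__builtin_constant_p(n) && n == 8) {
            memcpy(dst, src, 8);    /* specialized, easily inlined */
            return;
        }
        memcpy(dst, src, n);        /* generic fallback */
    }

    int main(void)
    {
        char dst[16] = "", src[16] = "constant";

        copy_checked(dst, src, 8);  /* constant size: fast path */
        printf("%s\n", dst);
        return 0;
    }
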
176 size_t size, bool write, in check_region_inline()
182 if (unlikely(size == 0)) in check_region_inline()
185 if (unlikely(addr + size < addr)) in check_region_inline()
186 return !kasan_report(addr, size, write, ret_ip); in check_region_inline()
189 return !kasan_report(addr, size, write, ret_ip); in check_region_inline()
191 if (likely(!memory_is_poisoned(addr, size))) in check_region_inline()
194 return !kasan_report(addr, size, write, ret_ip); in check_region_inline()
197 bool kasan_check_range(const void *addr, size_t size, bool write, in kasan_check_range()
200 return check_region_inline(addr, size, write, ret_ip); in kasan_check_range()
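
Taken in order, lines 182-194 give check_region_inline()'s validation sequence: a zero-sized access passes trivially; a range whose end wraps around the address space is reported immediately; the report at line 189 covers the remaining early-out (in current kernels, an address without shadow metadata); otherwise the verdict comes from memory_is_poisoned(). kasan_check_range() on lines 197-200 is simply the out-of-line, exported wrapper around the same inline helper.
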
228 size_t aligned_size = round_up(global->size, KASAN_GRANULE_SIZE); in register_global()
230 kasan_unpoison(global->beg, global->size, false); in register_global()
237 void __asan_register_globals(void *ptr, ssize_t size) in __asan_register_globals()
242 for (i = 0; i < size; i++) in __asan_register_globals()
247 void __asan_unregister_globals(void *ptr, ssize_t size) in __asan_unregister_globals()
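
Note the units here: in __asan_register_globals() and __asan_unregister_globals(), size is the number of compiler-emitted struct kasan_global descriptors, not a byte count. register_global() (lines 228-230) unpoisons only each global's true size; the tail up to the granule-aligned aligned_size and the surrounding redzone are poisoned by lines that did not match this search.
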
252 #define DEFINE_ASAN_LOAD_STORE(size) \
253 void __asan_load##size(void *addr) \
255 check_region_inline(addr, size, false, _RET_IP_); \
257 EXPORT_SYMBOL(__asan_load##size); \
258 __alias(__asan_load##size) \
259 void __asan_load##size##_noabort(void *); \
260 EXPORT_SYMBOL(__asan_load##size##_noabort); \
261 void __asan_store##size(void *addr) \
263 check_region_inline(addr, size, true, _RET_IP_); \
265 EXPORT_SYMBOL(__asan_store##size); \
266 __alias(__asan_store##size) \
267 void __asan_store##size##_noabort(void *); \
268 EXPORT_SYMBOL(__asan_store##size##_noabort)
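
Because lines 252-268 quote the macro body almost in full, its expansion is mechanical; for size 1 it produces (whitespace adjusted, brace-only lines restored):

    void __asan_load1(void *addr)
    {
        check_region_inline(addr, 1, false, _RET_IP_);
    }
    EXPORT_SYMBOL(__asan_load1);
    __alias(__asan_load1)
    void __asan_load1_noabort(void *);
    EXPORT_SYMBOL(__asan_load1_noabort);

    void __asan_store1(void *addr)
    {
        check_region_inline(addr, 1, true, _RET_IP_);
    }
    EXPORT_SYMBOL(__asan_store1);
    __alias(__asan_store1)
    void __asan_store1_noabort(void *);
    EXPORT_SYMBOL(__asan_store1_noabort);

The *_noabort names are aliased rather than duplicated: instrumented kernel code calls the recovery (noabort) entry points, and both spellings must resolve to the same check.
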
276 void __asan_loadN(void *addr, ssize_t size) in __asan_loadN()
278 kasan_check_range(addr, size, false, _RET_IP_); in __asan_loadN()
286 void __asan_storeN(void *addr, ssize_t size) in __asan_storeN()
288 kasan_check_range(addr, size, true, _RET_IP_); in __asan_storeN()
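
The fixed-size stubs above call the inline helper directly, while the variable-sized __asan_loadN()/__asan_storeN() pair goes through the exported kasan_check_range(); both paths perform the same region check, differing only in where the size-specialized inlining can happen.
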
301 void __asan_alloca_poison(void *addr, ssize_t size) in __asan_alloca_poison()
303 size_t rounded_up_size = round_up(size, KASAN_GRANULE_SIZE); in __asan_alloca_poison()
304 size_t padding_size = round_up(size, KASAN_ALLOCA_REDZONE_SIZE) - in __asan_alloca_poison()
306 size_t rounded_down_size = round_down(size, KASAN_GRANULE_SIZE); in __asan_alloca_poison()
315 size - rounded_down_size, false); in __asan_alloca_poison()
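
__asan_alloca_poison() derives three rounded sizes from the object size: rounded up and down to the 8-byte granule, plus padding out to the alloca redzone unit. A userspace sketch of that arithmetic, assuming KASAN_GRANULE_SIZE == 8 and KASAN_ALLOCA_REDZONE_SIZE == 32 (illustrative values for generic mode):

    #include <stdio.h>

    #define GRANULE 8UL
    #define REDZONE 32UL
    #define round_up(x, a)   ((((x) + (a) - 1) / (a)) * (a))
    #define round_down(x, a) (((x) / (a)) * (a))

    int main(void)
    {
        unsigned long size = 45;                          /* example alloca size */
        unsigned long up   = round_up(size, GRANULE);     /* 48 */
        unsigned long down = round_down(size, GRANULE);   /* 40 */
        unsigned long pad  = round_up(size, REDZONE) - up; /* 64 - 48 = 16 */

        /* line 315: unpoison the valid tail of the last granule */
        printf("unpoison %lu byte(s) at offset %lu\n", size - down, down);
        /* the padding joins the right redzone poisoning */
        printf("right redzone padding: %lu bytes\n", pad);
        return 0;
    }

The unpoisoned tail (size - rounded_down_size) is what keeps the partially used last granule addressable; everything from rounded_up_size onward is poisoned as the right redzone.
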
335 void __asan_set_shadow_##byte(const void *addr, ssize_t size) \
337 __memset((void *)addr, 0x##byte, size); \
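
__asan_set_shadow_##byte() is another family of compiler-generated callbacks: instrumented code calls them to fill a shadow range with the pattern 0x##byte, and as line 337 shows, the kernel implements each one as a plain __memset() over the shadow. The source instantiates the macro for 00 and several f* stack-redzone patterns.
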
364 void kasan_cache_create(struct kmem_cache *cache, unsigned int *size, in kasan_cache_create()
379 * size of the accessible memory within the object. Additionally, we use in kasan_cache_create()
384 ok_size = *size; in kasan_cache_create()
387 cache->kasan_info.alloc_meta_offset = *size; in kasan_cache_create()
388 *size += sizeof(struct kasan_alloc_meta); in kasan_cache_create()
391 if (*size > KMALLOC_MAX_SIZE) { in kasan_cache_create()
393 *size = ok_size; in kasan_cache_create()
397 ok_size = *size; in kasan_cache_create()
408 * to store original size. in kasan_cache_create()
412 cache->kasan_info.free_meta_offset = *size; in kasan_cache_create()
413 *size += sizeof(struct kasan_free_meta); in kasan_cache_create()
436 *size += rem_free_meta_size; in kasan_cache_create()
446 cache->kasan_info.free_meta_offset = *size; in kasan_cache_create()
447 *size += sizeof(struct kasan_free_meta); in kasan_cache_create()
451 if (*size > KMALLOC_MAX_SIZE) { in kasan_cache_create()
454 *size = ok_size; in kasan_cache_create()
457 /* Calculate size with optimal redzone. */ in kasan_cache_create()
462 /* Use optimal size if the size with added metas is not large enough. */ in kasan_cache_create()
463 if (*size < optimal_size) in kasan_cache_create()
464 *size = optimal_size; in kasan_cache_create()
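
kasan_cache_create() grows each cache's object size to squeeze in allocation and free metadata, backing out whenever the grown size would exceed KMALLOC_MAX_SIZE, and finally adopts the optimal redzone size if the metadata did not already reach it. A userspace sketch of that bookkeeping; struct kasan_info is simplified, KMALLOC_MAX_SIZE is a placeholder, and the real function has an extra path that reuses object memory for free metadata (the "original size" comment at line 408 belongs to it):

    #include <stdio.h>

    #define KMALLOC_MAX_SIZE (1U << 22) /* placeholder value */

    struct kasan_info { unsigned int alloc_meta_offset, free_meta_offset; };

    static void cache_create_sketch(unsigned int *size, struct kasan_info *info,
                                    unsigned int alloc_meta_sz,
                                    unsigned int free_meta_sz,
                                    unsigned int optimal_size)
    {
        unsigned int ok_size = *size;

        /* append alloc metadata after the object... */
        info->alloc_meta_offset = *size;
        *size += alloc_meta_sz;
        /* ...and back out if the object no longer fits in kmalloc */
        if (*size > KMALLOC_MAX_SIZE) {
            info->alloc_meta_offset = 0;
            *size = ok_size;
        }

        /* same dance for free metadata */
        ok_size = *size;
        info->free_meta_offset = *size;
        *size += free_meta_sz;
        if (*size > KMALLOC_MAX_SIZE) {
            info->free_meta_offset = 0; /* simplified fallback */
            *size = ok_size;
        }

        /* use the optimal redzone unless metadata already grew past it */
        if (*size < optimal_size)
            *size = optimal_size;
    }

    int main(void)
    {
        struct kasan_info info;
        unsigned int size = 128;

        cache_create_sketch(&size, &info, 16, 16, 192);
        printf("size=%u alloc@%u free@%u\n",
               size, info.alloc_meta_offset, info.free_meta_offset);
        return 0;
    }
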