
Commit cde8a7e

xairy authored and torvalds committed
kasan: ensure poisoning size alignment
A previous change, d99f6a1 ("kasan: don't round_up too much"), attempted to simplify the code by adding a round_up(size) call into kasan_poison(). While this allowed having fewer round_up() calls around the code, it results in round_up() being called multiple times.

This patch removes the round_up() of size from kasan_poison() and ensures that all callers round_up() the size explicitly. It also adds WARN_ON() alignment checks for address and size to kasan_poison() and kasan_unpoison().

Link: https://lkml.kernel.org/r/3ffe8d4a246ae67a8b5e91f65bf98cd7cba9d7b9.1612546384.git.andreyknvl@google.com
Signed-off-by: Andrey Konovalov <andreyknvl@google.com>
Reviewed-by: Marco Elver <elver@google.com>
Cc: Alexander Potapenko <glider@google.com>
Cc: Andrey Ryabinin <aryabinin@virtuozzo.com>
Cc: Branislav Rankov <Branislav.Rankov@arm.com>
Cc: Catalin Marinas <catalin.marinas@arm.com>
Cc: Dmitry Vyukov <dvyukov@google.com>
Cc: Evgenii Stepanov <eugenis@google.com>
Cc: Kevin Brodsky <kevin.brodsky@arm.com>
Cc: Peter Collingbourne <pcc@google.com>
Cc: Vincenzo Frascino <vincenzo.frascino@arm.com>
Cc: Will Deacon <will.deacon@arm.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
1 parent d12d9ad commit cde8a7e
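To make the new contract concrete, here is a small user-space sketch (illustration only, not kernel code: the 16-byte granule, the round_up() macro, and the poison() helper below are local stand-ins for the KASAN definitions). It shows a caller rounding a size up to whole granules before poisoning, and a misaligned size tripping the new alignment check instead of being silently rounded:

#include <stdio.h>
#include <stddef.h>

/* Local stand-ins for the kernel definitions (values assumed for illustration). */
#define KASAN_GRANULE_SIZE  16UL
#define KASAN_GRANULE_MASK  (KASAN_GRANULE_SIZE - 1)
#define round_up(x, y)      ((((x) + (y) - 1) / (y)) * (y))

/* Mimics the new kasan_poison() contract: addr and size must be granule-aligned. */
static int poison(unsigned long addr, size_t size)
{
        if ((addr & KASAN_GRANULE_MASK) || (size & KASAN_GRANULE_MASK)) {
                printf("WARN: misaligned poison request, addr=0x%lx size=%zu\n",
                       addr, size);
                return -1;
        }
        printf("poisoned %zu bytes at 0x%lx\n", size, addr);
        return 0;
}

int main(void)
{
        size_t object_size = 24;        /* not a multiple of the 16-byte granule */

        /* Caller-side rounding, as the patch now requires: 24 -> 32 bytes. */
        poison(0x1000, round_up(object_size, KASAN_GRANULE_SIZE));

        /* An unrounded size is no longer fixed up silently; it is rejected. */
        poison(0x1000, object_size);
        return 0;
}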

3 files changed, +48 -31 lines changed


mm/kasan/common.c

Lines changed: 6 additions & 3 deletions
@@ -261,7 +261,8 @@ void __kasan_unpoison_object_data(struct kmem_cache *cache, void *object)
 
 void __kasan_poison_object_data(struct kmem_cache *cache, void *object)
 {
-        kasan_poison(object, cache->object_size, KASAN_KMALLOC_REDZONE);
+        kasan_poison(object, round_up(cache->object_size, KASAN_GRANULE_SIZE),
+                        KASAN_KMALLOC_REDZONE);
 }
 
 /*
@@ -348,7 +349,8 @@ static bool ____kasan_slab_free(struct kmem_cache *cache, void *object,
                 return true;
         }
 
-        kasan_poison(object, cache->object_size, KASAN_KMALLOC_FREE);
+        kasan_poison(object, round_up(cache->object_size, KASAN_GRANULE_SIZE),
+                        KASAN_KMALLOC_FREE);
 
         if ((IS_ENABLED(CONFIG_KASAN_GENERIC) && !quarantine))
                 return false;
@@ -490,7 +492,8 @@ static void *____kasan_kmalloc(struct kmem_cache *cache, const void *object,
         /* Poison the aligned part of the redzone. */
         redzone_start = round_up((unsigned long)(object + size),
                                 KASAN_GRANULE_SIZE);
-        redzone_end = (unsigned long)object + cache->object_size;
+        redzone_end = round_up((unsigned long)(object + cache->object_size),
+                                KASAN_GRANULE_SIZE);
         kasan_poison((void *)redzone_start, redzone_end - redzone_start,
                      KASAN_KMALLOC_REDZONE);
 
mm/kasan/kasan.h

Lines changed: 20 additions & 13 deletions
@@ -330,30 +330,37 @@ static inline u8 kasan_random_tag(void) { return 0; }
 
 #ifdef CONFIG_KASAN_HW_TAGS
 
-static inline void kasan_poison(const void *address, size_t size, u8 value)
+static inline void kasan_poison(const void *addr, size_t size, u8 value)
 {
-        address = kasan_reset_tag(address);
+        addr = kasan_reset_tag(addr);
 
         /* Skip KFENCE memory if called explicitly outside of sl*b. */
-        if (is_kfence_address(address))
+        if (is_kfence_address(addr))
                 return;
 
-        hw_set_mem_tag_range((void *)address,
-                        round_up(size, KASAN_GRANULE_SIZE), value);
+        if (WARN_ON((unsigned long)addr & KASAN_GRANULE_MASK))
+                return;
+        if (WARN_ON(size & KASAN_GRANULE_MASK))
+                return;
+
+        hw_set_mem_tag_range((void *)addr, size, value);
 }
 
-static inline void kasan_unpoison(const void *address, size_t size)
+static inline void kasan_unpoison(const void *addr, size_t size)
 {
-        u8 tag = get_tag(address);
+        u8 tag = get_tag(addr);
 
-        address = kasan_reset_tag(address);
+        addr = kasan_reset_tag(addr);
 
         /* Skip KFENCE memory if called explicitly outside of sl*b. */
-        if (is_kfence_address(address))
+        if (is_kfence_address(addr))
                 return;
 
-        hw_set_mem_tag_range((void *)address,
-                        round_up(size, KASAN_GRANULE_SIZE), tag);
+        if (WARN_ON((unsigned long)addr & KASAN_GRANULE_MASK))
+                return;
+        size = round_up(size, KASAN_GRANULE_SIZE);
+
+        hw_set_mem_tag_range((void *)addr, size, tag);
 }
 
 static inline bool kasan_byte_accessible(const void *addr)
@@ -370,7 +377,7 @@ static inline bool kasan_byte_accessible(const void *addr)
 /**
  * kasan_poison - mark the memory range as unaccessible
  * @addr - range start address, must be aligned to KASAN_GRANULE_SIZE
- * @size - range size
+ * @size - range size, must be aligned to KASAN_GRANULE_SIZE
  * @value - value that's written to metadata for the range
  *
  * The size gets aligned to KASAN_GRANULE_SIZE before marking the range.
@@ -380,7 +387,7 @@ void kasan_poison(const void *addr, size_t size, u8 value);
 /**
  * kasan_unpoison - mark the memory range as accessible
  * @addr - range start address, must be aligned to KASAN_GRANULE_SIZE
- * @size - range size
+ * @size - range size, can be unaligned
 *
 * For the tag-based modes, the @size gets aligned to KASAN_GRANULE_SIZE before
 * marking the range.
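The updated kernel-doc makes the asymmetry explicit: kasan_poison() now requires a granule-aligned size from its callers, while kasan_unpoison() keeps accepting unaligned sizes and rounds them up internally (in the HW_TAGS inline above, just before hw_set_mem_tag_range()). A minimal user-space model of that unpoison behaviour, with hypothetical helpers standing in for the real tagging primitives:

#include <stdio.h>
#include <stddef.h>

#define KASAN_GRANULE_SIZE  16UL
#define KASAN_GRANULE_MASK  (KASAN_GRANULE_SIZE - 1)
#define round_up(x, y)      ((((x) + (y) - 1) / (y)) * (y))

/* Hypothetical stand-in for hw_set_mem_tag_range(): only reports the range. */
static void set_tag_range(unsigned long addr, size_t size, unsigned char tag)
{
        printf("tag 0x%02x set over [0x%lx, 0x%lx)\n", tag, addr, addr + size);
}

/* Models the HW_TAGS kasan_unpoison(): aligned address, possibly unaligned size. */
static void unpoison(unsigned long addr, size_t size, unsigned char tag)
{
        if (addr & KASAN_GRANULE_MASK) {
                printf("WARN: misaligned address 0x%lx\n", addr);
                return;
        }
        size = round_up(size, KASAN_GRANULE_SIZE);      /* internal rounding kept */
        set_tag_range(addr, size, tag);
}

int main(void)
{
        unpoison(0x1000, 20, 0x2a);     /* 20 bytes -> 32 bytes (two granules) tagged */
        return 0;
}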

mm/kasan/shadow.c

Lines changed: 22 additions & 15 deletions
@@ -69,7 +69,7 @@ void *memcpy(void *dest, const void *src, size_t len)
         return __memcpy(dest, src, len);
 }
 
-void kasan_poison(const void *address, size_t size, u8 value)
+void kasan_poison(const void *addr, size_t size, u8 value)
 {
         void *shadow_start, *shadow_end;
 
@@ -78,55 +78,62 @@ void kasan_poison(const void *address, size_t size, u8 value)
          * some of the callers (e.g. kasan_poison_object_data) pass tagged
          * addresses to this function.
          */
-        address = kasan_reset_tag(address);
+        addr = kasan_reset_tag(addr);
 
         /* Skip KFENCE memory if called explicitly outside of sl*b. */
-        if (is_kfence_address(address))
+        if (is_kfence_address(addr))
                 return;
 
-        size = round_up(size, KASAN_GRANULE_SIZE);
-        shadow_start = kasan_mem_to_shadow(address);
-        shadow_end = kasan_mem_to_shadow(address + size);
+        if (WARN_ON((unsigned long)addr & KASAN_GRANULE_MASK))
+                return;
+        if (WARN_ON(size & KASAN_GRANULE_MASK))
+                return;
+
+        shadow_start = kasan_mem_to_shadow(addr);
+        shadow_end = kasan_mem_to_shadow(addr + size);
 
         __memset(shadow_start, value, shadow_end - shadow_start);
 }
 EXPORT_SYMBOL(kasan_poison);
 
 #ifdef CONFIG_KASAN_GENERIC
-void kasan_poison_last_granule(const void *address, size_t size)
+void kasan_poison_last_granule(const void *addr, size_t size)
 {
         if (size & KASAN_GRANULE_MASK) {
-                u8 *shadow = (u8 *)kasan_mem_to_shadow(address + size);
+                u8 *shadow = (u8 *)kasan_mem_to_shadow(addr + size);
                 *shadow = size & KASAN_GRANULE_MASK;
         }
 }
 #endif
 
-void kasan_unpoison(const void *address, size_t size)
+void kasan_unpoison(const void *addr, size_t size)
 {
-        u8 tag = get_tag(address);
+        u8 tag = get_tag(addr);
 
         /*
          * Perform shadow offset calculation based on untagged address, as
          * some of the callers (e.g. kasan_unpoison_object_data) pass tagged
          * addresses to this function.
          */
-        address = kasan_reset_tag(address);
+        addr = kasan_reset_tag(addr);
 
         /*
          * Skip KFENCE memory if called explicitly outside of sl*b. Also note
          * that calls to ksize(), where size is not a multiple of machine-word
          * size, would otherwise poison the invalid portion of the word.
          */
-        if (is_kfence_address(address))
+        if (is_kfence_address(addr))
+                return;
+
+        if (WARN_ON((unsigned long)addr & KASAN_GRANULE_MASK))
                 return;
 
-        /* Unpoison round_up(size, KASAN_GRANULE_SIZE) bytes. */
-        kasan_poison(address, size, tag);
+        /* Unpoison all granules that cover the object. */
+        kasan_poison(addr, round_up(size, KASAN_GRANULE_SIZE), tag);
 
         /* Partially poison the last granule for the generic mode. */
         if (IS_ENABLED(CONFIG_KASAN_GENERIC))
-                kasan_poison_last_granule(address, size);
+                kasan_poison_last_granule(addr, size);
 }
 
 #ifdef CONFIG_MEMORY_HOTPLUG
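For the generic (software shadow) mode, the reworked kasan_unpoison() above first unpoisons every granule that covers the object and then partially re-poisons the last granule. The sketch below models this with a plain byte array standing in for shadow memory (the 16-byte granule and the 0xFC redzone marker are assumed values; only the arithmetic mirrors the patch):

#include <stdio.h>
#include <string.h>

#define KASAN_GRANULE_SIZE  16UL
#define KASAN_GRANULE_MASK  (KASAN_GRANULE_SIZE - 1)
#define round_up(x, y)      ((((x) + (y) - 1) / (y)) * (y))

int main(void)
{
        /* Four shadow bytes cover 64 bytes of object memory; start fully redzoned. */
        unsigned char shadow[4] = { 0xfc, 0xfc, 0xfc, 0xfc };
        size_t size = 20;       /* object size, not granule-aligned */
        size_t i;

        /* Unpoison all granules that cover the object (0 == fully accessible). */
        memset(shadow, 0x00, round_up(size, KASAN_GRANULE_SIZE) / KASAN_GRANULE_SIZE);

        /* Partially poison the last granule: shadow byte holds the valid-byte count. */
        if (size & KASAN_GRANULE_MASK)
                shadow[size / KASAN_GRANULE_SIZE] = size & KASAN_GRANULE_MASK;

        for (i = 0; i < sizeof(shadow); i++)
                printf("shadow[%zu] = 0x%02x\n", i, shadow[i]);
        return 0;
}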
