
Commit e6a72b7

Eugeniy Paltsev authored and vineetgarc committed
ARCv2: lib: memset: fix doing prefetchw outside of buffer
ARCv2 optimized memset uses the PREFETCHW instruction for prefetching the next cache line, but doesn't ensure that the line is not past the end of the buffer. PREFETCHW changes the line ownership and marks it dirty, which can cause issues in SMP configs when the next line was already owned by another core. Fix the issue by avoiding the PREFETCHW.

Some more details: the current code has 3 logical loops (ignoring the unaligned part):
(a) big loop doing aligned 64 bytes per iteration, with PREALLOC
(b) loop doing 32 x 2 bytes, with PREFETCHW
(c) loop for any leftover bytes

Loop (a) was already eliding the last 64 bytes, so PREALLOC was safe. The fix was removing PREFETCHW from (b).

Another potential issue (applicable to configs with a 32 or 128 byte L1 cache line) is that PREALLOC assumes a 64 byte cache line and may not do the right thing, especially for 32B. While it would be easy to adapt, there are no known configs with those line sizes, so for now just compile out PREALLOC in such cases.

Signed-off-by: Eugeniy Paltsev <[email protected]>
Cc: [email protected] #4.4+
Signed-off-by: Vineet Gupta <[email protected]>
[vgupta: rewrote changelog, used asm .macro vs. "C" macro]
1 parent 4d44745 commit e6a72b7
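
To make the changelog's loop structure concrete, here is a rough C model of the three logical loops. This is a sketch only: every name and bound below is invented for illustration, and the real routine is hand-written ARCv2 assembly.

/*
 * Rough C model of the three logical loops; illustrative only.
 */
#include <stddef.h>
#include <stdint.h>

#define CACHE_LINE 64

static void *memset_model(void *buf, int c, size_t n)
{
	uint8_t *p = buf;
	size_t left = n;

	/* (a) 64 bytes per iteration, stopping one full line short of
	 * the end: the "next line" at p + 64 is therefore always inside
	 * the buffer, so PREALLOC is safe in this loop. */
	while (left >= 2 * CACHE_LINE) {
		/* prealloc [p, 64] would go here */
		for (int i = 0; i < CACHE_LINE; i++)
			*p++ = (uint8_t)c;
		left -= CACHE_LINE;
	}

	/* (b) 32 bytes per iteration: p + 32 may point past the end of
	 * the buffer, so the PREFETCHW that used to sit here could dirty
	 * a line owned by another core; the fix removes it. */
	while (left >= 32) {
		for (int i = 0; i < 32; i++)
			*p++ = (uint8_t)c;
		left -= 32;
	}

	/* (c) leftover bytes */
	while (left--)
		*p++ = (uint8_t)c;

	return buf;
}

Loop (a) stops a full line short of the end, so a prefetch of the next line there stays in-bounds; in loop (b) a prefetch at p + 32 could target a line past the buffer, which is why the fix drops it.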

File tree

1 file changed: +32, −8 lines


arch/arc/lib/memset-archs.S

Lines changed: 32 additions & 8 deletions
@@ -7,11 +7,39 @@
  */
 
 #include <linux/linkage.h>
+#include <asm/cache.h>
 
-#undef PREALLOC_NOT_AVAIL
+/*
+ * The memset implementation below is optimized to use prefetchw and prealloc
+ * instruction in case of CPU with 64B L1 data cache line (L1_CACHE_SHIFT == 6)
+ * If you want to implement optimized memset for other possible L1 data cache
+ * line lengths (32B and 128B) you should rewrite code carefully checking
+ * we don't call any prefetchw/prealloc instruction for L1 cache lines which
+ * don't belongs to memset area.
+ */
+
+#if L1_CACHE_SHIFT == 6
+
+.macro PREALLOC_INSTR	reg, off
+	prealloc	[\reg, \off]
+.endm
+
+.macro PREFETCHW_INSTR	reg, off
+	prefetchw	[\reg, \off]
+.endm
+
+#else
+
+.macro PREALLOC_INSTR
+.endm
+
+.macro PREFETCHW_INSTR
+.endm
+
+#endif
 
 ENTRY_CFI(memset)
-	prefetchw [r0]		; Prefetch the write location
+	PREFETCHW_INSTR	r0, 0	; Prefetch the first write location
 	mov.f	0, r2
 ;;; if size is zero
 	jz.d	[blink]
@@ -48,11 +76,8 @@ ENTRY_CFI(memset)
 
 	lpnz	@.Lset64bytes
 	;; LOOP START
-#ifdef PREALLOC_NOT_AVAIL
-	prefetchw [r3, 64]	;Prefetch the next write location
-#else
-	prealloc  [r3, 64]
-#endif
+	PREALLOC_INSTR	r3, 64	; alloc next line w/o fetching
+
 #ifdef CONFIG_ARC_HAS_LL64
 	std.ab	r4, [r3, 8]
 	std.ab	r4, [r3, 8]
@@ -85,7 +110,6 @@ ENTRY_CFI(memset)
 	lsr.f	lp_count, r2, 5 ;Last remaining max 124 bytes
 	lpnz	.Lset32bytes
 	;; LOOP START
-	prefetchw   [r3, 32]	;Prefetch the next write location
 #ifdef CONFIG_ARC_HAS_LL64
 	std.ab	r4, [r3, 8]
 	std.ab	r4, [r3, 8]
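
The rule encoded by the new comment block and macros — only issue prefetchw/prealloc for cache lines that belong entirely to the memset area — can be written as a small predicate. The C helper below is hypothetical (name and signature invented for illustration, assuming the 64B line of L1_CACHE_SHIFT == 6), not kernel code:

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

#define L1_LINE 64 /* assumes L1_CACHE_SHIFT == 6 */

/* Hypothetical helper: a prefetchw/prealloc aimed at (cursor + off)
 * is only safe when the whole cache line it touches lies inside
 * [buf, buf + len). */
static bool line_belongs_to_memset(uintptr_t buf, size_t len,
				   uintptr_t cursor, size_t off)
{
	uintptr_t line = (cursor + off) & ~(uintptr_t)(L1_LINE - 1);

	return line >= buf && line + L1_LINE <= buf + len;
}

prealloc allocates a line without fetching its previous contents, so using it on a line memset will not fully overwrite would clobber adjacent data; prefetchw takes dirty ownership of the line, which is what caused the SMP issue this commit fixes.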
