1 parent 23ef354 commit 502d71f
examples/linear_attention/example_retention_fwd.py
@@ -51,13 +51,6 @@ def chunk_retention_fwd(
        o = T.alloc_fragment([chunk_size, BV], accum_dtype)
        T.clear(h)

-       T.annotate_layout({
-           q: tl.layout.make_swizzled_layout(q),
-           k: tl.layout.make_swizzled_layout(k),
-           v: tl.layout.make_swizzled_layout(v),
-           h_shared: tl.layout.make_swizzled_layout(h_shared),
-           s_shared: tl.layout.make_swizzled_layout(s_shared),
-       })
        T.use_swizzle(10)

        for i in T.Pipelined(0, NT):
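
For context, a minimal sketch of how the kernel body reads after this commit, reconstructed only from the hunk above. The surrounding definitions (q, k, v, h, h_shared, s_shared, chunk_size, BV, NT, accum_dtype, and the T/tl aliases) live elsewhere in example_retention_fwd.py and are assumed here, so this fragment is illustrative rather than standalone-runnable: the explicit per-buffer T.annotate_layout(...) calls built with tl.layout.make_swizzled_layout are dropped, and only the T.use_swizzle(10) hint remains.

        # Inside chunk_retention_fwd, after the shared/fragment buffers are set up
        # (q, k, v, h_shared, s_shared are defined earlier in the example; not shown here).
        o = T.alloc_fragment([chunk_size, BV], accum_dtype)
        T.clear(h)

        # The explicit swizzled-layout annotations were removed by this commit;
        # the coarse swizzle hint is kept.
        T.use_swizzle(10)

        for i in T.Pipelined(0, NT):
            ...  # per-chunk retention update, unchanged by this commit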