From d88353a5a4c6fc7dd699c97891230a9c5bbaa369 Mon Sep 17 00:00:00 2001 From: Phil Tillet Date: Mon, 9 Jan 2023 20:14:06 -0800 Subject: [PATCH] Comment out leftover debug print/exit in fused-attention tutorial --- python/tutorials/06-fused-attention.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/tutorials/06-fused-attention.py b/python/tutorials/06-fused-attention.py index d20ea3cd1..8892b5529 100644 --- a/python/tutorials/06-fused-attention.py +++ b/python/tutorials/06-fused-attention.py @@ -288,8 +288,8 @@ class _attention(torch.autograd.Function): BLOCK_DMODEL=ctx.BLOCK_DMODEL, num_warps=8, num_stages=1, ) - print(pgm.asm["ttgir"]) - exit() + # print(pgm.asm["ttgir"]) + # exit() return dq, dk, dv, None