From d020059f2f71fe7c25765dde9d535195c09ece01 Mon Sep 17 00:00:00 2001
From: Gustaf Rydholm
Date: Sun, 3 Sep 2023 01:14:16 +0200
Subject: Update imports

---
 text_recognizer/network/transformer/attend.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/text_recognizer/network/transformer/attend.py b/text_recognizer/network/transformer/attend.py
index 4e643fb..d2c17b1 100644
--- a/text_recognizer/network/transformer/attend.py
+++ b/text_recognizer/network/transformer/attend.py
@@ -32,7 +32,7 @@ class Attend(nn.Module):
         out = F.scaled_dot_product_attention(q, k, v, is_causal=causal)
         return out
 
-    def atten(
+    def attn(
         self,
         q: Tensor,
         k: Tensor,
@@ -66,7 +66,7 @@ class Attend(nn.Module):
         if self.use_flash:
             return self.flash_attn(q, k, v, causal)
         else:
-            return self.atten(q, k, v, causal, mask)
+            return self.attn(q, k, v, causal, mask)
 
 
 def apply_input_mask(
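
For context, below is a minimal sketch of how the surrounding Attend module plausibly dispatches between the fused flash kernel and the renamed attn fallback. Only the F.scaled_dot_product_attention call and the forward dispatch appear in the patch; the constructor signature, the masking logic, and the (batch, heads, seq, head_dim) tensor layout are assumptions for illustration, not the repository's exact code.

from typing import Optional

import torch
import torch.nn.functional as F
from torch import Tensor, nn


class Attend(nn.Module):
    """Sketch: dispatch between fused and manual scaled dot-product attention."""

    def __init__(self, use_flash: bool = True) -> None:
        super().__init__()
        self.use_flash = use_flash

    def flash_attn(self, q: Tensor, k: Tensor, v: Tensor, causal: bool) -> Tensor:
        # Fused kernel, available in PyTorch >= 2.0 (shown in the patch context).
        return F.scaled_dot_product_attention(q, k, v, is_causal=causal)

    def attn(  # renamed from `atten` in this commit
        self,
        q: Tensor,
        k: Tensor,
        v: Tensor,
        causal: bool,
        mask: Optional[Tensor] = None,
    ) -> Tensor:
        # Manual fallback; body is an assumption based on the standard recipe.
        scale = q.shape[-1] ** -0.5
        sim = torch.einsum("b h i d, b h j d -> b h i j", q, k) * scale
        if mask is not None:
            # Mask out padded key positions before the softmax.
            sim = sim.masked_fill(~mask, torch.finfo(sim.dtype).min)
        if causal:
            # Forbid attending to future positions.
            i, j = sim.shape[-2:]
            causal_mask = torch.ones(i, j, dtype=torch.bool, device=sim.device).triu(j - i + 1)
            sim = sim.masked_fill(causal_mask, torch.finfo(sim.dtype).min)
        return torch.einsum("b h i j, b h j d -> b h i d", sim.softmax(dim=-1), v)

    def forward(
        self,
        q: Tensor,
        k: Tensor,
        v: Tensor,
        causal: bool = False,
        mask: Optional[Tensor] = None,
    ) -> Tensor:
        # Dispatch shown in the second hunk of the patch.
        if self.use_flash:
            return self.flash_attn(q, k, v, causal)
        else:
            return self.attn(q, k, v, causal, mask)


if __name__ == "__main__":
    # Hypothetical usage: both paths produce a (1, 8, 16, 64) output.
    q = k = v = torch.randn(1, 8, 16, 64)
    out = Attend(use_flash=True)(q, k, v, causal=True)
    print(out.shape)

Keeping the fallback's signature identical to the fused path (plus an optional mask) is what lets forward switch implementations with a single flag, which is also why a consistent method name (attn) matters for the call site fixed here.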