Skip to content

Commit 07e56a1

Browse files
committed
allow encoder to cross attend
1 parent df38272 commit 07e56a1

File tree

2 files changed

+1
-2
lines changed

2 files changed

+1
-2
lines changed

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
setup(
44
name = 'x-transformers',
55
packages = find_packages(exclude=['examples']),
6-
version = '0.3.1',
6+
version = '0.3.2',
77
license='MIT',
88
description = 'X-Transformers - Pytorch',
99
author = 'Phil Wang',

x_transformers/x_transformers.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -343,7 +343,6 @@ def forward(self, x, context = None, mask = None, context_mask = None):
343343
class Encoder(AttentionLayers):
344344
def __init__(self, **kwargs):
345345
assert 'causal' not in kwargs, 'cannot set causality on encoder'
346-
assert 'cross_attend' not in kwargs, 'encoder cannot cross attend'
347346
super().__init__(causal = False, **kwargs)
348347

349348
class Decoder(AttentionLayers):

0 commit comments

Comments (0)