We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent b97e42b commit 29c230b
nemo/collections/llm/gpt/model/megatron/hyena/hyena_mixer.py
@@ -309,7 +309,7 @@ def pad_to_multiple(x, multiple=16):
         if (
             self.use_b2b_causal_conv1d
             and self.operator_type in ["hyena_short_conv", "hyena_medium_conv"]
-            and inference_context is not None
+            and inference_context is None
         ):
             # todo: support inference_context for b2b_kernel
             # Use the B2BCausalConv1dModule wrapper with the existing weights from the original model
0 commit comments