We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
1 parent f8cdd3d · commit cb8a229 (Copy full SHA for cb8a229)
1 file changed
paddlenlp/transformers/llama/modeling.py
@@ -1567,7 +1567,7 @@ def forward(
1567          if is_casual and alibi is None:
1568              attention_mask = None
1569          else:
1570 -            attention_mask = attention_mask.astype("bool")
1570 +            attention_mask = None if attention_mask is None else attention_mask.astype("bool")
1571          hidden_states = inputs_embeds
1572          # decoder layers
1573          all_hidden_states = () if output_hidden_states else None
0 commit comments