
Commit 1f13ba8

correct opt (huggingface#17301)
1 parent 349f1c8

File tree

1 file changed: 3 additions & 7 deletions


tests/models/opt/test_modeling_opt.py

Lines changed: 3 additions & 7 deletions
@@ -22,7 +22,7 @@
 import timeout_decorator  # noqa
 
 from transformers import OPTConfig, is_torch_available
-from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
+from transformers.testing_utils import require_torch, slow, torch_device
 
 from ...generation.test_generation_utils import GenerationTesterMixin
 from ...test_configuration_common import ConfigTester
@@ -266,25 +266,21 @@ def _long_tensor(tok_lst):
 
 
 @require_torch
-@require_sentencepiece
-@require_tokenizers
 class OPTModelIntegrationTests(unittest.TestCase):
     @slow
     def test_inference_no_head(self):
         model = OPTModel.from_pretrained("facebook/opt-350m").to(torch_device)
         input_ids = _long_tensor([[0, 31414, 232, 328, 740, 1140, 12695, 69, 46078, 1588, 2]])
-        attention_mask = input_ids.ne(model.config.pad_token_id)
         with torch.no_grad():
-            output = model(input_ids=input_ids, attention_mask=attention_mask).last_hidden_state
+            output = model(input_ids=input_ids).last_hidden_state
         expected_shape = torch.Size((1, 11, 512))
         self.assertEqual(output.shape, expected_shape)
         expected_slice = torch.tensor(
-            [[-0.2873, -1.9218, -0.3033], [-1.2710, -0.1338, -0.1902], [0.4095, 0.1214, -1.3121]], device=torch_device
+            [[-0.2867, -1.9256, -0.3062], [-1.2711, -0.1337, -0.1897], [0.4109, 0.1187, -1.3142]], device=torch_device
         )
         self.assertTrue(torch.allclose(output[:, :3, :3], expected_slice, atol=1e-3))
 
 
-@require_tokenizers
 @require_torch
 @slow
 class OPTEmbeddingsTest(unittest.TestCase):
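
For reference, below is a minimal standalone sketch of the check the updated test performs, with the unittest scaffolding and the _long_tensor helper inlined. It assumes torch and a transformers version with OPT support are installed and that the facebook/opt-350m checkpoint can be downloaded; the token ids and expected values are copied from the diff above, and the script itself is illustrative, not part of the commit. Dropping the attention_mask should be behavior-preserving here, since input_ids.ne(model.config.pad_token_id) is all-ones for this sequence (OPT's pad token id, 1, never appears in it).

import torch

from transformers import OPTModel

device = "cuda" if torch.cuda.is_available() else "cpu"
model = OPTModel.from_pretrained("facebook/opt-350m").to(device)
model.eval()

# Same token ids as the test; no attention_mask is passed after this commit.
input_ids = torch.tensor(
    [[0, 31414, 232, 328, 740, 1140, 12695, 69, 46078, 1588, 2]],
    dtype=torch.long,
    device=device,
)

with torch.no_grad():
    output = model(input_ids=input_ids).last_hidden_state

# opt-350m projects its 1024-dim hidden states down to word_embed_proj_dim == 512.
assert output.shape == torch.Size((1, 11, 512))

# Corrected reference values from the diff above.
expected_slice = torch.tensor(
    [[-0.2867, -1.9256, -0.3062], [-1.2711, -0.1337, -0.1897], [0.4109, 0.1187, -1.3142]],
    device=device,
)
assert torch.allclose(output[:, :3, :3], expected_slice, atol=1e-3)
print("OPT integration check passed")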
