
Commit 76262f6

Fix EXAONE-4.0 dummy id

Parent: cbb290e

4 files changed: +10 additions, -14 deletions


src/transformers/models/exaone4/configuration_exaone4.py (1 addition, 2 deletions)

@@ -26,8 +26,7 @@ class Exaone4Config(PretrainedConfig):
     r"""
     This is the configuration class to store the configuration of a [`Exaone4Model`]. It is used to
     instantiate a EXAONE 4.0 model according to the specified arguments, defining the model architecture. Instantiating a
-    configuration with the defaults will yield a similar configuration to that of the EXAONE-4.0-Instruct [LGAI-EXAONE/EXAONE-4.0-Instruct](https://huggingface.co/LGAI-EXAONE/EXAONE-4.0-Instruct)
-    NOTE: `EXAONE-4.0-Instruct` is a placeholder model ID. The exact model ID will be updated in the future.
+    configuration with the defaults will yield a similar configuration to that of the EXAONE-4.0-32B [LGAI-EXAONE/EXAONE-4.0-32B](https://huggingface.co/LGAI-EXAONE/EXAONE-4.0-32B)

     Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model
     outputs. Read the documentation from [`PretrainedConfig`] for more information.
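As a quick illustration of what the revised docstring claims, the sketch below instantiates `Exaone4Config` with its defaults and compares a few fields against the config published under the new ID. It assumes a transformers release that ships the `exaone4` architecture and network access to the Hub; the field names printed are only examples.

```python
# Minimal sketch: default Exaone4Config vs. the config hosted under the corrected model id.
# Assumes a transformers version that includes the exaone4 architecture.
from transformers import AutoConfig, Exaone4Config

default_config = Exaone4Config()  # the defaults described in the docstring
hub_config = AutoConfig.from_pretrained("LGAI-EXAONE/EXAONE-4.0-32B")  # checkpoint named in the diff

# Compare a few core fields; exact values depend on the released checkpoint.
for field in ("hidden_size", "num_hidden_layers", "num_attention_heads", "vocab_size"):
    print(field, getattr(default_config, field, None), getattr(hub_config, field, None))
```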

src/transformers/models/exaone4/modeling_exaone4.py (3 additions, 4 deletions)

@@ -465,8 +465,8 @@ def forward(

         ```python
         >>> from transformers import AutoModelForCausalLM, AutoTokenizer
-        >>> model = AutoModelForCausalLM.from_pretrained("LGAI-EXAONE/EXAONE-4.0-Instruct")
-        >>> tokenizer = AutoTokenizer.from_pretrained("LGAI-EXAONE/EXAONE-4.0-Instruct")
+        >>> model = AutoModelForCausalLM.from_pretrained("LGAI-EXAONE/EXAONE-4.0-32B")
+        >>> tokenizer = AutoTokenizer.from_pretrained("LGAI-EXAONE/EXAONE-4.0-32B")

         >>> prompt = "Explain how wonderful you are"
         >>> messages = [

@@ -485,8 +485,7 @@ def forward(
         >>> tokenizer.decode(output[0], skip_special_tokens=False)
         "[|system|]\nYou are a helpful assistant.[|endofturn|]\n[|user|]\nExplain how wonderful you are[|endofturn|]\n[|assistant|]\n<think>\n\n</think>\n\nOh, thank you for such a kind and lovely question! 😊 \n\nI’m *so* wonderful because I’m here to make your life easier, brighter, and more fun! Whether you need help with: \n\n✨ **Learning** – I can explain anything, from quantum physics to baking the perfect cake! \n💡 **Creativity** – Need a poem, story, or a wild idea? I’ve got you covered! \n🤖 **Problem-solving** – Stuck on a math problem or a tricky decision? I’ll help you figure it out"
         ```
-
-        NOTE: `EXAONE-4.0-Instruct` is a placeholder model ID. The exact model ID will be updated in the future."""
+        """
         outputs: BaseModelOutputWithPast = self.model(
             input_ids=input_ids,
             attention_mask=attention_mask,
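The docstring example above is cut off in the diff view after `>>> messages = [`. For readers who want the end-to-end flow it implies, here is a hedged reconstruction using the corrected model ID; the chat turns are inferred from the decoded output shown in the diff, and the exact messages and generation settings in the real docstring may differ.

```python
# Hedged reconstruction of the docstring example (illustrative, not verbatim from the repo).
# Loading the 32B checkpoint needs substantial memory; swap in a smaller model to experiment.
from transformers import AutoModelForCausalLM, AutoTokenizer

model = AutoModelForCausalLM.from_pretrained("LGAI-EXAONE/EXAONE-4.0-32B")
tokenizer = AutoTokenizer.from_pretrained("LGAI-EXAONE/EXAONE-4.0-32B")

prompt = "Explain how wonderful you are"
messages = [
    {"role": "system", "content": "You are a helpful assistant."},  # inferred from the decoded output in the diff
    {"role": "user", "content": prompt},
]
input_ids = tokenizer.apply_chat_template(messages, add_generation_prompt=True, return_tensors="pt")

output = model.generate(input_ids, max_new_tokens=128)
print(tokenizer.decode(output[0], skip_special_tokens=False))
```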

src/transformers/models/exaone4/modular_exaone4.py (5 additions, 7 deletions)

@@ -53,16 +53,15 @@

 logger = logging.get_logger(__name__)

-_CHECKPOINT_FOR_DOC = "LGAI-EXAONE/EXAONE-4.0-Instruct"
+_CHECKPOINT_FOR_DOC = "LGAI-EXAONE/EXAONE-4.0-32B"
 _CONFIG_FOR_DOC = "Exaone4Config"


 class Exaone4Config(PretrainedConfig):
     r"""
     This is the configuration class to store the configuration of a [`Exaone4Model`]. It is used to
     instantiate a EXAONE 4.0 model according to the specified arguments, defining the model architecture. Instantiating a
-    configuration with the defaults will yield a similar configuration to that of the EXAONE-4.0-Instruct [LGAI-EXAONE/EXAONE-4.0-Instruct](https://huggingface.co/LGAI-EXAONE/EXAONE-4.0-Instruct)
-    NOTE: `EXAONE-4.0-Instruct` is a placeholder model ID. The exact model ID will be updated in the future.
+    configuration with the defaults will yield a similar configuration to that of the EXAONE-4.0-32B [LGAI-EXAONE/EXAONE-4.0-32B](https://huggingface.co/LGAI-EXAONE/EXAONE-4.0-32B)

     Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model
     outputs. Read the documentation from [`PretrainedConfig`] for more information.

@@ -462,8 +461,8 @@ def forward(

         ```python
         >>> from transformers import AutoModelForCausalLM, AutoTokenizer
-        >>> model = AutoModelForCausalLM.from_pretrained("LGAI-EXAONE/EXAONE-4.0-Instruct")
-        >>> tokenizer = AutoTokenizer.from_pretrained("LGAI-EXAONE/EXAONE-4.0-Instruct")
+        >>> model = AutoModelForCausalLM.from_pretrained("LGAI-EXAONE/EXAONE-4.0-32B")
+        >>> tokenizer = AutoTokenizer.from_pretrained("LGAI-EXAONE/EXAONE-4.0-32B")

         >>> prompt = "Explain how wonderful you are"
         >>> messages = [

@@ -482,8 +481,7 @@ def forward(
         >>> tokenizer.decode(output[0], skip_special_tokens=False)
         "[|system|]\nYou are a helpful assistant.[|endofturn|]\n[|user|]\nExplain how wonderful you are[|endofturn|]\n[|assistant|]\n<think>\n\n</think>\n\nOh, thank you for such a kind and lovely question! 😊 \n\nI’m *so* wonderful because I’m here to make your life easier, brighter, and more fun! Whether you need help with: \n\n✨ **Learning** – I can explain anything, from quantum physics to baking the perfect cake! \n💡 **Creativity** – Need a poem, story, or a wild idea? I’ve got you covered! \n🤖 **Problem-solving** – Stuck on a math problem or a tricky decision? I’ll help you figure it out"
         ```
-
-        NOTE: `EXAONE-4.0-Instruct` is a placeholder model ID. The exact model ID will be updated in the future."""
+        """
         super().forward(
             input_ids=input_ids,
             attention_mask=attention_mask,

tests/models/exaone4/test_modeling_exaone4.py (1 addition, 1 deletion)

@@ -98,7 +98,7 @@ def setUp(self):

 @require_torch
 class Exaone4IntegrationTest(unittest.TestCase):
-    TEST_MODEL_ID = "LGAI-EXAONE/EXAONE-4.0-Instruct"  # dummy model id
+    TEST_MODEL_ID = "LGAI-EXAONE/EXAONE-4.0-32B"

     def tearDown(self):
         # TODO (joao): automatic compilation, i.e. compilation when `cache_implementation="static"` is used, leaves
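To show how the corrected constant would typically be exercised, here is a minimal, self-contained smoke test in the spirit of `Exaone4IntegrationTest`. It is illustrative only: the real test class in the repository has its own methods, and running this requires network access plus enough memory for the 32B checkpoint.

```python
# Illustrative smoke test using the corrected model id; not the repository's actual test body.
import unittest

from transformers import AutoModelForCausalLM, AutoTokenizer

TEST_MODEL_ID = "LGAI-EXAONE/EXAONE-4.0-32B"


class Exaone4SmokeTest(unittest.TestCase):
    def test_generate_extends_prompt(self):
        tokenizer = AutoTokenizer.from_pretrained(TEST_MODEL_ID)
        model = AutoModelForCausalLM.from_pretrained(TEST_MODEL_ID)
        inputs = tokenizer("Explain how wonderful you are", return_tensors="pt")
        output = model.generate(**inputs, max_new_tokens=16)
        # Generation should append new tokens after the prompt.
        self.assertGreater(output.shape[-1], inputs["input_ids"].shape[-1])


if __name__ == "__main__":
    unittest.main()
```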
