Skip to content

Commit 63f182d

Browse files
committed
Update the default chat model id for Gemini to gemini-2.5-flash
1 parent d0f885d commit 63f182d

File tree

10 files changed

+44
-70
lines changed

10 files changed

+44
-70
lines changed

docs/modules/ROOT/pages/includes/quarkus-langchain4j-ai-gemini.adoc

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AI_GEMINI_CHAT_MODEL_MODEL_ID+++`
215215
endif::add-copy-button-to-env-var[]
216216
--
217217
|string
218-
|`gemini-1.5-flash`
218+
|`gemini-2.5-flash`
219219

220220
a| [[quarkus-langchain4j-ai-gemini_quarkus-langchain4j-ai-gemini-chat-model-temperature]] [.property-path]##link:#quarkus-langchain4j-ai-gemini_quarkus-langchain4j-ai-gemini-chat-model-temperature[`quarkus.langchain4j.ai.gemini.chat-model.temperature`]##
221221
ifdef::add-copy-button-to-config-props[]
@@ -275,7 +275,7 @@ Specify a lower value for less random responses and a higher value for more rand
275275

276276
Range: 0.0 - 1.0
277277

278-
gemini-1.0-pro and gemini-1.5-pro don't support topK
278+
gemini-2.5-flash doesn't support topK
279279

280280

281281
ifdef::add-copy-button-to-env-var[]
@@ -304,9 +304,8 @@ Specify a lower value for less random responses and a higher value for more rand
304304

305305
Range: 1-40
306306

307-
Default for gemini-1.5-pro: 0.94
307+
Default for gemini-2.5-flash: 0.95
308308

309-
Default for gemini-1.0-pro: 1
310309

311310

312311
ifdef::add-copy-button-to-env-var[]
@@ -722,7 +721,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AI_GEMINI__MODEL_NAME__CHAT_MODEL_
722721
endif::add-copy-button-to-env-var[]
723722
--
724723
|string
725-
|`gemini-1.5-flash`
724+
|`gemini-2.5-flash`
726725

727726
a| [[quarkus-langchain4j-ai-gemini_quarkus-langchain4j-ai-gemini-model-name-chat-model-temperature]] [.property-path]##link:#quarkus-langchain4j-ai-gemini_quarkus-langchain4j-ai-gemini-model-name-chat-model-temperature[`quarkus.langchain4j.ai.gemini."model-name".chat-model.temperature`]##
728727
ifdef::add-copy-button-to-config-props[]
@@ -782,7 +781,7 @@ Specify a lower value for less random responses and a higher value for more rand
782781

783782
Range: 0.0 - 1.0
784783

785-
gemini-1.0-pro and gemini-1.5-pro don't support topK
784+
gemini-2.5-flash doesn't support topK
786785

787786

788787
ifdef::add-copy-button-to-env-var[]
@@ -811,9 +810,8 @@ Specify a lower value for less random responses and a higher value for more rand
811810

812811
Range: 1-40
813812

814-
Default for gemini-1.5-pro: 0.94
813+
Default for gemini-2.5-flash: 0.95
815814

816-
Default for gemini-1.0-pro: 1
817815

818816

819817
ifdef::add-copy-button-to-env-var[]

docs/modules/ROOT/pages/includes/quarkus-langchain4j-ai-gemini_quarkus.langchain4j.adoc

Lines changed: 6 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -215,7 +215,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AI_GEMINI_CHAT_MODEL_MODEL_ID+++`
215215
endif::add-copy-button-to-env-var[]
216216
--
217217
|string
218-
|`gemini-1.5-flash`
218+
|`gemini-2.5-flash`
219219

220220
a| [[quarkus-langchain4j-ai-gemini_quarkus-langchain4j-ai-gemini-chat-model-temperature]] [.property-path]##link:#quarkus-langchain4j-ai-gemini_quarkus-langchain4j-ai-gemini-chat-model-temperature[`quarkus.langchain4j.ai.gemini.chat-model.temperature`]##
221221
ifdef::add-copy-button-to-config-props[]
@@ -275,7 +275,7 @@ Specify a lower value for less random responses and a higher value for more rand
275275

276276
Range: 0.0 - 1.0
277277

278-
gemini-1.0-pro and gemini-1.5-pro don't support topK
278+
gemini-2.5-flash doesn't support topK
279279

280280

281281
ifdef::add-copy-button-to-env-var[]
@@ -304,9 +304,8 @@ Specify a lower value for less random responses and a higher value for more rand
304304

305305
Range: 1-40
306306

307-
Default for gemini-1.5-pro: 0.94
307+
Default for gemini-2.5-flash: 0.95
308308

309-
Default for gemini-1.0-pro: 1
310309

311310

312311
ifdef::add-copy-button-to-env-var[]
@@ -722,7 +721,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_AI_GEMINI__MODEL_NAME__CHAT_MODEL_
722721
endif::add-copy-button-to-env-var[]
723722
--
724723
|string
725-
|`gemini-1.5-flash`
724+
|`gemini-2.5-flash`
726725

727726
a| [[quarkus-langchain4j-ai-gemini_quarkus-langchain4j-ai-gemini-model-name-chat-model-temperature]] [.property-path]##link:#quarkus-langchain4j-ai-gemini_quarkus-langchain4j-ai-gemini-model-name-chat-model-temperature[`quarkus.langchain4j.ai.gemini."model-name".chat-model.temperature`]##
728727
ifdef::add-copy-button-to-config-props[]
@@ -782,7 +781,7 @@ Specify a lower value for less random responses and a higher value for more rand
782781

783782
Range: 0.0 - 1.0
784783

785-
gemini-1.0-pro and gemini-1.5-pro don't support topK
784+
gemini-2.5-flash doesn't support topK
786785

787786

788787
ifdef::add-copy-button-to-env-var[]
@@ -811,9 +810,8 @@ Specify a lower value for less random responses and a higher value for more rand
811810

812811
Range: 1-40
813812

814-
Default for gemini-1.5-pro: 0.94
813+
Default for gemini-2.5-flash: 0.95
815814

816-
Default for gemini-1.0-pro: 1
817815

818816

819817
ifdef::add-copy-button-to-env-var[]

docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini.adoc

Lines changed: 10 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -299,7 +299,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI_CHAT_MODEL_MODEL_I
299299
endif::add-copy-button-to-env-var[]
300300
--
301301
|string
302-
|`gemini-1.5-pro`
302+
|`gemini-2.5-flash`
303303

304304
a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-temperature]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-temperature[`quarkus.langchain4j.vertexai.gemini.chat-model.temperature`]##
305305
ifdef::add-copy-button-to-config-props[]
@@ -313,13 +313,10 @@ The temperature is used for sampling during response generation, which occurs wh
313313

314314
If the model returns a response that's too generic, too short, or the model gives a fallback response, try increasing the temperature.
315315

316-
Range for gemini-1.0-pro-001
316+
Range for gemini-2.5-flash: 0.0 - 2.0
317317

318-
Range for gemini-1.0-pro-002, gemini-1.5-pro: 0.0 - 2.0
318+
Default for gemini-2.5-flash: 1.0
319319

320-
Default for gemini-1.5-pro and gemini-1.0-pro-002: 1.0
321-
322-
Default for gemini-1.0-pro-001: 0.9
323320

324321

325322
ifdef::add-copy-button-to-env-var[]
@@ -367,7 +364,7 @@ Specify a lower value for less random responses and a higher value for more rand
367364

368365
Range: 0.0 - 1.0
369366

370-
gemini-1.0-pro and gemini-1.5-pro don't support topK
367+
gemini-2.5-flash doesn't support topK
371368

372369

373370
ifdef::add-copy-button-to-env-var[]
@@ -396,9 +393,8 @@ Specify a lower value for less random responses and a higher value for more rand
396393

397394
Range: 1-40
398395

399-
Default for gemini-1.5-pro: 0.94
396+
Default for gemini-2.5-flash: 0.95
400397

401-
Default for gemini-1.0-pro: 1
402398

403399

404400
ifdef::add-copy-button-to-env-var[]
@@ -854,7 +850,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI__MODEL_NAME__CHAT_
854850
endif::add-copy-button-to-env-var[]
855851
--
856852
|string
857-
|`gemini-1.5-pro`
853+
|`gemini-2.5-flash`
858854

859855
a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-temperature]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-temperature[`quarkus.langchain4j.vertexai.gemini."model-name".chat-model.temperature`]##
860856
ifdef::add-copy-button-to-config-props[]
@@ -868,13 +864,11 @@ The temperature is used for sampling during response generation, which occurs wh
868864

869865
If the model returns a response that's too generic, too short, or the model gives a fallback response, try increasing the temperature.
870866

871-
Range for gemini-1.0-pro-001
872867

873-
Range for gemini-1.0-pro-002, gemini-1.5-pro: 0.0 - 2.0
868+
Range for gemini-2.5-flash: 0.0 - 2.0
874869

875-
Default for gemini-1.5-pro and gemini-1.0-pro-002: 1.0
870+
Default for gemini-2.5-flash: 1.0
876871

877-
Default for gemini-1.0-pro-001: 0.9
878872

879873

880874
ifdef::add-copy-button-to-env-var[]
@@ -922,7 +916,7 @@ Specify a lower value for less random responses and a higher value for more rand
922916

923917
Range: 0.0 - 1.0
924918

925-
gemini-1.0-pro and gemini-1.5-pro don't support topK
919+
gemini-2.5-flash doesn't support topK
926920

927921

928922
ifdef::add-copy-button-to-env-var[]
@@ -951,9 +945,8 @@ Specify a lower value for less random responses and a higher value for more rand
951945

952946
Range: 1-40
953947

954-
Default for gemini-1.5-pro: 0.94
948+
Default for gemini-2.5-flash: 0.95
955949

956-
Default for gemini-1.0-pro: 1
957950

958951

959952
ifdef::add-copy-button-to-env-var[]

docs/modules/ROOT/pages/includes/quarkus-langchain4j-vertex-ai-gemini_quarkus.langchain4j.adoc

Lines changed: 10 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -299,7 +299,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI_CHAT_MODEL_MODEL_I
299299
endif::add-copy-button-to-env-var[]
300300
--
301301
|string
302-
|`gemini-1.5-pro`
302+
|`gemini-2.5-flash`
303303

304304
a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-temperature]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-chat-model-temperature[`quarkus.langchain4j.vertexai.gemini.chat-model.temperature`]##
305305
ifdef::add-copy-button-to-config-props[]
@@ -313,13 +313,11 @@ The temperature is used for sampling during response generation, which occurs wh
313313

314314
If the model returns a response that's too generic, too short, or the model gives a fallback response, try increasing the temperature.
315315

316-
Range for gemini-1.0-pro-001
317316

318-
Range for gemini-1.0-pro-002, gemini-1.5-pro: 0.0 - 2.0
317+
Range for gemini-2.5-flash: 0.0 - 2.0
319318

320-
Default for gemini-1.5-pro and gemini-1.0-pro-002: 1.0
319+
Default for gemini-2.5-flash: 1.0
321320

322-
Default for gemini-1.0-pro-001: 0.9
323321

324322

325323
ifdef::add-copy-button-to-env-var[]
@@ -367,7 +365,7 @@ Specify a lower value for less random responses and a higher value for more rand
367365

368366
Range: 0.0 - 1.0
369367

370-
gemini-1.0-pro and gemini-1.5-pro don't support topK
368+
gemini-2.5-flash doesn't support topK
371369

372370

373371
ifdef::add-copy-button-to-env-var[]
@@ -396,9 +394,8 @@ Specify a lower value for less random responses and a higher value for more rand
396394

397395
Range: 1-40
398396

399-
Default for gemini-1.5-pro: 0.94
397+
Default for gemini-2.5-flash: 0.95
400398

401-
Default for gemini-1.0-pro: 1
402399

403400

404401
ifdef::add-copy-button-to-env-var[]
@@ -854,7 +851,7 @@ Environment variable: `+++QUARKUS_LANGCHAIN4J_VERTEXAI_GEMINI__MODEL_NAME__CHAT_
854851
endif::add-copy-button-to-env-var[]
855852
--
856853
|string
857-
|`gemini-1.5-pro`
854+
|`gemini-2.5-flash`
858855

859856
a| [[quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-temperature]] [.property-path]##link:#quarkus-langchain4j-vertex-ai-gemini_quarkus-langchain4j-vertexai-gemini-model-name-chat-model-temperature[`quarkus.langchain4j.vertexai.gemini."model-name".chat-model.temperature`]##
860857
ifdef::add-copy-button-to-config-props[]
@@ -868,13 +865,11 @@ The temperature is used for sampling during response generation, which occurs wh
868865

869866
If the model returns a response that's too generic, too short, or the model gives a fallback response, try increasing the temperature.
870867

871-
Range for gemini-1.0-pro-001
872868

873-
Range for gemini-1.0-pro-002, gemini-1.5-pro: 0.0 - 2.0
869+
Range for gemini-2.5-flash: 0.0 - 2.0
874870

875-
Default for gemini-1.5-pro and gemini-1.0-pro-002: 1.0
871+
Default for gemini-2.5-flash: 1.0
876872

877-
Default for gemini-1.0-pro-001: 0.9
878873

879874

880875
ifdef::add-copy-button-to-env-var[]
@@ -922,7 +917,7 @@ Specify a lower value for less random responses and a higher value for more rand
922917

923918
Range: 0.0 - 1.0
924919

925-
gemini-1.0-pro and gemini-1.5-pro don't support topK
920+
gemini-2.5-flash doesn't support topK
926921

927922

928923
ifdef::add-copy-button-to-env-var[]
@@ -951,9 +946,7 @@ Specify a lower value for less random responses and a higher value for more rand
951946

952947
Range: 1-40
953948

954-
Default for gemini-1.5-pro: 0.94
955-
956-
Default for gemini-1.0-pro: 1
949+
Default for gemini-2.5-flash: 0.95
957950

958951

959952
ifdef::add-copy-button-to-env-var[]

integration-tests/ai-gemini/src/main/java/org/acme/example/gemini/aiservices/GeminiResource.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
public class GeminiResource {
1717

1818
@POST
19-
@Path("v1beta/models/gemini-1.5-flash:generateContent")
19+
@Path("v1beta/models/gemini-2.5-flash:generateContent")
2020
@Produces("application/json")
2121
@Consumes("application/json")
2222
public String generateResponse(String generateRequest, @RestQuery String key) {

model-providers/google/gemini/ai-gemini/deployment/src/test/java/io/quarkiverse/langchain4j/ai/gemini/deployment/AiGeminiChatLanguageModelAuthProviderSmokeTest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
public class AiGeminiChatLanguageModelAuthProviderSmokeTest extends WiremockAware {
2727

2828
private static final String API_KEY = "dummy";
29-
private static final String CHAT_MODEL_ID = "gemini-1.5-flash";
29+
private static final String CHAT_MODEL_ID = "gemini-2.5-flash";
3030

3131
@RegisterExtension
3232
static final QuarkusUnitTest unitTest = new QuarkusUnitTest()

model-providers/google/gemini/ai-gemini/deployment/src/test/java/io/quarkiverse/langchain4j/ai/gemini/deployment/AiGeminiChatLanguageModelSmokeTest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323
public class AiGeminiChatLanguageModelSmokeTest extends WiremockAware {
2424

2525
private static final String API_KEY = "dummy";
26-
private static final String CHAT_MODEL_ID = "gemini-1.5-flash";
26+
private static final String CHAT_MODEL_ID = "gemini-2.5-flash";
2727

2828
@RegisterExtension
2929
static final QuarkusUnitTest unitTest = new QuarkusUnitTest()

model-providers/google/gemini/ai-gemini/runtime/src/main/java/io/quarkiverse/langchain4j/ai/runtime/gemini/config/ChatModelConfig.java

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ public interface ChatModelConfig {
2020
* @see <a href=
2121
* "https://ai.google.dev/gemini-api/docs/models/gemini">https://ai.google.dev/gemini-api/docs/models/gemini</a>
2222
*/
23-
@WithDefault("gemini-1.5-flash")
23+
@WithDefault("gemini-2.5-flash")
2424
String modelId();
2525

2626
/**
@@ -54,7 +54,7 @@ public interface ChatModelConfig {
5454
* <p>
5555
* Range: 0.0 - 1.0
5656
* <p>
57-
* gemini-1.0-pro and gemini-1.5-pro don't support topK
57+
* Default for gemini-2.5-flash: 0.95
5858
*/
5959
OptionalDouble topP();
6060

@@ -70,9 +70,7 @@ public interface ChatModelConfig {
7070
* <p>
7171
* Range: 1-40
7272
* <p>
73-
* Default for gemini-1.5-pro: 0.94
74-
* <p>
75-
* Default for gemini-1.0-pro: 1
73+
* gemini-2.5-flash doesn't support topK
7674
*/
7775
OptionalInt topK();
7876

model-providers/google/vertex-ai-gemini/deployment/src/test/java/io/quarkiverse/langchain4j/vertexai/gemini/deployment/VertexAiGeminiChatLanguageModelSmokeTest.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
public class VertexAiGeminiChatLanguageModelSmokeTest extends WiremockAware {
2727

2828
private static final String API_KEY = "somekey";
29-
private static final String CHAT_MODEL_ID = "gemini-1.5-pro";
29+
private static final String CHAT_MODEL_ID = "gemini-2.5-flash";
3030

3131
@RegisterExtension
3232
static final QuarkusUnitTest unitTest = new QuarkusUnitTest()

0 commit comments

Comments (0)