Skip to content

Commit 556f5b6

Browse files
committed
Remove TokenCountEstimator interfaces
langchain4j/langchain4j#2791
1 parent 98db18f commit 556f5b6

File tree

1 file changed

+5
-10
lines changed
  • langchain4j-azure-open-ai-spring-boot-starter/src/main/java/dev/langchain4j/azure/openai/spring

1 file changed

+5
-10
lines changed

langchain4j-azure-open-ai-spring-boot-starter/src/main/java/dev/langchain4j/azure/openai/spring/AutoConfig.java

Lines changed: 5 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
import com.azure.core.http.ProxyOptions;
44
import com.azure.core.util.Configuration;
5-
import dev.langchain4j.model.Tokenizer;
65
import dev.langchain4j.model.azure.*;
76
import dev.langchain4j.model.chat.listener.ChatModelListener;
87
import org.springframework.beans.factory.ObjectProvider;
@@ -11,7 +10,6 @@
1110
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
1211
import org.springframework.boot.context.properties.EnableConfigurationProperties;
1312
import org.springframework.context.annotation.Bean;
14-
import org.springframework.lang.Nullable;
1513

1614
import java.time.Duration;
1715

@@ -40,7 +38,6 @@ AzureOpenAiChatModel openAiChatModel(Properties properties, ObjectProvider<ChatM
4038
.serviceVersion(chatModelProperties.serviceVersion())
4139
.apiKey(chatModelProperties.apiKey())
4240
.deploymentName(chatModelProperties.deploymentName())
43-
// TODO inject tokenizer?
4441
.maxTokens(chatModelProperties.maxTokens())
4542
.temperature(chatModelProperties.temperature())
4643
.topP(chatModelProperties.topP())
@@ -87,7 +84,6 @@ AzureOpenAiStreamingChatModel openAiStreamingChatModel(Properties properties,
8784
.serviceVersion(chatModelProperties.serviceVersion())
8885
.apiKey(chatModelProperties.apiKey())
8986
.deploymentName(chatModelProperties.deploymentName())
90-
// TODO inject tokenizer?
9187
.maxTokens(chatModelProperties.maxTokens())
9288
.temperature(chatModelProperties.temperature())
9389
.topP(chatModelProperties.topP())
@@ -112,24 +108,23 @@ AzureOpenAiStreamingChatModel openAiStreamingChatModel(Properties properties,
112108

113109
@Bean
114110
@ConditionalOnProperty({Properties.PREFIX + ".embedding-model.api-key"})
115-
AzureOpenAiEmbeddingModel openAiEmbeddingModelByApiKey(Properties properties, @Nullable Tokenizer tokenizer) {
116-
return openAiEmbeddingModel(properties, tokenizer);
111+
AzureOpenAiEmbeddingModel openAiEmbeddingModelByApiKey(Properties properties) {
112+
return openAiEmbeddingModel(properties);
117113
}
118114

119115
@Bean
120116
@ConditionalOnProperty({Properties.PREFIX + ".embedding-model.non-azure-api-key"})
121-
AzureOpenAiEmbeddingModel openAiEmbeddingModelByNonAzureApiKey(Properties properties, @Nullable Tokenizer tokenizer) {
122-
return openAiEmbeddingModel(properties, tokenizer);
117+
AzureOpenAiEmbeddingModel openAiEmbeddingModelByNonAzureApiKey(Properties properties) {
118+
return openAiEmbeddingModel(properties);
123119
}
124120

125-
AzureOpenAiEmbeddingModel openAiEmbeddingModel(Properties properties, Tokenizer tokenizer) {
121+
AzureOpenAiEmbeddingModel openAiEmbeddingModel(Properties properties) {
126122
EmbeddingModelProperties embeddingModelProperties = properties.embeddingModel();
127123
AzureOpenAiEmbeddingModel.Builder builder = AzureOpenAiEmbeddingModel.builder()
128124
.endpoint(embeddingModelProperties.endpoint())
129125
.serviceVersion(embeddingModelProperties.serviceVersion())
130126
.apiKey(embeddingModelProperties.apiKey())
131127
.deploymentName(embeddingModelProperties.deploymentName())
132-
.tokenizer(tokenizer)
133128
.timeout(Duration.ofSeconds(embeddingModelProperties.timeout() == null ? 0 : embeddingModelProperties.timeout()))
134129
.maxRetries(embeddingModelProperties.maxRetries())
135130
.proxyOptions(ProxyOptions.fromConfiguration(Configuration.getGlobalConfiguration()))

0 commit comments

Comments (0)