@@ -13,7 +13,7 @@ import {
   ChatMessageTool,
   usePluginStore,
 } from "@/app/store";
-import { stream } from "@/app/utils/chat";
+import { streamWithThink } from "@/app/utils/chat";
 import {
   ChatOptions,
   getHeaders,
@@ -22,7 +22,10 @@ import {
   SpeechOptions,
 } from "../api";
 import { getClientConfig } from "@/app/config/client";
-import { getMessageTextContent } from "@/app/utils";
+import {
+  getMessageTextContent,
+  getMessageTextContentWithoutThinking,
+} from "@/app/utils";
 import { RequestPayload } from "./openai";
 import { fetch } from "@/app/utils/stream";

@@ -67,8 +70,13 @@ export class DeepSeekApi implements LLMApi {
   async chat(options: ChatOptions) {
     const messages: ChatOptions["messages"] = [];
     for (const v of options.messages) {
-      const content = getMessageTextContent(v);
-      messages.push({ role: v.role, content });
+      if (v.role === "assistant") {
+        const content = getMessageTextContentWithoutThinking(v);
+        messages.push({ role: v.role, content });
+      } else {
+        const content = getMessageTextContent(v);
+        messages.push({ role: v.role, content });
+      }
     }

     const modelConfig = {
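Note: `getMessageTextContentWithoutThinking` comes from `@/app/utils` and is not shown in this diff. DeepSeek's API documents that `reasoning_content` must not be passed back in input messages, so assistant history has to be re-sent without the reasoning trace. A minimal sketch of what such a helper might look like, assuming the trace is kept inline as a `<think>…</think>` block (the actual implementation may differ):

```ts
// Hypothetical sketch — NOT the real helper from @/app/utils.
// Assumes reasoning is embedded as <think>…</think> in the message text;
// stripping it keeps the model's chain-of-thought out of the next request.
function getMessageTextContentWithoutThinkingSketch(message: {
  role: string;
  content: string;
}): string {
  return message.content.replace(/<think>[\s\S]*?<\/think>/g, "").trim();
}
```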
@@ -107,6 +115,8 @@ export class DeepSeekApi implements LLMApi {
       headers: getHeaders(),
     };

+    // console.log(chatPayload);
+
     // make a fetch request
     const requestTimeoutId = setTimeout(
       () => controller.abort(),
@@ -119,7 +129,7 @@ export class DeepSeekApi implements LLMApi {
       .getAsTools(
         useChatStore.getState().currentSession().mask?.plugin || [],
       );
-    return stream(
+    return streamWithThink(
       chatPath,
       requestPayload,
       getHeaders(),
@@ -132,8 +142,9 @@ export class DeepSeekApi implements LLMApi {
           const json = JSON.parse(text);
           const choices = json.choices as Array<{
             delta: {
-              content: string;
+              content: string | null;
               tool_calls: ChatMessageTool[];
+              reasoning_content: string | null;
             };
           }>;
           const tool_calls = choices[0]?.delta?.tool_calls;
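For reference, with `deepseek-reasoner` the streamed delta carries chain-of-thought tokens in a separate `reasoning_content` field while `content` stays `null`, and the fields swap once the final answer begins — hence the widened types above. An illustrative chunk (field values invented for the example):

```ts
// Illustrative only — values are made up; the shape matches the
// Array<{ delta: ... }> cast in the diff above.
const exampleThinkingChunk = {
  choices: [
    {
      delta: {
        content: null,
        reasoning_content: "Let me work through the question step by step...",
        tool_calls: [],
      },
    },
  ],
};
```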
@@ -155,7 +166,36 @@ export class DeepSeekApi implements LLMApi {
               runTools[index]["function"]["arguments"] += args;
             }
           }
-          return choices[0]?.delta?.content;
+          const reasoning = choices[0]?.delta?.reasoning_content;
+          const content = choices[0]?.delta?.content;
+
+          // Skip if both content and reasoning_content are empty or null
+          if (
+            (!reasoning || reasoning.trim().length === 0) &&
+            (!content || content.trim().length === 0)
+          ) {
+            return {
+              isThinking: false,
+              content: "",
+            };
+          }
+
+          if (reasoning && reasoning.trim().length > 0) {
+            return {
+              isThinking: true,
+              content: reasoning,
+            };
+          } else if (content && content.trim().length > 0) {
+            return {
+              isThinking: false,
+              content: content,
+            };
+          }
+
+          return {
+            isThinking: false,
+            content: "",
+          };
         },
         // processToolMessage, include tool_calls message and tool call results
         (
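The parse callback now returns `{ isThinking, content }` instead of a plain string, which is the contract `streamWithThink` consumes. A rough sketch of how a consumer might separate the two token streams (names and rendering are assumptions, not the actual code in `@/app/utils/chat`):

```ts
// Hypothetical consumer sketch — not the real streamWithThink internals.
let remainText = "";
let isInThinking = false;

function onChunk(chunk: { isThinking: boolean; content: string }) {
  if (chunk.isThinking && !isInThinking) {
    // entering the reasoning phase: open a visually distinct section
    isInThinking = true;
    remainText += "> ";
  } else if (!chunk.isThinking && isInThinking) {
    // reasoning finished: close the section before the real answer
    isInThinking = false;
    remainText += "\n\n";
  }
  remainText += chunk.content;
}
```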