Skip to content

Commit c41e86f

Browse files
authored
Merge pull request #6116 from ChatGPTNextWeb/feat/issue-6104-deepseek-reasoning-content
Support DeepSeek API streaming reasoning content
2 parents 553b8c9 + 143be69 commit c41e86f

File tree

4 files changed

+343
-13
lines changed

4 files changed

+343
-13
lines changed

README.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,13 @@ One-Click to get a well-designed cross-platform ChatGPT web UI, with Claude, GPT
4040

4141
</div>
4242

43+
## 🥳 DeepSeek R1 Now Supported!
44+
> Includes a dedicated thinking UI for the DeepSeek Reasoner model
45+
46+
<img src="https://github.com/user-attachments/assets/f3952210-3af1-4dc0-9b81-40eaa4847d9a"/>
47+
48+
49+
4350
## 🫣 NextChat Support MCP !
4451
> Before building, please set the environment variable ENABLE_MCP=true
4552

app/client/platforms/deepseek.ts

Lines changed: 47 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ import {
1313
ChatMessageTool,
1414
usePluginStore,
1515
} from "@/app/store";
16-
import { stream } from "@/app/utils/chat";
16+
import { streamWithThink } from "@/app/utils/chat";
1717
import {
1818
ChatOptions,
1919
getHeaders,
@@ -22,7 +22,10 @@ import {
2222
SpeechOptions,
2323
} from "../api";
2424
import { getClientConfig } from "@/app/config/client";
25-
import { getMessageTextContent } from "@/app/utils";
25+
import {
26+
getMessageTextContent,
27+
getMessageTextContentWithoutThinking,
28+
} from "@/app/utils";
2629
import { RequestPayload } from "./openai";
2730
import { fetch } from "@/app/utils/stream";
2831

@@ -67,8 +70,13 @@ export class DeepSeekApi implements LLMApi {
6770
async chat(options: ChatOptions) {
6871
const messages: ChatOptions["messages"] = [];
6972
for (const v of options.messages) {
70-
const content = getMessageTextContent(v);
71-
messages.push({ role: v.role, content });
73+
if (v.role === "assistant") {
74+
const content = getMessageTextContentWithoutThinking(v);
75+
messages.push({ role: v.role, content });
76+
} else {
77+
const content = getMessageTextContent(v);
78+
messages.push({ role: v.role, content });
79+
}
7280
}
7381

7482
const modelConfig = {
@@ -107,6 +115,8 @@ export class DeepSeekApi implements LLMApi {
107115
headers: getHeaders(),
108116
};
109117

118+
// console.log(chatPayload);
119+
110120
// make a fetch request
111121
const requestTimeoutId = setTimeout(
112122
() => controller.abort(),
@@ -119,7 +129,7 @@ export class DeepSeekApi implements LLMApi {
119129
.getAsTools(
120130
useChatStore.getState().currentSession().mask?.plugin || [],
121131
);
122-
return stream(
132+
return streamWithThink(
123133
chatPath,
124134
requestPayload,
125135
getHeaders(),
@@ -132,8 +142,9 @@ export class DeepSeekApi implements LLMApi {
132142
const json = JSON.parse(text);
133143
const choices = json.choices as Array<{
134144
delta: {
135-
content: string;
145+
content: string | null;
136146
tool_calls: ChatMessageTool[];
147+
reasoning_content: string | null;
137148
};
138149
}>;
139150
const tool_calls = choices[0]?.delta?.tool_calls;
@@ -155,7 +166,36 @@ export class DeepSeekApi implements LLMApi {
155166
runTools[index]["function"]["arguments"] += args;
156167
}
157168
}
158-
return choices[0]?.delta?.content;
169+
const reasoning = choices[0]?.delta?.reasoning_content;
170+
const content = choices[0]?.delta?.content;
171+
172+
// Skip if both content and reasoning_content are empty or null
173+
if (
174+
(!reasoning || reasoning.trim().length === 0) &&
175+
(!content || content.trim().length === 0)
176+
) {
177+
return {
178+
isThinking: false,
179+
content: "",
180+
};
181+
}
182+
183+
if (reasoning && reasoning.trim().length > 0) {
184+
return {
185+
isThinking: true,
186+
content: reasoning,
187+
};
188+
} else if (content && content.trim().length > 0) {
189+
return {
190+
isThinking: false,
191+
content: content,
192+
};
193+
}
194+
195+
return {
196+
isThinking: false,
197+
content: "",
198+
};
159199
},
160200
// processToolMessage, include tool_calls message and tool call results
161201
(

app/utils.ts

Lines changed: 25 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -16,8 +16,8 @@ export function trimTopic(topic: string) {
1616
return (
1717
topic
1818
// fix for gemini
19-
.replace(/^["*]+|["*]+$/g, "")
20-
.replace(/[",.!?*]*$/, "")
19+
.replace(/^["""*]+|[""*]+$/g, "")
20+
.replace(/["""",.!?*]*$/, "")
2121
);
2222
}
2323

@@ -241,6 +241,28 @@ export function getMessageTextContent(message: RequestMessage) {
241241
return "";
242242
}
243243

244+
export function getMessageTextContentWithoutThinking(message: RequestMessage) {
245+
let content = "";
246+
247+
if (typeof message.content === "string") {
248+
content = message.content;
249+
} else {
250+
for (const c of message.content) {
251+
if (c.type === "text") {
252+
content = c.text ?? "";
253+
break;
254+
}
255+
}
256+
}
257+
258+
// Filter out thinking lines (starting with "> ")
259+
return content
260+
.split("\n")
261+
.filter((line) => !line.startsWith("> ") && line.trim() !== "")
262+
.join("\n")
263+
.trim();
264+
}
265+
244266
export function getMessageImages(message: RequestMessage): string[] {
245267
if (typeof message.content === "string") {
246268
return [];
@@ -256,9 +278,7 @@ export function getMessageImages(message: RequestMessage): string[] {
256278

257279
export function isVisionModel(model: string) {
258280
const visionModels = useAccessStore.getState().visionModels;
259-
const envVisionModels = visionModels
260-
?.split(",")
261-
.map((m) => m.trim());
281+
const envVisionModels = visionModels?.split(",").map((m) => m.trim());
262282
if (envVisionModels?.includes(model)) {
263283
return true;
264284
}

0 commit comments

Comments
 (0)