Skip to content

Commit 51def56

Browse files
authored
Add z.ai provider
- Open image modal at the end of the image list
- Fix card glow behavior on theme change
- Add z.ai provider
- Handle z.ai reasoning responses
1 parent 64c4736 commit 51def56

6 files changed

Lines changed: 84 additions & 17 deletions

File tree

common/providers.ts

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -85,6 +85,20 @@ export const KNOWN_PROVIDERS: Record<string, ProviderDefinition> = {
8585
{ type: 'format', value: 'openai' },
8686
],
8787
},
88+
zai: {
89+
name: 'Z.ai',
90+
url: `https://api.z.ai/api/paas/v4`,
91+
formats: [
92+
{
93+
type: 'format',
94+
value: 'openai-chatv2',
95+
subs: [
96+
{ name: 'Standard', value: '' },
97+
{ name: 'Reasoning', value: 'reasoning' },
98+
],
99+
},
100+
],
101+
},
88102
}
89103

90104
export const KNOWN_SELF_HOST: Record<string, ProviderDefinition> = {

common/requests/stream.ts

Lines changed: 50 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -327,12 +327,8 @@ export async function* fetchStream(
327327
return
328328
}
329329
} else {
330-
const reasoning = getChoiceProp(json, 'reasoning') || getChoiceProp(json, 'thought')
331-
const token: string =
332-
getChoiceProp(json, 'content') ||
333-
getChoiceProp(json, 'text') ||
334-
json.token ||
335-
json.response
330+
const reasoning = getNextThoughts(json) || json.reasoning_content
331+
const token: string = getNextTokens(json) || json.token || json.response
336332

337333
const index = +(getChoiceProp<string>(json, 'index') || '0')
338334

@@ -401,7 +397,7 @@ export async function* fetchStream(
401397
yield { token: suffix + token }
402398
}
403399

404-
if (DEBUG) {
400+
if (DEBUG || true) {
405401
const choice = json.choices?.[0]
406402
if (choice) console.log(`#${type} `, inline(choice))
407403
else console.log(`#${type} `, inline(json))
@@ -521,6 +517,53 @@ function getChoiceProp<T = any>(json: any, prop: string, assign?: any) {
521517
return value as T
522518
}
523519

520+
function getNextTokens(json: any) {
521+
const props = ['content', 'text']
522+
523+
const choice = json?.choices?.[0]
524+
let value: any = undefined
525+
526+
for (const prop of props) {
527+
const match = choice?.delta?.[prop] || choice?.[prop] || json?.[prop]
528+
if (!match) continue
529+
530+
value = match
531+
break
532+
}
533+
534+
return value
535+
}
536+
537+
function getNextThoughts(json: any) {
538+
const props = ['reasoning', 'thought', 'reasoning_content']
539+
const choice = json?.choices?.[0]
540+
541+
let value: any = undefined
542+
for (const prop of props) {
543+
const match = choice?.delta?.[prop] || choice?.[prop] || json?.[prop]
544+
if (!match) continue
545+
546+
value = match
547+
break
548+
}
549+
550+
if (!value) return
551+
552+
if (typeof value === 'string') return value
553+
554+
// Mistral returns thoughts in an array for some reason string
555+
if (Array.isArray(value)) {
556+
const first = value[0]
557+
if (!first) return
558+
559+
if (first.type !== 'thinking') return
560+
if (!first.thinking?.[0]) return
561+
return first.thinking?.[0]?.text
562+
}
563+
564+
return
565+
}
566+
524567
function tryParse(value: any) {
525568
try {
526569
const obj = JSON.parse(value)

srv/adapter/openai.ts

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -104,14 +104,18 @@ export const handleOAI: ModelAdapter = async function* (opts) {
104104
// }
105105

106106
if (gen.reasoning?.enabled) {
107-
body.reasoning = {
108-
exclude: !!gen.reasoning.exclude,
109-
}
110-
111-
if (gen.reasoning.effort === 'custom') {
112-
body.reasoning.max_tokens = gen.reasoning.maxTokens
107+
if (opts.conn.provider?.provider === 'known-zai') {
108+
body.thinking = { type: 'enabled' }
113109
} else {
114-
body.reasoning.effort = gen.reasoning.effort || 'low'
110+
body.reasoning = {
111+
exclude: !!gen.reasoning.exclude,
112+
}
113+
114+
if (gen.reasoning.effort === 'custom') {
115+
body.reasoning.max_tokens = gen.reasoning.maxTokens
116+
} else {
117+
body.reasoning.effort = gen.reasoning.effort || 'low'
118+
}
115119
}
116120
}
117121

web/pages/Chat/ChatMenu.tsx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ const ChatNav: Component<NavProps> = (props) => {
9494
return
9595
}
9696

97-
imageStore.showMessageImages({ id: last._id, position: 0 })
97+
imageStore.showMessageImages({ id: last._id, position: -1 })
9898
}
9999

100100
return (

web/pages/Image/ImageModal.tsx

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -426,7 +426,9 @@ const PromptSettings: Component<{
426426
<div class="image-modal">
427427
<section class="flex flex-col gap-1" style={{ 'grid-area': 'options' }}>
428428
<Show when={props.messageId}>
429-
<Show when={window.flags.debug}>{props.messageId?.slice(0, 4)}</Show>
429+
<Show when={window.flags.debug}>
430+
<span class="text-500 text-xs italic">{props.messageId}</span>
431+
</Show>
430432
<TextInput
431433
placeholder="Caption Hint: What to focus on when generating the caption?"
432434
class="!text-sm"

web/shared/hooks.ts

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -282,7 +282,11 @@ export function useImageCache(opts: ImageCacheOpts = {}) {
282282
const imageIds = await reel.getImageIds()
283283
const images = imageIds.map(cleanIds)
284284

285-
const current = clamp(col.pos !== undefined ? col.pos : images.length - 1, images.length - 1, 0)
285+
const current = clamp(
286+
col.pos !== undefined && col.pos >= 0 ? col.pos : images.length - 1,
287+
images.length - 1,
288+
0
289+
)
286290

287291
const image = await reel.getImage(images[current])
288292
setState({ pos: current, image, images, imageId: images[current] })

0 commit comments

Comments
 (0)