Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -1019,6 +1019,24 @@ export const MentionInput: React.FC<MentionInputProps> = ({
[attachedFiles],
);

const renderModelSelectorTip = React.useCallback(
  (children: React.ReactNode) => {
    // Selector is interactive: render children untouched.
    if (!footerConfig.disableModelSelector) {
      return children;
    }
    // Selector is disabled: wrap it in a popover explaining why
    // (model can only be changed on a new/cleared session).
    return (
      <Popover
        id='ai-chat-model-selector'
        title={localize('aiNative.chat.modelSelector.disableTip')}
        position={PopoverPosition.top}
      >
        {children}
      </Popover>
    );
  },
  [footerConfig.disableModelSelector],
);

return (
<div className={styles.input_container}>
{mentionState.active && (
Expand Down Expand Up @@ -1048,16 +1066,17 @@ export const MentionInput: React.FC<MentionInputProps> = ({
</div>
<div className={styles.footer}>
<div className={styles.left_control}>
{footerConfig.showModelSelector && (
<Select
options={footerConfig.modelOptions || []}
value={selectedModel}
onChange={handleModelChange}
className={styles.model_selector}
size='small'
disabled={footerConfig.disableModelSelector}
/>
)}
{footerConfig.showModelSelector &&
renderModelSelectorTip(
<Select
options={footerConfig.modelOptions || []}
value={selectedModel}
onChange={handleModelChange}
className={styles.model_selector}
size='small'
disabled={footerConfig.disableModelSelector}
/>,
)}
{renderButtons(FooterButtonPosition.LEFT)}
</div>
<div className={styles.right_control}>
Expand Down
38 changes: 26 additions & 12 deletions packages/ai-native/src/browser/context/llm-context.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -48,10 +48,16 @@ export class LLMContextServiceImpl extends WithEventBus implements LLMContextSer
attachedFolders: FileContext[];
version: number;
}>();
private hasUserManualReference = false;
Comment thread
coderabbitai[bot] marked this conversation as resolved.
onDidContextFilesChangeEvent = this.onDidContextFilesChangeEmitter.event;

private addFileToList(file: FileContext, list: FileContext[], maxLimit: number) {
const existingIndex = list.findIndex((f) => f.uri.toString() === file.uri.toString());
const existingIndex = list.findIndex(
(f) =>
f.uri.toString() === file.uri.toString() &&
f.selection?.[0] === file.selection?.[0] &&
f.selection?.[1] === file.selection?.[1],
);
if (existingIndex > -1) {
list.splice(existingIndex, 1);
}
Expand Down Expand Up @@ -85,6 +91,7 @@ export class LLMContextServiceImpl extends WithEventBus implements LLMContextSer

if (isManual) {
this.docModelManager.createModelReference(uri);
this.hasUserManualReference = true;
}

this.addFileToList(file, targetList, maxLimit);
Expand All @@ -109,6 +116,7 @@ export class LLMContextServiceImpl extends WithEventBus implements LLMContextSer
/**
 * Reset the attached context entirely: drops all attached files and
 * folders, clears the "user manually referenced a file" flag so that
 * automatic context collection can resume, and notifies listeners of
 * the change.
 */
cleanFileContext() {
  this.attachedFiles = [];
  this.attachedFolders = [];
  this.hasUserManualReference = false;
  this.notifyContextChange();
}

Expand All @@ -127,6 +135,11 @@ export class LLMContextServiceImpl extends WithEventBus implements LLMContextSer
if (index > -1) {
targetList.splice(index, 1);
}
if (isManual) {
if (this.attachedFiles.length === 0) {
this.hasUserManualReference = false;
}
}
this.notifyContextChange();
}

Expand Down Expand Up @@ -176,14 +189,17 @@ export class LLMContextServiceImpl extends WithEventBus implements LLMContextSer
event.payload.selections[0].positionLineNumber,
].sort() as [number, number];

if (selection[0] === selection[1]) {
this.addFileToContext(event.payload.editorUri, undefined, false);
} else {
this.addFileToContext(
event.payload.editorUri,
selection.sort((a, b) => a - b),
false,
);
if (!this.hasUserManualReference) {
// 当没有用户手动引用时,才自动收集
if (selection[0] === selection[1]) {
this.addFileToContext(event.payload.editorUri, undefined, false);
} else {
this.addFileToContext(
event.payload.editorUri,
selection.sort((a, b) => a - b),
false,
);
}
}
Comment thread
coderabbitai[bot] marked this conversation as resolved.
}
}),
Expand Down Expand Up @@ -275,9 +291,7 @@ export class LLMContextServiceImpl extends WithEventBus implements LLMContextSer
}

return {
content: ref.instance.getText(
file.selection && new Range(file.selection[0], Infinity, file.selection[1], Infinity),
),
content: ref.instance.getText(file.selection && new Range(file.selection[0], 0, file.selection[1], Infinity)),
lineErrors: this.getFileErrors(file.uri),
path: workspaceRoot.relative(file.uri)!.toString(),
language: ref.instance.languageId!,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -201,14 +201,14 @@ export class MCPConfigService extends Disposable {
delete servers[prev.name];
}
servers[data.name] = serverConfig;
await this.sumiMCPServerBackendProxy.$addOrUpdateServer(data as MCPServerDescription);
// 更新情况下,如果原有服务是启用状态,则进行如下操作:
// 1. 关闭旧的服务
// 2. 启动新的服务
await this.preferenceService.set('mcp', { mcpServers: servers });
if (prev?.enabled) {
await this.sumiMCPServerBackendProxy.$removeServer(prev.name);
this.sumiMCPServerBackendProxy.$removeServer(prev.name);
}
await this.preferenceService.set('mcp', { mcpServers: servers });
this.sumiMCPServerBackendProxy.$addOrUpdateServer(data as MCPServerDescription);
Comment on lines +207 to +211
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

调整了保存服务器配置的操作顺序和异步行为

代码改动将首先更新偏好设置,然后异步执行后端服务器操作而不等待完成。这种变更有以下影响:

  1. 用户界面会立即反映新配置,提升了感知响应速度
  2. 由于不再等待后端操作完成,方法会更快返回
  3. 后端操作失败时用户不会立即收到通知

建议添加错误处理机制以捕获后端操作可能出现的错误:

 await this.preferenceService.set('mcp', { mcpServers: servers });
 if (prev?.enabled) {
-  this.sumiMCPServerBackendProxy.$removeServer(prev.name);
+  this.sumiMCPServerBackendProxy.$removeServer(prev.name).catch(error => {
+    this.logger.error(`无法移除服务器 ${prev.name}:`, error);
+    this.messageService.error(localize('ai.native.mcp.error.remove.server', '无法移除服务器: {0}', error.message || error));
+  });
 }
-this.sumiMCPServerBackendProxy.$addOrUpdateServer(data as MCPServerDescription);
+this.sumiMCPServerBackendProxy.$addOrUpdateServer(data as MCPServerDescription).catch(error => {
+  this.logger.error(`无法添加或更新服务器 ${data.name}:`, error);
+  this.messageService.error(localize('ai.native.mcp.error.add.server', '无法添加或更新服务器: {0}', error.message || error));
+});
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
await this.preferenceService.set('mcp', { mcpServers: servers });
if (prev?.enabled) {
await this.sumiMCPServerBackendProxy.$removeServer(prev.name);
this.sumiMCPServerBackendProxy.$removeServer(prev.name);
}
await this.preferenceService.set('mcp', { mcpServers: servers });
this.sumiMCPServerBackendProxy.$addOrUpdateServer(data as MCPServerDescription);
await this.preferenceService.set('mcp', { mcpServers: servers });
if (prev?.enabled) {
this.sumiMCPServerBackendProxy.$removeServer(prev.name).catch(error => {
this.logger.error(`无法移除服务器 ${prev.name}:`, error);
this.messageService.error(
localize('ai.native.mcp.error.remove.server', '无法移除服务器: {0}', error.message || error)
);
});
}
this.sumiMCPServerBackendProxy.$addOrUpdateServer(data as MCPServerDescription).catch(error => {
this.logger.error(`无法添加或更新服务器 ${data.name}:`, error);
this.messageService.error(
localize('ai.native.mcp.error.add.server', '无法添加或更新服务器: {0}', error.message || error)
);
});
🤖 Prompt for AI Agents (early access)
In packages/ai-native/src/browser/mcp/config/mcp-config.service.ts around lines
207 to 211, the code updates preferences first and then calls backend server
operations asynchronously without awaiting them, which improves UI
responsiveness but loses error handling for backend failures. Modify the code to
add try-catch blocks around the backend calls to $removeServer and
$addOrUpdateServer, and handle or log any errors to ensure backend operation
failures are captured and can be addressed.

}

async deleteServer(serverName: string): Promise<void> {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import { Autowired, Injectable } from '@opensumi/di';
import {
CodeSchemaId,
Domain,
IJSONSchema,
IJSONSchemaRegistry,
JsonSchemaContribution,
MaybePromise,
Expand Down
11 changes: 4 additions & 7 deletions packages/ai-native/src/common/prompts/context-prompt-provider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,11 +23,12 @@ export class DefaultChatAgentPromptProvider implements ChatAgentPromptProvider {
protected readonly workspaceService: IWorkspaceService;

async provideContextPrompt(context: SerializedContext, userMessage: string) {
const currentFileInfo = await this.getCurrentFileInfo();
const currentFileInfo =
context.attachedFiles.length > 0 || context.attachedFolders.length > 0 ? null : await this.getCurrentFileInfo();

return this.buildPromptTemplate({
recentFiles: this.buildRecentFilesSection(context.recentlyViewFiles),
attachedFiles: this.buildAttachedFilesSection(context.attachedFiles, context.recentlyViewFiles),
attachedFiles: this.buildAttachedFilesSection(context.attachedFiles),
attachedFolders: this.buildAttachedFoldersSection(context.attachedFolders),
currentFile: currentFileInfo,
userMessage,
Expand Down Expand Up @@ -91,11 +92,7 @@ ${files.map((file, idx) => ` ${idx + 1}: ${file}`).join('\n')}
</recently_viewed_files>`;
}

private buildAttachedFilesSection(
files: { path: string; content: string; lineErrors: string[] }[],
recentlyViewFiles: string[],
): string {
files = files.filter((file) => !recentlyViewFiles.includes(file.path));
private buildAttachedFilesSection(files: { path: string; content: string; lineErrors: string[] }[]): string {
if (!files.length) {
return '';
}
Expand Down
4 changes: 2 additions & 2 deletions packages/ai-native/src/node/mcp-server.stdio.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ export class StdioMCPServer implements IMCPServer {
private readonly logger?: ILogger,
) {
this.name = name;
this.command = command;
this.command = command === 'node' ? process.env.NODE_BINARY_PATH || 'node' : command;
Comment thread
coderabbitai[bot] marked this conversation as resolved.
this.args = args;
this.env = env;
}
Expand Down Expand Up @@ -132,7 +132,7 @@ export class StdioMCPServer implements IMCPServer {
}

update(command: string, args?: string[], env?: { [key: string]: string }): void {
this.command = command;
this.command = command === 'node' ? process.env.NODE_BINARY_PATH || 'node' : command;
this.args = args;
this.env = env;
}
Expand Down
4 changes: 2 additions & 2 deletions packages/components/src/style/variable.less
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,8 @@
@zindex-back-top: 10;
@zindex-picker-panel: 10;
@zindex-popup-close: 10;
@zindex-modal: 1000;
@zindex-modal-mask: 1000;
@zindex-modal: 10000;
@zindex-modal-mask: 10000;
@zindex-message: 1010;
@zindex-notification: 1010;
@zindex-popover: 1030;
Expand Down
5 changes: 4 additions & 1 deletion packages/core-browser/src/progress/progress-bar.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,13 @@ import styles from './progress.module.less';

import { IProgressModel } from '.';

export const ProgressBar: React.FC<{ progressModel: IProgressModel; className?: string }> = ({
export const ProgressBar: React.FC<{ progressModel?: IProgressModel; className?: string }> = ({
progressModel,
className,
}) => {
if (!progressModel) {
return null;
}
const worked = useAutorun<number>(progressModel.worked);
const total = useAutorun<number | undefined>(progressModel.total);
const show = useAutorun<boolean>(progressModel.show);
Expand Down
2 changes: 2 additions & 0 deletions packages/i18n/src/common/en-US.lang.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1466,6 +1466,8 @@ export const localizationBundle = {
'aiNative.chat.clearContext': 'Clear Context',
'aiNative.chat.context.description': 'Total {0} References',
'aiNative.chat.context.clear': 'Clear References',
'aiNative.chat.modelSelector.disableTip': 'Clear or create session to change model',

'aiNative.inline.chat.operate.chat.title': 'Chat({0})',
'aiNative.inline.chat.operate.check.title': 'Check',
'aiNative.inline.chat.operate.thumbsup.title': 'Thumbs up',
Expand Down
1 change: 1 addition & 0 deletions packages/i18n/src/common/zh-CN.lang.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1234,6 +1234,7 @@ export const localizationBundle = {
'aiNative.chat.clearContext': '清空上下文',
'aiNative.chat.context.description': '共 {0} 个引用',
'aiNative.chat.context.clear': '点击清空引用',
'aiNative.chat.modelSelector.disableTip': '如需切换模型,请新建或清空会话',

'aiNative.inline.chat.operate.chat.title': 'Chat({0})',
'aiNative.inline.chat.operate.check.title': '采纳',
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@ export const AccordionSection = ({
style={bodyStyle}
ref={contentRef}
>
{<ProgressBar className={styles.progressBar} progressModel={indicator!.progressModel} />}
{<ProgressBar className={styles.progressBar} progressModel={indicator?.progressModel} />}
<ErrorBoundary>
{metadata.message && <div className={styles.kt_split_panel_message}>{metadata.message}</div>}
<Component
Expand Down
4 changes: 2 additions & 2 deletions packages/main-layout/src/browser/tabbar/bar.view.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ import { InlineMenuBar } from '@opensumi/ide-core-browser/lib/components/actions
import { Layout } from '@opensumi/ide-core-browser/lib/components/layout/layout';
import { VIEW_CONTAINERS } from '@opensumi/ide-core-browser/lib/layout/view-id';
import { IProgressService } from '@opensumi/ide-core-browser/lib/progress';
import { observableValue } from '@opensumi/ide-monaco/lib/common/observable';

import { IMainLayoutService } from '../../common';

Expand All @@ -25,7 +26,6 @@ import styles from './styles.module.less';
import { TabbarService, TabbarServiceFactory } from './tabbar.service';

import type { ViewBadge } from 'vscode';

function splitVisibleTabs(containers: ComponentRegistryInfo[], visibleCount: number) {
if (visibleCount >= containers.length) {
return [containers, []];
Expand Down Expand Up @@ -246,7 +246,7 @@ export const IconTabView: FC<{ component: ComponentRegistryProvider }> = ({ comp
const [component, setComponent] = useState<ComponentRegistryProvider>(defaultComponent);
const indicator = progressService.getIndicator(component.options?.containerId || '');

const inProgress = useAutorun(indicator!.progressModel.show);
const inProgress = indicator ? useAutorun(indicator.progressModel.show) : false;

const title = useMemo(() => {
const options = component.options;
Expand Down
5 changes: 4 additions & 1 deletion packages/quick-open/src/browser/quick-open.view.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -403,13 +403,16 @@ export const QuickOpenProgress = () => {
const progressService: IProgressService = useInjectable(IProgressService);
const indicator = progressService.getIndicator(VIEW_CONTAINERS.QUICKPICK_PROGRESS);

if (!indicator) {
return null;
}
React.useEffect(() => {
widget.updateProgressStatus(!!busy);
}, [busy]);

return (
<div id={VIEW_CONTAINERS.QUICKPICK_PROGRESS} className={styles.progress_bar}>
<ProgressBar progressModel={indicator!.progressModel} />
<ProgressBar progressModel={indicator.progressModel} />
</div>
);
};
Expand Down
Loading