-
-
-
+
+
+
- {{ selectedModel.label }}
-
+
-
-
-
-
我是你的开发小助手
-
智能对话
-
- 需要一个注册表单?
- 如何将表单嵌进我的网站?
-
-
-
-
-
-
-
-
-
-
-
- {{ item.content }}
-
-
-
-
-
-
-
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
@@ -113,26 +73,37 @@
+
+
diff --git a/packages/plugins/robot/src/mcp/McpServer.vue b/packages/plugins/robot/src/mcp/McpServer.vue
new file mode 100644
index 0000000000..28d403a041
--- /dev/null
+++ b/packages/plugins/robot/src/mcp/McpServer.vue
@@ -0,0 +1,179 @@
+
+
+
+
+ MCP
+ {{ activeCount }}
+
+
+
+
+
+
+
+
diff --git a/packages/plugins/robot/src/mcp/types.ts b/packages/plugins/robot/src/mcp/types.ts
new file mode 100644
index 0000000000..6304732af3
--- /dev/null
+++ b/packages/plugins/robot/src/mcp/types.ts
@@ -0,0 +1,87 @@
+import type { BubbleContentItem } from '@opentiny/tiny-robot'
+
+export interface RequestOptions {
+ url?: string
+ model?: string
+ headers?: Record
+}
+
/**
 * OpenAI-style "function tool" definition sent in the LLM request body.
 */
export interface RequestTool {
  type: 'function'
  function: {
    name: string
    description: string
    /** JSON-Schema-like description of the tool's arguments. */
    parameters: {
      type: 'object'
      required?: string[]
      properties: Record<
        string,
        {
          type: string
          description: string
          // Extra schema keywords (enum, items, …) are passed through untyped.
          [prop: string]: unknown
        }
      >
    }
  }
}
+
/** Message in the wire format sent to the LLM endpoint. */
export interface LLMMessage {
  role: string
  content: string
  // Extra wire fields (e.g. tool_call_id) are passed through untyped.
  [prop: string]: unknown
}

/** Message as rendered by tiny-robot: content may be rich bubble items. */
export interface RobotMessage {
  role: string
  content: string | BubbleContentItem[]
  [prop: string]: unknown
}

/** Request body for the chat-completions endpoint. */
export interface LLMRequestBody {
  model?: string
  stream: boolean
  messages: LLMMessage[]
  tools?: RequestTool[]
}
+
/**
 * A tool call requested by the LLM; `arguments` is a JSON-encoded string.
 * NOTE(review): name is misspelled — should be "ResponseToolCall". Renaming
 * requires updating importers (utils.ts) in the same change, so it is left
 * as-is here.
 */
export interface ReponseToolCall {
  id: string
  function: {
    name: string
    arguments: string
  }
}
+
+export interface LLMResponse {
+ choices: Array<{
+ message: {
+ role?: string
+ content: string
+ tool_calls?: Array
+ [prop: string]: unknown
+ }
+ }>
+}
+
/**
 * Tool descriptor as reported by an MCP server (tools/list).
 */
export interface McpTool {
  name: string
  description: string
  /** JSON Schema for the tool's input arguments. */
  inputSchema?: {
    type: 'object'
    properties: Record<
      string,
      {
        type: string
        description: string
        [prop: string]: unknown
      }
    >
    [prop: string]: unknown
  }
  [prop: string]: unknown
}
+
+export interface McpListToolsResponse {
+ tools: Array
+}
diff --git a/packages/plugins/robot/src/mcp/useMcp.ts b/packages/plugins/robot/src/mcp/useMcp.ts
new file mode 100644
index 0000000000..e88ea9b7af
--- /dev/null
+++ b/packages/plugins/robot/src/mcp/useMcp.ts
@@ -0,0 +1,185 @@
+import { computed, ref } from 'vue'
+import type { PluginInfo, PluginTool } from '@opentiny/tiny-robot'
+import { getMetaApi, META_SERVICE } from '@opentiny/tiny-engine-meta-register'
+import type { McpListToolsResponse, McpTool, RequestTool } from './types'
+
// Built-in MCP server backed by the TinyEngine designer itself.
const ENGINE_MCP_SERVER: PluginInfo = {
  id: 'tiny-engine-mcp-server',
  name: 'Tiny Engine MCP 服务器',
  icon: 'https://res.hc-cdn.com/lowcode-portal/1.1.80.20250515160330/assets/opentiny-tinyengine-logo-4f8a3801.svg',
  description: '使用TinyEngine设计器能力,如添加国际化',
  added: true
}

// Demo placeholder servers ("Mock" in their names); connect logic is a TODO
// stub below, so these are presumably never actually connected — confirm.
const MOCK_SERVERS: PluginInfo[] = [
  {
    id: 'plugin-1',
    name: 'Jira 集成 (Mock)',
    icon: 'https://ts3.tc.mm.bing.net/th/id/ODLS.2a97aa8b-50c6-4e00-af97-3b563dfa07f4',
    description: 'Jira 任务管理',
    enabled: true,
    added: false,
    tools: [
      { id: 'tool-5', name: '创建任务', description: '创建 Jira 任务', enabled: true },
      { id: 'tool-6', name: '查询任务', description: '查询 Jira 任务', enabled: true }
    ]
  },
  {
    id: 'plugin-2',
    name: 'Notion 集成 (Mock)',
    icon: 'https://www.notion.so/front-static/favicon.ico',
    description: 'Notion 文档管理和协作',
    enabled: false,
    added: false,
    tools: [
      { id: 'tool-7', name: '创建页面', description: '创建 Notion 页面', enabled: false },
      { id: 'tool-8', name: '查询数据库', description: '查询 Notion 数据库', enabled: false }
    ]
  },
  {
    id: 'plugin-3',
    name: 'Telegram 机器人 (Mock)',
    icon: 'https://telegram.org/favicon.ico',
    description: 'Telegram 消息推送和自动化',
    enabled: false,
    added: false,
    tools: [{ id: 'tool-9', name: '发送消息', description: '发送 Telegram 消息', enabled: false }]
  }
]

// Full catalogue shown in the server list.
const mcpServers = ref([ENGINE_MCP_SERVER, ...MOCK_SERVERS])

// Servers the user has added. Module-scoped refs: every useMcpServer()
// consumer shares this same state.
const inUseMcpServers = ref([
  { ...ENGINE_MCP_SERVER, enabled: true, expanded: false, tools: [], toolCount: 0 }
])
+
+const updateServerTools = (serverId: string, tools: PluginTool[]) => {
+ const mcpServer = inUseMcpServers.value.find((item) => item.id === serverId)
+ if (mcpServer) {
+ mcpServer.tools = tools
+ mcpServer.toolCount = tools.length
+ }
+}
+
+const updateEngineTools = async () => {
+ const tools: Array<{ name: string; description: string; status: string }> =
+ (await getMetaApi(META_SERVICE.McpService)?.getToolList?.()) || []
+ const engineTools = tools.map((tool) => ({
+ id: tool.name,
+ name: tool.name,
+ description: tool.description,
+ enabled: tool.status === 'enabled'
+ }))
+ updateServerTools(ENGINE_MCP_SERVER.id, engineTools)
+}
+
+const convertMCPToOpenAITools = (mcpTools: McpTool[]): RequestTool[] => {
+ return mcpTools.map((tool: McpTool) => ({
+ type: 'function',
+ function: {
+ name: tool.name,
+ description: tool.description || '',
+ parameters: {
+ type: 'object',
+ properties: Object.fromEntries(
+ Object.entries(tool.inputSchema?.properties || {}).map(([key, prop]: [string, any]) => [key, { ...prop }])
+ ),
+ required: tool.inputSchema?.required || []
+ }
+ }
+ })) as RequestTool[]
+}
+
+const getEngineServer = () => {
+ return inUseMcpServers.value.find((item) => item.id === ENGINE_MCP_SERVER.id)
+}
+
+const isToolsEnabled = computed(() => getEngineServer()?.tools?.some((tool) => tool.enabled))
+
+const updateEngineServerToolStatus = (toolId: string, enabled: boolean) => {
+ getMetaApi(META_SERVICE.McpService)?.updateTool?.(toolId, { enabled })
+}
+
+const updateEngineServer = (engineServer: PluginInfo, enabled: boolean) => {
+ engineServer?.tools?.forEach((tool) => {
+ tool.enabled = enabled
+ updateEngineServerToolStatus(tool.id, enabled)
+ })
+}
+
// TODO: establish a connection to the MCP server
const connectMcpServer = (_server: PluginInfo) => {}

// TODO: tear down the MCP server connection
const disconnectMcpServer = (_server: PluginInfo) => {}
+
+const updateMcpServerStatus = async (server: PluginInfo, added: boolean) => {
+ if (added) {
+ const newServer: PluginInfo = {
+ ...server,
+ id: server.id || `mcp-server-${Date.now()}`,
+ enabled: true,
+ added: true,
+ expanded: false,
+ tools: server.tools || [],
+ toolCount: server.tools?.length || 0
+ }
+ inUseMcpServers.value.push(newServer)
+ if (server.id === ENGINE_MCP_SERVER.id) {
+ await updateEngineTools()
+ updateEngineServer(newServer, added)
+ }
+ // TODO: 连接MCP Server
+ connectMcpServer(newServer)
+ } else {
+ const index = inUseMcpServers.value.findIndex((p) => p.id === server.id)
+ if (index > -1) {
+ updateEngineServer(inUseMcpServers.value[index], added)
+ inUseMcpServers.value.splice(index, 1)
+ // TODO: 断开连接
+ disconnectMcpServer(server)
+ }
+ }
+}
+
+const updateMcpServerToolStatus = (currentServer: PluginInfo, toolId: string, enabled: boolean) => {
+ const tool = currentServer.tools?.find((t: PluginTool) => t.id === toolId)
+ if (tool) {
+ tool.enabled = enabled
+ if (currentServer.id === ENGINE_MCP_SERVER.id) {
+ updateEngineServerToolStatus(toolId, enabled)
+ } else {
+ // TODO: 更新MCP Server的Tool状态
+ // 获取 tool 实例调用 enableTool 或 disableTool
+ }
+ }
+}
+
+const refreshMcpServerTools = () => {
+ updateEngineTools()
+}
+
+const listTools = async (): Promise =>
+ getMetaApi(META_SERVICE.McpService)?.getMcpClient()?.listTools()
+
+const callTool = async (toolId: string, args: Record) =>
+ getMetaApi(META_SERVICE.McpService)?.getMcpClient()?.callTool({ name: toolId, arguments: args }) || {}
+
+const getLLMTools = async () => {
+ const mcpTools = await listTools()
+ return convertMCPToOpenAITools(mcpTools?.tools || [])
+}
+
/**
 * Composable exposing the shared MCP server state and operations.
 * All state lives at module scope, so every consumer shares the same refs.
 */
export default function useMcpServer() {
  return {
    mcpServers,
    inUseMcpServers,
    refreshMcpServerTools,
    updateMcpServerStatus,
    updateMcpServerToolStatus,
    listTools,
    callTool,
    getLLMTools,
    isToolsEnabled
  }
}
diff --git a/packages/plugins/robot/src/mcp/utils.ts b/packages/plugins/robot/src/mcp/utils.ts
new file mode 100644
index 0000000000..5ef759a8d2
--- /dev/null
+++ b/packages/plugins/robot/src/mcp/utils.ts
@@ -0,0 +1,113 @@
+import { toRaw } from 'vue'
+import useMcpServer from './useMcp'
+import type { LLMMessage, RobotMessage } from './types'
+import type { LLMRequestBody, LLMResponse, ReponseToolCall, RequestOptions, RequestTool } from './types'
+
+let requestOptions: RequestOptions = {}
+
+const fetchLLM = async (messages: LLMMessage[], tools: RequestTool[], options: RequestOptions = requestOptions) => {
+ const bodyObj: LLMRequestBody = {
+ model: options?.model || 'deepseek-chat',
+ stream: false,
+ messages: toRaw(messages)
+ }
+ if (tools.length > 0) {
+ bodyObj.tools = toRaw(tools)
+ }
+ return fetch(options?.url || '/app-center/api/chat/completions', {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ ...options?.headers
+ },
+ body: JSON.stringify(bodyObj)
+ })
+}
+
+const parseArgs = (args: string) => {
+ try {
+ return JSON.parse(args)
+ } catch (error) {
+ return args
+ }
+}
+
/**
 * Process an LLM response that may request tool calls.
 *
 * Appends rendered content (markdown + tool cards) onto the LAST entry of
 * `messages` (mutated in place for the chat UI), executes each requested MCP
 * tool via useMcpServer().callTool, feeds the results back to the LLM, and
 * recurses while the model keeps requesting tools.
 *
 * @param res             current LLM response being processed
 * @param tools           tool definitions resent on follow-up requests
 * @param messages        UI message list; last entry receives the output
 * @param contextMessages accumulated wire-format history used on recursion
 */
const handleToolCall = async (
  res: LLMResponse,
  tools: RequestTool[],
  messages: RobotMessage[],
  contextMessages?: RobotMessage[]
) => {
  if (messages.length < 1) {
    return
  }
  const currentMessage = messages.at(-1)!
  // Normalize the UI message content into a rich-content array.
  if (typeof currentMessage.content === 'string' || !currentMessage.content) {
    currentMessage.content = []
  }
  if (res.choices[0].message.content) {
    currentMessage.content.push({
      type: 'markdown',
      content: res.choices[0].message.content
    })
  }
  const tool_calls: ReponseToolCall[] | undefined = res.choices[0].message.tool_calls
  if (tool_calls && tool_calls.length) {
    // First level: use the caller's history minus the placeholder last
    // message; on recursion reuse the accumulated context instead.
    const historyMessages = contextMessages?.length ? contextMessages : toRaw(messages.slice(0, -1))
    const toolMessages: LLMMessage[] = [...historyMessages, res.choices[0].message] as LLMMessage[]
    for (const tool of tool_calls) {
      const { name, arguments: args } = tool.function
      const parsedArgs = parseArgs(args)
      // Show a "running" tool card immediately; updated to success below.
      const currentToolMessage = {
        type: 'tool',
        name,
        status: 'running',
        content: {
          params: parsedArgs
        },
        formatPretty: true
      }
      currentMessage.content.push(currentToolMessage)
      // Tools run sequentially — later calls may depend on earlier results.
      const toolCallResult = await useMcpServer().callTool(name, parsedArgs)
      toolMessages.push({
        type: 'text',
        content: toolCallResult.content,
        role: 'tool',
        tool_call_id: tool.id
      })

      // NOTE(review): status becomes 'success' unconditionally — callTool
      // failures are not detected here; confirm error handling upstream.
      currentMessage.content.at(-1)!.status = 'success'
      currentMessage.content.at(-1)!.content = {
        params: parsedArgs,
        result: toolCallResult.content
      }
    }
    const newResp = await fetchLLM(toolMessages, tools).then((res) => res.json())
    // NOTE(review): '?.length > 0' compares possibly-undefined — rejected
    // under strictNullChecks; prefer '(… ?? 0) > 0'.
    const hasToolCall = newResp.choices[0].message.tool_calls?.length > 0
    if (hasToolCall) {
      await handleToolCall(newResp, tools, messages, toolMessages)
    } else {
      if (newResp.choices[0].message.content) {
        currentMessage.content.push({
          type: 'markdown',
          content: newResp.choices[0].message.content
        })
      }
    }
  }
}
+
+export const sendMcpRequest = async (messages: LLMMessage[], options: RequestOptions = {}) => {
+ if (messages.length < 1) {
+ return
+ }
+ const tools = await useMcpServer().getLLMTools()
+ requestOptions = options
+ const res = await fetchLLM(messages.slice(0, -1), tools, options).then((res) => res.json())
+ const hasToolCall = res.choices[0].message.tool_calls?.length > 0
+ if (hasToolCall) {
+ await handleToolCall(res, tools, messages)
+ } else {
+ messages.at(-1)!.content = res.choices[0].message.content
+ }
+}
diff --git a/packages/register/src/constants.ts b/packages/register/src/constants.ts
index d200a3d724..d85e3ff2be 100644
--- a/packages/register/src/constants.ts
+++ b/packages/register/src/constants.ts
@@ -18,7 +18,8 @@ export const META_SERVICE = {
Property: 'engine.service.property',
Properties: 'engine.service.properties',
ThemeSwitch: 'engine.service.themeSwitch',
- Style: 'engine.service.style'
+ Style: 'engine.service.style',
+ McpService: 'engine.service.mcpService'
}
export const META_APP = {
diff --git a/packages/register/src/service.ts b/packages/register/src/service.ts
index 0161c56652..54b5bcb8b3 100644
--- a/packages/register/src/service.ts
+++ b/packages/register/src/service.ts
@@ -8,7 +8,7 @@ interface Context {
options: K
}
-interface ServiceOptions {
+export interface ServiceOptions {
id: string
type: 'MetaService'
initialState: T