diff --git a/apps/models/llm.py b/apps/models/llm.py
index 0c44797dbae3b3270312b6a0bedd51cdb3b82c74..e67a6b04699f1c516ad72719cd7b972e9bb4abde 100644
--- a/apps/models/llm.py
+++ b/apps/models/llm.py
@@ -1,8 +1,10 @@
 """大模型信息 数据库表"""
 
+import uuid
 from datetime import UTC, datetime
 
-from sqlalchemy import BigInteger, DateTime, ForeignKey, Integer, String
+from sqlalchemy import DateTime, Integer, String
+from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.orm import Mapped, mapped_column
 
 from apps.common.config import config
@@ -15,10 +17,8 @@ class LLMData(Base):
     """大模型信息"""
 
     __tablename__ = "framework_llm"
-    id: Mapped[int] = mapped_column(BigInteger, primary_key=True, autoincrement=True, init=False)
-    """主键ID"""
-    userSub: Mapped[str] = mapped_column(ForeignKey("framework_user.userSub"), index=True, nullable=False)  # noqa: N815
-    """添加LLM所属的用户"""
+    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default_factory=uuid.uuid4)
+    """大模型ID"""
     icon: Mapped[str] = mapped_column(String(1000), default=llm_provider_dict["ollama"]["icon"], nullable=False)
     """LLM图标路径"""
     openaiBaseUrl: Mapped[str] = mapped_column(String(300), default=config.llm.endpoint, nullable=False)  # noqa: N815
diff --git a/apps/models/user.py b/apps/models/user.py
index ed598c69b219240cbded7fc9b1af23fa801c2e1c..d63564e114c86959353f1f918c1b8be012b31c5a 100644
--- a/apps/models/user.py
+++ b/apps/models/user.py
@@ -36,7 +36,7 @@ class User(Base):
     """用户个人令牌"""
     selectedKB: Mapped[list[uuid.UUID]] = mapped_column(ARRAY(UUID), default=[], nullable=False)  # noqa: N815
     """用户选择的知识库的ID"""
-    selectedLLM: Mapped[int | None] = mapped_column(BigInteger, default=None, nullable=True)  # noqa: N815
-    """用户选择的大模型ID"""
+    defaultLLM: Mapped[uuid.UUID | None] = mapped_column(UUID(as_uuid=True), default=None, nullable=True)  # noqa: N815
+    """用户默认使用的大模型ID"""
diff --git a/apps/routers/conversation.py b/apps/routers/conversation.py
index f05449346e89858607df6f0ae9a8de68d6e96098..2f49f803733c105362e2ba36cae2032f58d6ee77 100644
--- a/apps/routers/conversation.py
+++ b/apps/routers/conversation.py
@@ -12,8 +12,8 @@ from fastapi.responses import JSONResponse
 from apps.dependency import verify_personal_token, verify_session
 from apps.models.conversation import Conversation
 from apps.schemas.request_data import (
-    DeleteConversationData,
     ChangeConversationData,
+    DeleteConversationData,
 )
 from apps.schemas.response_data import (
     AddConversationMsg,
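Note on the model changes above: `framework_llm.id` moves from a BigInteger autoincrement key to a UUID key (and loses `userSub`), and `framework_user.selectedLLM` is renamed to `defaultLLM` with a type change, so existing deployments need a schema migration that this patch does not ship. The sketch below is illustrative only: it assumes Alembic is in use, that old integer IDs are discarded rather than mapped, and that the server runs PostgreSQL 13+; everything except the table and column names is hypothetical.

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects.postgresql import UUID

def upgrade() -> None:
    # framework_llm: drop per-user ownership and rebuild the primary key
    # as a UUID; existing rows receive fresh random UUIDs.
    op.drop_column("framework_llm", "userSub")
    op.drop_column("framework_llm", "id")
    op.add_column(
        "framework_llm",
        sa.Column("id", UUID(as_uuid=True), nullable=False,
                  server_default=sa.text("gen_random_uuid()")),  # PostgreSQL 13+
    )
    op.create_primary_key("framework_llm_pkey", "framework_llm", ["id"])
    # framework_user: old BigInteger values cannot be mapped to the new
    # UUIDs, so the column is recreated empty.
    op.drop_column("framework_user", "selectedLLM")
    op.add_column("framework_user", sa.Column("defaultLLM", UUID(as_uuid=True), nullable=True))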
"""FastAPI 大模型相关接口""" +import uuid from typing import Annotated from fastapi import APIRouter, Depends, Query, Request, status @@ -36,9 +37,8 @@ admin_router = APIRouter( ) -@admin_router.get("/provider", response_model=ListLLMProviderRsp, responses={ - status.HTTP_404_NOT_FOUND: {"model": ResponseData}, - }, +@admin_router.get("/provider", response_model=ListLLMProviderRsp, + responses={status.HTTP_404_NOT_FOUND: {"model": ResponseData}}, ) async def list_llm_provider() -> JSONResponse: """获取大模型提供商列表""" @@ -53,16 +53,12 @@ async def list_llm_provider() -> JSONResponse: ) -@router.get("", response_model=ListLLMRsp, responses={ - status.HTTP_404_NOT_FOUND: {"model": ResponseData}, - }, +@router.get("", response_model=ListLLMRsp, + responses={status.HTTP_404_NOT_FOUND: {"model": ResponseData}}, ) -async def list_llm( - request: Request, - llm_id: Annotated[str | None, Query(description="大模型ID", alias="llmId")] = None, -) -> JSONResponse: +async def list_llm(llmId: uuid.UUID | None = None) -> JSONResponse: # noqa: N803 """获取大模型列表""" - llm_list = await LLMManager.list_llm(request.state.user_sub, llm_id) + llm_list = await LLMManager.list_llm(llmId) return JSONResponse( status_code=status.HTTP_200_OK, content=ListLLMRsp( @@ -73,17 +69,15 @@ async def list_llm( ) -@admin_router.put("", responses={ - status.HTTP_404_NOT_FOUND: {"model": ResponseData}, - }, +@admin_router.put("", + responses={status.HTTP_404_NOT_FOUND: {"model": ResponseData}}, ) async def create_llm( - request: Request, req: UpdateLLMReq, - llm_id: Annotated[str | None, Query(description="大模型ID", alias="llmId")] = None, + llmId: uuid.UUID | None = None, # noqa: N803 ) -> JSONResponse: """创建或更新大模型配置""" - llm_id = await LLMManager.update_llm(request.state.user_sub, llm_id, req) + await LLMManager.update_llm(llmId, req) return JSONResponse( status_code=status.HTTP_200_OK, content=ResponseData( @@ -94,46 +88,46 @@ async def create_llm( ) -@admin_router.delete( - "", - responses={ - status.HTTP_404_NOT_FOUND: {"model": ResponseData}, - }, +@admin_router.delete("", + responses={status.HTTP_404_NOT_FOUND: {"model": ResponseData}}, ) -async def delete_llm( - request: Request, - llm_id: Annotated[str, Query(description="大模型ID", alias="llmId")], -) -> JSONResponse: +async def delete_llm(request: Request, llmId: uuid.UUID) -> JSONResponse: # noqa: N803 """删除大模型配置""" - await LLMManager.delete_llm(request.state.user_sub, llm_id) + await LLMManager.delete_llm(request.state.user_sub, llmId) return JSONResponse( status_code=status.HTTP_200_OK, content=ResponseData( code=status.HTTP_200_OK, message="success", - result=llm_id, + result=llmId, ).model_dump(exclude_none=True, by_alias=True), ) -@router.put( - "/conv", - responses={ - status.HTTP_404_NOT_FOUND: {"model": ResponseData}, - }, +@router.put("/conv", + responses={status.HTTP_404_NOT_FOUND: {"model": ResponseData}}, ) async def update_user_llm( request: Request, - conversation_id: Annotated[str, Query(description="对话ID", alias="conversationId")], - llm_id: Annotated[str, Query(description="llm ID", alias="llmId")] = "empty", + llmId: uuid.UUID, # noqa: N803 ) -> JSONResponse: """更新用户所选的大模型""" - llm_id = await LLMManager.update_user_llm(request.state.user_sub, conversation_id, llm_id) + try: + await LLMManager.update_user_default_llm(request.state.user_sub, llmId) + except ValueError as e: + return JSONResponse( + status_code=status.HTTP_400_BAD_REQUEST, + content=ResponseData( + code=status.HTTP_400_BAD_REQUEST, + message=str(e), + result=None, + ).model_dump(exclude_none=True, 
by_alias=True), + ) return JSONResponse( status_code=status.HTTP_200_OK, content=ResponseData( code=status.HTTP_200_OK, message="success", - result=llm_id, + result=llmId, ).model_dump(exclude_none=True, by_alias=True), ) diff --git a/apps/scheduler/mcp/host.py b/apps/scheduler/mcp/host.py index 7e215ec0e3d1b031d240c4ef12cdafef76ad136d..4b104e9d49c98a657dcd84dee40b6f9c4df0b752 100644 --- a/apps/scheduler/mcp/host.py +++ b/apps/scheduler/mcp/host.py @@ -101,7 +101,7 @@ class MCPHost: task_id=self._task_id, flow_id=self._runtime_id, flow_name=self._runtime_name, - step_id=tool.toolId, + step_id=tool.id, step_name=tool.toolName, # description是规划的实际内容 step_description=plan_item.content, diff --git a/apps/scheduler/mcp/plan.py b/apps/scheduler/mcp/plan.py index 50224567ed9e4f0081d7a55f324f76fd7894ff6b..74f5eb8330ddaeb50929ccfb4365dad163ba1911 100644 --- a/apps/scheduler/mcp/plan.py +++ b/apps/scheduler/mcp/plan.py @@ -6,7 +6,8 @@ from jinja2.sandbox import SandboxedEnvironment from apps.llm.function import JsonGenerator from apps.llm.reasoning import ReasoningLLM -from apps.schemas.mcp import MCPPlan, MCPTool +from apps.models.mcp import MCPTools +from apps.schemas.mcp import MCPPlan from .prompt import CREATE_PLAN, FINAL_ANSWER @@ -27,7 +28,7 @@ class MCPPlanner: self.output_tokens = 0 - async def create_plan(self, tool_list: list[MCPTool], max_steps: int = 6) -> MCPPlan: + async def create_plan(self, tool_list: list[MCPTools], max_steps: int = 6) -> MCPPlan: """规划下一步的执行流程,并输出""" # 获取推理结果 result = await self._get_reasoning_plan(tool_list, max_steps) @@ -36,7 +37,7 @@ class MCPPlanner: return await self._parse_plan_result(result, max_steps) - async def _get_reasoning_plan(self, tool_list: list[MCPTool], max_steps: int) -> str: + async def _get_reasoning_plan(self, tool_list: list[MCPTools], max_steps: int) -> str: """获取推理大模型的结果""" # 格式化Prompt template = self._env.from_string(CREATE_PLAN) diff --git a/apps/scheduler/pool/check.py b/apps/scheduler/pool/check.py index b3cf1852b2d01982d4932296b49ec56cfc6853d2..2e1d5f0e571076d7732b1888ccbbc4e638dc606e 100644 --- a/apps/scheduler/pool/check.py +++ b/apps/scheduler/pool/check.py @@ -2,12 +2,16 @@ """文件检查器""" import logging +import uuid from hashlib import sha256 from anyio import Path +from sqlalchemy import select from apps.common.config import config -from apps.common.mongo import MongoDB +from apps.common.postgres import postgres +from apps.models.app import App, AppHashes +from apps.models.service import Service, ServiceHashes from apps.schemas.enum_var import MetadataType logger = logging.getLogger(__name__) @@ -41,57 +45,62 @@ class FileChecker: return hashes - async def diff_one(self, path: Path, previous_hashes: dict[str, str] | None = None) -> bool: + async def diff_one(self, path: Path, previous_hashes: AppHashes | ServiceHashes | None = None) -> bool: """检查文件是否发生变化""" self._resource_path = path semantics_path = Path(config.deploy.data_dir) / "semantics" path_diff = self._resource_path.relative_to(semantics_path) + # FIXME 不能使用字典比对,必须一条条比对 self.hashes[path_diff.as_posix()] = await self.check_one(path) return self.hashes[path_diff.as_posix()] != previous_hashes - async def diff(self, check_type: MetadataType) -> tuple[list[str], list[str]]: + async def diff(self, check_type: MetadataType) -> tuple[list[uuid.UUID], list[uuid.UUID]]: """生成更新列表和删除列表""" - if check_type == MetadataType.APP: - collection = MongoDB().get_collection("app") - self._dir_path = Path(config.deploy.data_dir) / "semantics" / "app" - elif check_type == MetadataType.SERVICE: 
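Two behavioural notes on the rewritten router. First, a bare `llmId: uuid.UUID | None = None` parameter is read from the query string and validated by FastAPI itself: malformed values are rejected with 422 before the handler runs. Second, `uuid.UUID` values must be stringified (as in `result=str(llmId)` above) before reaching `JSONResponse`, because `model_dump()` without `mode="json"` keeps them as UUID objects that `json.dumps` cannot serialize. A self-contained sketch, not taken from the repo:

import uuid

from fastapi import FastAPI
from fastapi.testclient import TestClient

app = FastAPI()

@app.get("/demo")
async def demo(llmId: uuid.UUID | None = None) -> dict:  # noqa: N803
    # Stringify before returning, mirroring result=str(llmId) in the patch.
    return {"llmId": str(llmId) if llmId else None}

client = TestClient(app)
assert client.get("/demo").json() == {"llmId": None}
llm_id = str(uuid.uuid4())
assert client.get("/demo", params={"llmId": llm_id}).json() == {"llmId": llm_id}
assert client.get("/demo", params={"llmId": "not-a-uuid"}).status_code == 422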
diff --git a/apps/scheduler/pool/check.py b/apps/scheduler/pool/check.py
index b3cf1852b2d01982d4932296b49ec56cfc6853d2..2e1d5f0e571076d7732b1888ccbbc4e638dc606e 100644
--- a/apps/scheduler/pool/check.py
+++ b/apps/scheduler/pool/check.py
@@ -2,12 +2,16 @@
 """文件检查器"""
 
 import logging
+import uuid
 from hashlib import sha256
 
 from anyio import Path
+from sqlalchemy import select
 
 from apps.common.config import config
-from apps.common.mongo import MongoDB
+from apps.common.postgres import postgres
+from apps.models.app import App, AppHashes
+from apps.models.service import Service, ServiceHashes
 from apps.schemas.enum_var import MetadataType
 
 logger = logging.getLogger(__name__)
@@ -41,57 +45,62 @@ class FileChecker:
 
         return hashes
 
 
-    async def diff_one(self, path: Path, previous_hashes: dict[str, str] | None = None) -> bool:
+    async def diff_one(self, path: Path, previous_hashes: AppHashes | ServiceHashes | None = None) -> bool:
         """检查文件是否发生变化"""
         self._resource_path = path
         semantics_path = Path(config.deploy.data_dir) / "semantics"
         path_diff = self._resource_path.relative_to(semantics_path)
+        # FIXME 不能使用字典比对,必须一条条比对
         self.hashes[path_diff.as_posix()] = await self.check_one(path)
 
         return self.hashes[path_diff.as_posix()] != previous_hashes
 
 
-    async def diff(self, check_type: MetadataType) -> tuple[list[str], list[str]]:
+    async def diff(self, check_type: MetadataType) -> tuple[list[uuid.UUID], list[uuid.UUID]]:
         """生成更新列表和删除列表"""
-        if check_type == MetadataType.APP:
-            collection = MongoDB().get_collection("app")
-            self._dir_path = Path(config.deploy.data_dir) / "semantics" / "app"
-        elif check_type == MetadataType.SERVICE:
-            collection = MongoDB().get_collection("service")
-            self._dir_path = Path(config.deploy.data_dir) / "semantics" / "service"
-
-        changed_list = []
-        deleted_list = []
-
-        # 查询所有条目
-        try:
-            items = await collection.find({}).to_list(None)
-        except Exception as e:
-            err = f"[FileChecker] {check_type}类型数据的条目为空"
-            logger.exception(err)
-            raise RuntimeError(err) from e
-
-        # 遍历列表
-        for list_item in items:
-            # 判断是否存在?
-            if not await Path(self._dir_path / list_item["_id"]).exists():
-                deleted_list.append(list_item["_id"])
-                continue
-            # 判断是否发生变化
-            if await self.diff_one(Path(self._dir_path / list_item["_id"]), list_item.get("hashes", None)):
-                changed_list.append(list_item["_id"])
-
-        logger.info("[FileChecker] 文件变动: %s;文件删除: %s", changed_list, deleted_list)
-        # 遍历目录
-        item_names = [item["_id"] for item in items]
-        async for service_folder in self._dir_path.iterdir():
-            # 判断是否新增?
-            if (
-                service_folder.name not in item_names
-                and service_folder.name not in deleted_list
-                and service_folder.name not in changed_list
-            ):
-                changed_list += [service_folder.name]
-                # 触发一次hash计算
-                await self.diff_one(service_folder)
-
-        return changed_list, deleted_list
+        async with postgres.session() as session:
+            # 判断类型
+            if check_type == MetadataType.APP:
+                self._dir_path = Path(config.deploy.data_dir) / "semantics" / "app"
+                items = list((await session.scalars(select(App.id))).all())
+            elif check_type == MetadataType.SERVICE:
+                self._dir_path = Path(config.deploy.data_dir) / "semantics" / "service"
+                items = list((await session.scalars(select(Service.id))).all())
+
+            changed_list = []
+            deleted_list = []
+
+            # 遍历列表
+            for list_item in items:
+                # 判断是否存在?
+                if not await Path(self._dir_path / str(list_item)).exists():
+                    deleted_list.append(list_item)
+                    continue
+
+                # 获取Hash;可能尚未入库,与旧逻辑的 get("hashes", None) 保持一致
+                if check_type == MetadataType.APP:
+                    hashes = (
+                        await session.scalars(select(AppHashes).where(AppHashes.appId == list_item))
+                    ).one_or_none()
+                elif check_type == MetadataType.SERVICE:
+                    hashes = (
+                        await session.scalars(select(ServiceHashes).where(ServiceHashes.serviceId == list_item))
+                    ).one_or_none()
+                # 判断是否发生变化
+                if await self.diff_one(Path(self._dir_path / str(list_item)), hashes):
+                    changed_list.append(list_item)
+
+            logger.info("[FileChecker] 文件变动: %s;文件删除: %s", changed_list, deleted_list)
+            # 遍历目录;目录名是UUID字符串,统一按字符串比较
+            item_names = [str(item) for item in items]
+            async for service_folder in self._dir_path.iterdir():
+                # 判断是否新增?
+                if (
+                    service_folder.name not in item_names
+                    and service_folder.name not in [str(item) for item in deleted_list]
+                    and service_folder.name not in [str(item) for item in changed_list]
+                ):
+                    changed_list += [uuid.UUID(service_folder.name)]
+                    # 触发一次hash计算
+                    await self.diff_one(service_folder)
+
+        return changed_list, deleted_list
diff --git a/apps/schemas/request_data.py b/apps/schemas/request_data.py
index b765ae13cf7e4b43187f321ca80e7b6b94d4effe..8303a96da7059a354af920577d345ddcbf8c1f6a 100644
--- a/apps/schemas/request_data.py
+++ b/apps/schemas/request_data.py
@@ -15,8 +15,8 @@ from .mcp import MCPType
 class RequestDataApp(BaseModel):
     """模型对话中包含的app信息"""
 
-    app_id: str = Field(description="应用ID", alias="appId")
-    flow_id: str = Field(description="Flow ID", alias="flowId")
+    app_id: uuid.UUID = Field(description="应用ID", alias="appId")
+    flow_id: uuid.UUID = Field(description="Flow ID", alias="flowId")
     params: dict[str, Any] = Field(description="插件参数")
diff --git a/apps/schemas/response_data.py b/apps/schemas/response_data.py
index 0567adab91923ce93378ae0cd77c68e78e87643a..966f7d4b2b7f6dbc0838c5c9d89424cb5a25d6e1 100644
--- a/apps/schemas/response_data.py
+++ b/apps/schemas/response_data.py
@@ -567,7 +567,7 @@ class ListLLMProviderRsp(ResponseData):
 class LLMProviderInfo(BaseModel):
     """LLM数据结构"""
 
-    llm_id: str = Field(alias="llmId", description="LLM ID")
+    llm_id: uuid.UUID = Field(alias="llmId", description="LLM ID")
     icon: str = Field(default="", description="LLM图标", max_length=25536)
     openai_base_url: str = Field(
         default="https://api.openai.com/v1",
diff --git a/apps/services/appcenter.py b/apps/services/appcenter.py
index 3ca9365abc13338aa60bede58e20d2d60541644c..dea9a4892fe3539b1ad1573537b86a109e299d7e 100644
--- a/apps/services/appcenter.py
+++ b/apps/services/appcenter.py
@@ -341,19 +341,15 @@ class AppCenterManager:
         :param user_sub: 用户唯一标识
         :return: 最近使用的应用列表
         """
-        mongo = MongoDB()
-        user_collection = mongo.get_collection("user")
-        app_collection = mongo.get_collection("app")
-        # 校验用户信息
-        user_data = User.model_validate(await user_collection.find_one({"_id": user_sub}))
-        # 获取最近使用的应用ID列表,按最后使用时间倒序排序
-        # 允许 app_usage 为空
-        usage_list = sorted(
-            user_data.app_usage.items(),
-            key=lambda x: x[1].last_used,
-            reverse=True,
-        )[:count]
-        app_ids = [t[0] for t in usage_list]
+        # 获取最近使用的应用ID列表,按最后使用时间倒序排序,最多count条
+        async with postgres.session() as session:
+            app_ids = list((await session.scalars(
+                select(UserAppUsage.appId).where(
+                    UserAppUsage.userSub == user_sub,
+                ).order_by(
+                    UserAppUsage.lastUsed.desc(),
+                ).limit(count),
+            )).all())
         if not app_ids:
             apps = []  # 如果 app_ids 为空,直接返回空列表
         else:
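Why `FileChecker.diff` has to compare folder names as strings: the IDs now come back from PostgreSQL as `uuid.UUID` objects, while `iterdir()` yields directory names as `str`, and the two never compare equal, so an unconverted membership test would silently flag every existing folder as new. A minimal illustration of the pitfall:

import uuid

item = uuid.uuid4()          # what select(App.id) returns
folder_name = str(item)      # what service_folder.name looks like on disk

assert folder_name != item              # str vs UUID: never equal
assert folder_name not in [item]        # so this check always passes
assert folder_name in [str(item)]       # compare as strings instead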
diff --git a/apps/services/llm.py b/apps/services/llm.py
index 7143e5dfff10e5d0ad853a55ffb095305a66c811..ce005809753f0d08204664e2b483d251f320b886 100644
--- a/apps/services/llm.py
+++ b/apps/services/llm.py
@@ -2,10 +2,10 @@
 """大模型管理"""
 
 import logging
+import uuid
 
 from sqlalchemy import and_, select
 
-from apps.common.config import config
 from apps.common.postgres import postgres
 from apps.models.llm import LLMData
 from apps.models.user import User
@@ -41,7 +41,7 @@ class LLMManager:
 
 
     @staticmethod
-    async def get_llm_id_by_user_id(user_sub: str) -> int | None:
+    async def get_user_default_llm(user_sub: str) -> uuid.UUID | None:
         """
         通过用户ID获取大模型ID
 
@@ -56,11 +56,11 @@
             logger.error("[LLMManager] 用户 %s 不存在", user_sub)
             return None
 
-        return user.selectedLLM
+        return user.defaultLLM
 
 
     @staticmethod
-    async def get_llm_by_id(user_sub: str, llm_id: int) -> LLMData | None:
+    async def get_llm(llm_id: uuid.UUID) -> LLMData | None:
         """
         通过ID获取大模型
 
@@ -71,10 +71,7 @@
         async with postgres.session() as session:
             llm = (await session.scalars(
                 select(LLMData).where(
-                    and_(
-                        LLMData.id == llm_id,
-                        LLMData.userSub == user_sub,
-                    ),
+                    LLMData.id == llm_id,
                 ),
             )).one_or_none()
             if not llm:
@@ -84,11 +81,10 @@
 
 
     @staticmethod
-    async def list_llm(user_sub: str, llm_id: int | None) -> list[LLMProviderInfo]:
+    async def list_llm(llm_id: uuid.UUID | None) -> list[LLMProviderInfo]:
         """
         获取大模型列表
 
-        :param user_sub: 用户ID
         :param llm_id: 大模型ID
         :return: 大模型列表
         """
@@ -96,41 +92,37 @@
         if llm_id:
             llm_list = (await session.scalars(
                 select(LLMData).where(
-                    and_(
-                        LLMData.id == llm_id,
-                        LLMData.userSub == user_sub,
-                    ),
+                    LLMData.id == llm_id,
                 ),
             )).all()
         else:
             llm_list = (await session.scalars(
-                select(LLMData).where(LLMData.userSub == user_sub),
+                select(LLMData),
             )).all()
 
         if not llm_list:
-            logger.error("[LLMManager] 无法找到用户 %s 的大模型", user_sub)
+            logger.error("[LLMManager] 无法找到大模型 %s", llm_id)
             return []
 
-        # 默认大模型
-        llm_item = LLMProviderInfo(llmId="empty")
-        llm_list = [llm_item]
-        for llm in result:
+        provider_list = []
+        for llm in llm_list:
             llm_item = LLMProviderInfo(
-                llmId=llm["_id"],
-                icon=llm["icon"],
-                openaiBaseUrl=llm["openai_base_url"],
-                openaiApiKey=llm["openai_api_key"],
-                modelName=llm["model_name"],
-                maxTokens=llm["max_tokens"],
+                llmId=llm.id,
+                icon=llm.icon,
+                openaiBaseUrl=llm.openaiBaseUrl,
+                openaiApiKey=llm.openaiAPIKey,
+                modelName=llm.modelName,
+                maxTokens=llm.maxToken,
             )
-            llm_list.append(llm_item)
-        return llm_list
+            provider_list.append(llm_item)
+        return provider_list
 
 
     @staticmethod
-    async def update_llm(user_sub: str, llm_id: str | None, req: UpdateLLMReq) -> str:
+    async def update_llm(llm_id: uuid.UUID | None, req: UpdateLLMReq) -> None:
         """
         创建大模型
 
-        :param user_sub: 用户ID
+        :param llm_id: 大模型ID;为None时新建
         :param req: 创建大模型请求体
-        :return: 大模型对象
         """
@@ -163,15 +155,14 @@
             max_tokens=req.max_tokens,
         )
         await llm_collection.insert_one(llm.model_dump(by_alias=True))
-        return llm.id
 
 
     @staticmethod
-    async def delete_llm(user_sub: str, llm_id: int | None) -> None:
+    async def delete_llm(user_sub: str, llm_id: uuid.UUID | None) -> None:
         """
         删除大模型
 
         :param user_sub: 用户ID
         :param llm_id: 大模型ID
         """
         if llm_id is None:
@@ -181,10 +171,7 @@
         async with postgres.session() as session:
             llm = (await session.scalars(
                 select(LLMData).where(
-                    and_(
-                        LLMData.id == llm_id,
-                        LLMData.userSub == user_sub,
-                    ),
+                    LLMData.id == llm_id,
                 ),
             )).one_or_none()
             if not llm:
@@ -200,55 +187,30 @@
             if not user:
                 err = f"[LLMManager] 用户 {user_sub} 不存在"
                 raise ValueError(err)
-            user.selectedLLM = None
+            # 仅当用户默认模型正是被删除的模型时才清空
+            if user.defaultLLM == llm_id:
+                user.defaultLLM = None
             await session.commit()
 
 
     @staticmethod
-    async def update_user_llm(
+    async def update_user_default_llm(
         user_sub: str,
-        conversation_id: str,
-        llm_id: str,
-    ) -> str:
-        """更新对话的LLM"""
-        mongo = MongoDB()
-        conv_collection = mongo.get_collection("conversation")
-        llm_collection = mongo.get_collection("llm")
-
-        if llm_id != "empty":
-            llm_dict = await llm_collection.find_one({"_id": llm_id, "user_sub": user_sub})
-            if not llm_dict:
-                err = f"[LLMManager] LLM {llm_id} 不存在"
-                logger.error(err)
+        llm_id: uuid.UUID,
+    ) -> None:
+        """更新用户的默认LLM"""
+        async with postgres.session() as session:
+            user = (await session.scalars(
+                select(User).where(User.userSub == user_sub),
+            )).one_or_none()
+            if not user:
+                err = f"[LLMManager] 用户 {user_sub} 不存在"
                 raise ValueError(err)
-            llm_dict = {
-                "llm_id": llm_dict["_id"],
-                "model_name": llm_dict["model_name"],
-                "icon": llm_dict["icon"],
-            }
-        else:
-            llm_dict = {
-                "llm_id": "empty",
-                "model_name": config.llm.model,
-                "icon": llm_provider_dict["ollama"]["icon"],
-            }
-        conv_dict = await conv_collection.find_one({"_id": conversation_id, "user_sub": user_sub})
-        if not conv_dict:
-            err_msg = "[LLMManager] 更新对话的LLM失败,未找到对话"
-            logger.error(err_msg)
-            raise ValueError(err_msg)
-
-        llm_item = LLMData(
-            userSub=user_sub,
-            icon=llm_dict["icon"],
-            openaiBaseUrl=llm_dict["openai_base_url"],
-            openaiAPIKey=llm_dict["openai_api_key"],
-            modelName=llm_dict["model_name"],
-            maxToken=llm_dict["max_tokens"],
-        )
-
-        await conv_collection.update_one(
-            {"_id": conversation_id, "user_sub": user_sub},
-            {"$set": {"llm": llm_item.model_dump(by_alias=True)}},
-        )
-        return conversation_id
+            # 目标LLM必须存在,否则抛出ValueError,由路由层转为400
+            llm = (await session.scalars(
+                select(LLMData).where(LLMData.id == llm_id),
+            )).one_or_none()
+            if not llm:
+                err = f"[LLMManager] LLM {llm_id} 不存在"
+                raise ValueError(err)
+            user.defaultLLM = llm_id
+            await session.commit()
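For reference, a sketch of how the two new manager methods are meant to compose when resolving the model for a chat request. The fallback behaviour when no default is set is an assumption, not something this patch defines:

import uuid

from apps.models.llm import LLMData
from apps.services.llm import LLMManager

async def resolve_llm(user_sub: str) -> LLMData | None:
    """Hypothetical helper: pick the user's default LLM, if any."""
    llm_id = await LLMManager.get_user_default_llm(user_sub)
    if llm_id is None:
        return None  # caller would fall back to the configured default model
    return await LLMManager.get_llm(llm_id)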