From 47bdd9d5a589c232b520bd94ae85e014502cc009 Mon Sep 17 00:00:00 2001 From: jeremygan2021 Date: Wed, 5 Nov 2025 00:29:54 +0800 Subject: [PATCH 1/2] =?UTF-8?q?model=20=E6=9B=B4=E6=8D=A2?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- fastapi_server/server.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/fastapi_server/server.py b/fastapi_server/server.py index 394d404..aa9328c 100644 --- a/fastapi_server/server.py +++ b/fastapi_server/server.py @@ -26,6 +26,8 @@ class ChatCompletionRequest(BaseModel): max_tokens: Optional[int] = Field(default=500, description="最大生成token数") stream: Optional[bool] = Field(default=False, description="是否流式返回") thread_id: Optional[int] = Field(default=3, description="线程ID,用于多轮对话") + llm_provider: Optional[str] = Field(default="openai", description="LLM提供商") + base_url: Optional[str] = Field(default="https://dashscope.aliyuncs.com/compatible-mode/v1", description="LLM API基础URL") class ChatCompletionResponseChoice(BaseModel): index: int @@ -73,14 +75,6 @@ app.add_middleware( allow_headers=["*"], ) -# 初始化Pipeline -pipeline_config = PipelineConfig() -pipeline_config.llm_name = "qwen-flash" -pipeline_config.llm_provider = "openai" -pipeline_config.base_url = "https://dashscope.aliyuncs.com/compatible-mode/v1" - -pipeline = Pipeline(pipeline_config) - @app.post("/v1/chat/completions", response_model=ChatCompletionResponse) async def chat_completions( request: ChatCompletionRequest, @@ -103,6 +97,15 @@ async def chat_completions( if not user_message: raise HTTPException(status_code=400, detail="缺少用户消息") + # 动态创建PipelineConfig + pipeline_config = PipelineConfig() + pipeline_config.llm_name = request.model + pipeline_config.llm_provider = request.llm_provider + pipeline_config.base_url = request.base_url + + # 创建新的Pipeline实例 + pipeline = Pipeline(pipeline_config)  # NOTE(review): a fresh Pipeline per request discards any per-thread_id multi-turn state the removed module-level singleton held — confirm multi-turn chat still works + # 调用pipeline的chat方法 response_content = pipeline.chat( inp=user_message, From 
4e83426d16460e122187eb88181abfc49c0a07ef Mon Sep 17 00:00:00 2001 From: jeremygan2021 Date: Wed, 5 Nov 2025 00:37:09 +0800 Subject: [PATCH 2/2] =?UTF-8?q?=E5=B0=81=E8=A3=85fastAPI=20openAI=E6=8E=A5?= =?UTF-8?q?=E5=8F=A3=E8=A7=84=E8=8C=83?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- fastapi_server/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fastapi_server/server.py b/fastapi_server/server.py index aa9328c..834d88a 100644 --- a/fastapi_server/server.py +++ b/fastapi_server/server.py @@ -91,7 +91,7 @@ async def chat_completions( for message in request.messages: if message.role == "user": user_message = message.content - elif message.role == "system": + elif message.role == "system" or message.role == "assistant":  # NOTE(review): storing assistant-turn content as system_message conflates OpenAI roles (assistant = model history, system = instructions) — confirm this is intended system_message = message.content if not user_message: