From 26ca06d50d82eef3df0375ee5b9bb4910552d6db Mon Sep 17 00:00:00 2001
From: goulustis
Date: Thu, 12 Feb 2026 15:05:26 +0800
Subject: [PATCH] remove pipeline_id and prompt_set_id since LLMNodeConfig
 already defines them

---
 lang_agent/eval/validator.py        |  4 ++--
 lang_agent/graphs/dual_path.py      |  6 ------
 lang_agent/graphs/react.py          |  6 ------
 lang_agent/graphs/routing.py        | 15 ---------------
 lang_agent/graphs/vision_routing.py | 12 ------------
 lang_agent/pipeline.py              |  6 ------
 6 files changed, 2 insertions(+), 47 deletions(-)

diff --git a/lang_agent/eval/validator.py b/lang_agent/eval/validator.py
index 92855c7..d120f54 100644
--- a/lang_agent/eval/validator.py
+++ b/lang_agent/eval/validator.py
@@ -3,7 +3,7 @@ from typing import Type, Callable, List
 import tyro
 import random
 
-from lang_agent.config import LLMNodeConfig
+from lang_agent.config import LLMKeyConfig
 from lang_agent.pipeline import Pipeline, PipelineConfig
 from langchain.chat_models import init_chat_model
 
@@ -11,7 +11,7 @@ from langchain_core.messages import BaseMessage, ToolMessage
 
 @tyro.conf.configure(tyro.conf.SuppressFixed)
 @dataclass
-class ValidatorConfig(LLMNodeConfig):
+class ValidatorConfig(LLMKeyConfig):
 
     _target: Type = field(default_factory=lambda:Validator)
 
diff --git a/lang_agent/graphs/dual_path.py b/lang_agent/graphs/dual_path.py
index 673f4b5..464eb1e 100644
--- a/lang_agent/graphs/dual_path.py
+++ b/lang_agent/graphs/dual_path.py
@@ -51,12 +51,6 @@ TOOL_SYS_PROMPT = """根据用户的心情使用self_led_control改变灯的颜
 class DualConfig(LLMNodeConfig):
     _target: Type = field(default_factory=lambda:Dual)
 
-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with hardcoded fallback)"""
-
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)
 
 from langchain.tools import tool
diff --git a/lang_agent/graphs/react.py b/lang_agent/graphs/react.py
index 9cc5ed8..b74de55 100644
--- a/lang_agent/graphs/react.py
+++ b/lang_agent/graphs/react.py
@@ -26,12 +26,6 @@ class ReactGraphConfig(LLMNodeConfig):
     sys_prompt_f:str = osp.join(osp.dirname(osp.dirname(osp.dirname(__file__))), "configs", "prompts", "blueberry.txt")
     """path to system prompt"""
 
-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with file fallback)"""
-
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)
 
     def __post_init__(self):
diff --git a/lang_agent/graphs/routing.py b/lang_agent/graphs/routing.py
index a2d8415..2eebd1c 100644
--- a/lang_agent/graphs/routing.py
+++ b/lang_agent/graphs/routing.py
@@ -30,24 +30,9 @@ class RoutingConfig(LLMNodeConfig):
 
     _target: Type = field(default_factory=lambda: RoutingGraph)
 
-    llm_name: str = "qwen-plus"
-    """name of llm"""
-
-    llm_provider:str = "openai"
-    """provider of the llm"""
-
-    base_url:str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
-    """base url; could be used to overwrite the baseurl in llm provider"""
-
     sys_promp_dir: str = osp.join(osp.dirname(osp.dirname(osp.dirname(__file__))), "configs", "route_sys_prompts")
     """path to directory or json contantaining system prompt for graphs; Will overwrite systemprompt from xiaozhi if 'chat_prompt' is provided"""
 
-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with file fallback)"""
-
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)
     tool_node_config: AnnotatedToolNode = field(default_factory=ToolNodeConfig)
 
diff --git a/lang_agent/graphs/vision_routing.py b/lang_agent/graphs/vision_routing.py
index 6f02a1e..5e8700b 100644
--- a/lang_agent/graphs/vision_routing.py
+++ b/lang_agent/graphs/vision_routing.py
@@ -99,18 +99,6 @@ class VisionRoutingConfig(LLMNodeConfig):
     vision_llm_name: str = "qwen-vl-max"
     """LLM for vision/image analysis"""
 
-    llm_provider: str = "openai"
-    """provider of the llm"""
-
-    base_url: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
-    """base url for API"""
-
-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with hardcoded fallback)"""
-
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     tool_manager_config: ToolManagerConfig = field(default_factory=ClientToolManagerConfig)
 
 
diff --git a/lang_agent/pipeline.py b/lang_agent/pipeline.py
index 9783cf3..a053cac 100644
--- a/lang_agent/pipeline.py
+++ b/lang_agent/pipeline.py
@@ -64,12 +64,6 @@ class PipelineConfig(LLMNodeConfig):
     port:int = 8588
     """what is my port"""
 
-    pipeline_id: str = None
-    """If set, load prompts from database (with file fallback)"""
-
-    prompt_set_id: str = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     # graph_config: AnnotatedGraph = field(default_factory=ReactGraphConfig)
     graph_config: AnnotatedGraph = field(default_factory=RoutingConfig)
 
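
For context, a sketch of the config hierarchy this change assumes: the duplicated
pipeline_id / prompt_set_id fields move up into the shared base, so the graph and
pipeline configs inherit them instead of redeclaring them. The real LLMKeyConfig /
LLMNodeConfig live in lang_agent/config.py, which this diff does not touch; the
field names and defaults below are only inferred from the duplicate declarations
removed above.

# Hypothetical sketch -- not part of this patch; shape inferred from the
# duplicate fields deleted in the hunks above.
from dataclasses import dataclass
from typing import Optional


@dataclass
class LLMKeyConfig:
    # Prompt-lookup keys assumed to be shared by every config.
    pipeline_id: Optional[str] = None
    """If set, load prompts from database (with file/hardcoded fallback)"""

    prompt_set_id: Optional[str] = None
    """If set, load from this specific prompt set instead of the active one"""


@dataclass
class LLMNodeConfig(LLMKeyConfig):
    # LLM connection settings assumed to be hoisted out of the graph configs.
    llm_name: str = "qwen-plus"
    """name of llm"""

    llm_provider: str = "openai"
    """provider of the llm"""

    base_url: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
    """base url; could be used to overwrite the base url in the llm provider"""

With this shape, RoutingConfig(LLMNodeConfig) and ValidatorConfig(LLMKeyConfig)
both pick up pipeline_id / prompt_set_id through inheritance, which is what lets
every hunk above delete its local copies without changing behavior.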