remove pipeline_id and prompt_set_id since they now come from LLMNodeConfig

commit 26ca06d50d
parent 9363bd3442
Date: 2026-02-12 15:05:26 +08:00
6 changed files with 2 additions and 47 deletions
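
Each hunk below deletes a per-subclass copy of the same fields. The base class itself is not part of this diff, so the following is only a sketch of what LLMNodeConfig presumably provides now, assembled from the removed lines (names, defaults, and docstrings are copied from the deletions; the real base definition may differ):

from dataclasses import dataclass
from typing import Optional

@dataclass
class LLMNodeConfig:
    # Hypothetical reconstruction: every field below appears verbatim in a
    # deletion in this commit, but the base itself is never shown here.
    llm_name: str = "qwen-plus"
    """name of llm"""
    llm_provider: str = "openai"
    """provider of the llm"""
    base_url: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
    """base url; could be used to overwrite the base url in llm provider"""
    pipeline_id: Optional[str] = None
    """If set, load prompts from database (with file/hardcoded fallback)"""
    prompt_set_id: Optional[str] = None
    """If set, load from this specific prompt set instead of the active one"""

If that reading is right, subclasses such as DualConfig and RoutingConfig inherit these fields, so calls like DualConfig(pipeline_id="p1") keep working without redeclaring anything.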

File 1 of 6

@@ -3,7 +3,7 @@ from typing import Type, Callable, List
 import tyro
 import random
-from lang_agent.config import LLMNodeConfig
+from lang_agent.config import LLMKeyConfig
 from lang_agent.pipeline import Pipeline, PipelineConfig
 from langchain.chat_models import init_chat_model
@@ -11,7 +11,7 @@ from langchain_core.messages import BaseMessage, ToolMessage
 @tyro.conf.configure(tyro.conf.SuppressFixed)
 @dataclass
-class ValidatorConfig(LLMNodeConfig):
+class ValidatorConfig(LLMKeyConfig):
     _target: Type = field(default_factory=lambda:Validator)
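
This file also swaps the validator's base from LLMNodeConfig to LLMKeyConfig. LLMKeyConfig is never defined in this diff; a plausible reading, sketched below, is a slimmer base that carries only LLM connection settings, so the validator stops inheriting pipeline_id/prompt_set_id fields it never used. The field set here is purely an assumption:

from dataclasses import dataclass

@dataclass
class LLMKeyConfig:
    # Hypothetical contents -- the diff only confirms that this class
    # exists in lang_agent.config, not what it declares.
    llm_name: str = "qwen-plus"
    llm_provider: str = "openai"
    base_url: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"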

File 2 of 6

@@ -51,12 +51,6 @@ TOOL_SYS_PROMPT = """根据用户的心情使用self_led_control改变灯的颜
 class DualConfig(LLMNodeConfig):
     _target: Type = field(default_factory=lambda:Dual)
-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with hardcoded fallback)"""
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
     tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)
 from langchain.tools import tool

File 3 of 6

@@ -26,12 +26,6 @@ class ReactGraphConfig(LLMNodeConfig):
     sys_prompt_f:str = osp.join(osp.dirname(osp.dirname(osp.dirname(__file__))), "configs", "prompts", "blueberry.txt")
     """path to system prompt"""
-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with file fallback)"""
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
     tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)
     def __post_init__(self):

File 4 of 6

@@ -30,24 +30,9 @@ from langgraph.checkpoint.memory import MemorySaver
 class RoutingConfig(LLMNodeConfig):
     _target: Type = field(default_factory=lambda: RoutingGraph)
-    llm_name: str = "qwen-plus"
-    """name of llm"""
-    llm_provider:str = "openai"
-    """provider of the llm"""
-    base_url:str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
-    """base url; could be used to overwrite the baseurl in llm provider"""
     sys_promp_dir: str = osp.join(osp.dirname(osp.dirname(osp.dirname(__file__))), "configs", "route_sys_prompts")
     """path to directory or json contantaining system prompt for graphs; Will overwrite systemprompt from xiaozhi if 'chat_prompt' is provided"""
-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with file fallback)"""
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
     tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)
     tool_node_config: AnnotatedToolNode = field(default_factory=ToolNodeConfig)
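
This hunk moves the LLM connection settings (llm_name, llm_provider, base_url) up as well, not just the prompt ids, so nothing disappears from the public config surface. A minimal usage sketch, assuming the base keeps the same field names and defaults as the removed declarations (the ids are made up for illustration):

# Inherited dataclass fields stay settable exactly as before the removal.
cfg = RoutingConfig(
    llm_name="qwen-plus",
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    pipeline_id="demo-pipeline",   # hypothetical id
    prompt_set_id="demo-set",      # hypothetical id
)

The same should hold for CLI construction: tyro.cli(RoutingConfig) will keep exposing the inherited fields as flags.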

File 5 of 6

@@ -99,18 +99,6 @@ class VisionRoutingConfig(LLMNodeConfig):
     vision_llm_name: str = "qwen-vl-max"
     """LLM for vision/image analysis"""
-    llm_provider: str = "openai"
-    """provider of the llm"""
-    base_url: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
-    """base url for API"""
-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with hardcoded fallback)"""
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
     tool_manager_config: ToolManagerConfig = field(default_factory=ClientToolManagerConfig)

File 6 of 6

@@ -64,12 +64,6 @@ class PipelineConfig(LLMNodeConfig):
     port:int = 8588
     """what is my port"""
-    pipeline_id: str = None
-    """If set, load prompts from database (with file fallback)"""
-    prompt_set_id: str = None
-    """If set, load from this specific prompt set instead of the active one"""
     # graph_config: AnnotatedGraph = field(default_factory=ReactGraphConfig)
     graph_config: AnnotatedGraph = field(default_factory=RoutingConfig)
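
One incidental fix in this last hunk: PipelineConfig had declared pipeline_id: str = None, a str annotation with a None default that static type checkers reject. Assuming the base declares the field the way the other subclasses did, the deduplication also repairs the annotation:

from dataclasses import dataclass
from typing import Optional

@dataclass
class Before:
    pipeline_id: str = None            # annotation and default disagree (mypy error)

@dataclass
class After:
    pipeline_id: Optional[str] = None  # what LLMNodeConfig presumably declares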