Compare commits

...

24 Commits

Author SHA1 Message Date
737a80aa39 llm name fix 2026-03-18 20:44:28 +08:00
bdd4ddec9e show tool message in chat 2026-03-14 11:20:39 +08:00
2ee55d25cc bug fix: skill_dir show up correctly in frontend 2026-03-13 16:09:26 +08:00
bf9ce709e2 bug fix; error when saving config for fs_backend 2026-03-13 16:01:45 +08:00
59331d2435 api bug fix 2026-03-13 14:52:37 +08:00
a9488a655a bug fix 2026-03-13 14:17:28 +08:00
eeadd4c825 update nginx.conf 2026-03-13 14:03:56 +08:00
42d8b8e8e1 bug fixes 2026-03-13 13:57:00 +08:00
da6cc1e18b deepagent backend configurable 2026-03-13 13:56:24 +08:00
691ef1c77d not edit my computer 2026-03-13 13:44:58 +08:00
9f9813f01d add deep agent image 2026-03-13 11:17:31 +08:00
f9f3651c7e fix docker 2026-03-12 20:37:17 +08:00
87407e1656 add chinese mirror 2026-03-12 20:33:11 +08:00
2c226e2df2 fix? 2026-03-12 17:20:32 +08:00
8b2a506177 fixes? 2026-03-12 17:18:23 +08:00
a26cda2f04 add dockerfile.frontend 2026-03-12 16:51:48 +08:00
5e2a86e3be update install 2026-03-12 16:50:26 +08:00
2c7d5ea589 frontend in docker 2026-03-12 16:40:51 +08:00
36e3b40936 build frontend 2026-03-12 16:36:35 +08:00
86f6e4f81c change port num 2026-03-12 16:11:23 +08:00
60f3029e54 update sht 2026-03-12 11:37:08 +08:00
fe7ff9a516 fix tool initialization failure 2026-03-12 11:36:37 +08:00
33faedc1b1 enable comma in tool list 2026-03-12 11:36:11 +08:00
c9b1c5cb32 support both docker and local dev 2026-03-11 22:31:37 +08:00
25 changed files with 626 additions and 108 deletions

View File

@@ -172,7 +172,7 @@ uvicorn fastapi_server.server_dashscope:app --reload --host 0.0.0.0 --port 8588
uvicorn fastapi_server.combined:app --reload --host 0.0.0.0 --port 8500
```
You can change the URL by setting `VITE_FRONT_API_BASE_URL` in `frontend/.env` (defaults to `http://127.0.0.1:8500`).
You can change the URL by setting `VITE_FRONT_API_BASE_URL` in `frontend/.env` (defaults to `/`, i.e. same-origin).
### Start the development server

View File

@@ -0,0 +1,19 @@
# Build the Vite frontend in a Node container. The compiled bundle is staged in
# /opt/frontend_dist at build time and copied into /app/dist when the container runs.
FROM node:20-alpine
WORKDIR /app
# Use the npmmirror registry for faster installs from within China.
RUN npm config set registry https://registry.npmmirror.com
# Build-time API base for Vite (must be set before npm run build).
ARG VITE_FRONT_API_BASE_URL=/
ENV VITE_FRONT_API_BASE_URL=${VITE_FRONT_API_BASE_URL}
# Install dependencies first so this layer stays cached until package*.json changes.
COPY package*.json ./
RUN npm install
COPY . .
# Build once at image build time and stash the artifacts outside /app/dist,
# because /app/dist is mounted as a shared volume at run time (see compose file).
RUN npm run build && \
mkdir -p /opt/frontend_dist && \
cp -r dist/. /opt/frontend_dist/
# On start: refresh the mounted dist volume from the staged build output and
# list the files for visibility in container logs.
CMD ["sh", "-c", "rm -rf /app/dist/* && cp -r /opt/frontend_dist/. /app/dist && ls /app/dist"]

View File

@@ -17,15 +17,23 @@ FROM python:3.12-slim
WORKDIR /app
# Install system dependencies
RUN apt-get update && apt-get install -y \
postgresql-client \
curl \
&& rm -rf /var/lib/apt/lists/*
RUN set -eux; \
for source_file in /etc/apt/sources.list /etc/apt/sources.list.d/*.list /etc/apt/sources.list.d/*.sources; do \
if [ -f "$source_file" ]; then \
sed -i 's|deb.debian.org|mirrors.aliyun.com|g' "$source_file"; \
sed -i 's|security.debian.org|mirrors.aliyun.com|g' "$source_file"; \
fi; \
done; \
apt-get update; \
apt-get install -y --no-install-recommends \
postgresql-client \
curl; \
rm -rf /var/lib/apt/lists/*
# Copy Python dependencies
COPY pyproject.toml ./
RUN pip install --no-cache-dir --upgrade pip && \
pip install --no-cache-dir -e .
RUN pip install --no-cache-dir --upgrade pip -i https://pypi.tuna.tsinghua.edu.cn/simple && \
pip install --no-cache-dir -e . -i https://pypi.tuna.tsinghua.edu.cn/simple
# Copy application code
COPY lang_agent/ ./lang_agent/

View File

@@ -19,13 +19,13 @@ services:
- postgres_data:/var/lib/postgresql/data
- ../scripts/init_database:/docker-entrypoint-initdb.d
ports:
- "${POSTGRES_PORT:-5432}:5432"
- "${POSTGRES_PORT:-5434}:5432"
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 10s
timeout: 5s
retries: 5
restart: unless-stopped
restart: "no" #unless-stopped
# Backend API server
backend:
@@ -52,7 +52,7 @@ services:
depends_on:
postgres:
condition: service_healthy
restart: unless-stopped
restart: "no" #unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8500/health"]
interval: 30s
@@ -60,6 +60,18 @@ services:
retries: 3
start_period: 40s
# Frontend build service
frontend:
build:
context: ../frontend
dockerfile: ../docker/Dockerfile.frontend
args:
VITE_FRONT_API_BASE_URL: ${VITE_FRONT_API_BASE_URL:-/}
volumes:
- frontend_dist:/app/dist
networks:
- app-network
# Nginx for serving frontend (optional - can also serve via FastAPI)
nginx:
image: nginx:alpine
@@ -67,16 +79,20 @@ services:
networks:
- app-network
ports:
- "${FRONTEND_PORT:-80}:80"
- "${FRONTEND_PORT:-8080}:80"
volumes:
- ../nginx.conf:/etc/nginx/nginx.conf:ro
- ../frontend/dist:/usr/share/nginx/html:ro
- frontend_dist:/usr/share/nginx/html:ro
depends_on:
- backend
restart: unless-stopped
frontend:
condition: service_completed_successfully
backend:
condition: service_started
restart: "no" #unless-stopped
volumes:
postgres_data:
frontend_dist:
networks:
app-network:

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

View File

@@ -36,20 +36,29 @@ type EditableAgent = {
pipelineId: string;
promptSetId?: string;
toolKeys: string[];
toolKeysInput: string;
prompts: Record<string, string>;
apiKey: string;
llmName: string;
actBackend: DeepAgentActBackend;
fileBackendConfig: FileBackendConfig;
};
type AgentChatMessage = {
id: string;
role: "user" | "assistant";
role: "user" | "assistant" | "tool";
content: string;
};
type ActiveTab = "agents" | "discussions" | "mcp";
type DeepAgentActBackend = "state_bk" | "local_shell" | "daytona_sandbox";
type FileBackendConfig = {
skills_dir: string;
rt_skills_dir: string;
workspace_dir?: string;
api_key?: string;
};
type McpTransport = "streamable_http" | "sse" | "stdio";
type McpEntry = {
id: string;
@@ -73,7 +82,24 @@ const DEEPAGENT_BACKEND_OPTIONS: Array<{
{ value: "local_shell", label: "local_shell" },
{ value: "daytona_sandbox", label: "daytona_sandbox" },
];
const LOCAL_DASHSCOPE_BASE = "http://127.0.0.1:8500/v1/apps";
const DEFAULT_FILE_BACKEND_CONFIG: Record<DeepAgentActBackend, FileBackendConfig> = {
state_bk: {
skills_dir: "./assets/skills",
rt_skills_dir: "/skills",
},
local_shell: {
skills_dir: "./workspace/skills",
rt_skills_dir: "/skills",
workspace_dir: "./workspace",
},
daytona_sandbox: {
skills_dir: "./workspace/skills",
rt_skills_dir: "",
api_key: "",
},
};
const LOCAL_DASHSCOPE_BASE = "/v1/apps";
const MCP_TRANSPORT_OPTIONS: McpTransport[] = ["streamable_http", "sse", "stdio"];
const GRAPH_ARCH_IMAGE_MODULES = import.meta.glob(
"../assets/images/graph_arch/*.{png,jpg,jpeg,webp,gif}",
@@ -426,6 +452,28 @@ function createConversationId(): string {
return `conv-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
}
function mapConversationMessageToAgentChatMessage(
  message: ConversationMessageItem
): AgentChatMessage | null {
  // Translate a stored conversation message into a chat bubble entry.
  // Unknown message types (e.g. "system") are dropped by returning null.
  const roleByType: Record<string, AgentChatMessage["role"]> = {
    human: "user",
    user: "user",
    ai: "assistant",
    assistant: "assistant",
    tool: "tool",
  };
  const normalizedType = (message.message_type || "").toLowerCase();
  const role = roleByType[normalizedType];
  if (!role) {
    return null;
  }
  // Sequence number + timestamp + role gives a stable, unique React key.
  return {
    id: `${message.sequence_number}-${message.created_at}-${role}`,
    role,
    content: message.content || "",
  };
}
function normalizeDeepAgentActBackend(value: unknown): DeepAgentActBackend {
if (value === "local_shell" || value === "localshell") {
return "local_shell";
@@ -439,9 +487,68 @@ function normalizeDeepAgentActBackend(value: unknown): DeepAgentActBackend {
return DEFAULT_DEEPAGENT_ACT_BACKEND;
}
function getDefaultFileBackendConfig(
  backend: DeepAgentActBackend
): FileBackendConfig {
  // Hand out a shallow copy so callers can edit fields without
  // mutating the shared DEFAULT_FILE_BACKEND_CONFIG table.
  const defaults = DEFAULT_FILE_BACKEND_CONFIG[backend];
  return Object.assign({}, defaults);
}
function isRecord(value: unknown): value is Record<string, unknown> {
  // Arrays and null both report typeof "object" in JS, so exclude them explicitly.
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === "object";
}
function readOptionalString(
  record: Record<string, unknown>,
  key: keyof FileBackendConfig
): string | undefined {
  // Only surface the field when it is actually a string;
  // any other type (or absence) is treated as "not set".
  const candidate = record[key];
  if (typeof candidate === "string") {
    return candidate;
  }
  return undefined;
}
function getDeepAgentEditorState(config: GraphConfigReadResponse): {
  actBackend: DeepAgentActBackend;
  fileBackendConfig: FileBackendConfig;
} {
  // Pull persisted deepagent settings out of graph_params, falling back to the
  // per-backend defaults for anything missing or malformed.
  const graphParams = isRecord(config.graph_params) ? config.graph_params : {};
  const actBackend = normalizeDeepAgentActBackend(graphParams.act_bkend);
  // Start from a fresh copy of the defaults and overlay stored string fields.
  const fileBackendConfig = getDefaultFileBackendConfig(actBackend);
  const rawFileBackendConfig = isRecord(graphParams.file_backend_config)
    ? graphParams.file_backend_config
    : null;
  if (rawFileBackendConfig) {
    // Single pass over the known fields; the previous version called
    // readOptionalString twice per key inside a conditional-spread chain.
    const overridableKeys: Array<keyof FileBackendConfig> = [
      "skills_dir",
      "rt_skills_dir",
      "workspace_dir",
      "api_key",
    ];
    for (const key of overridableKeys) {
      const storedValue = readOptionalString(rawFileBackendConfig, key);
      if (storedValue !== undefined) {
        fileBackendConfig[key] = storedValue;
      }
    }
  }
  return { actBackend, fileBackendConfig };
}
function buildGraphParams(editor: EditableAgent): Record<string, unknown> {
if (editor.graphId === "deepagent") {
return { act_bkend: editor.actBackend };
return {
act_bkend: editor.actBackend,
file_backend_config: editor.fileBackendConfig,
};
}
return {};
}
@@ -450,6 +557,7 @@ function toEditable(
config: GraphConfigReadResponse,
draft: boolean
): EditableAgent {
const deepAgentState = getDeepAgentEditorState(config);
return {
id: draft
? `draft-${Date.now()}-${Math.random().toString(36).slice(2, 6)}`
@@ -459,10 +567,12 @@ function toEditable(
pipelineId: config.pipeline_id,
promptSetId: config.prompt_set_id,
toolKeys: config.tool_keys || [],
toolKeysInput: (config.tool_keys || []).join(", "),
prompts: config.prompt_dict || {},
apiKey: config.api_key || DEFAULT_API_KEY,
llmName: DEFAULT_LLM_NAME,
actBackend: DEFAULT_DEEPAGENT_ACT_BACKEND,
actBackend: deepAgentState.actBackend,
fileBackendConfig: deepAgentState.fileBackendConfig,
};
}
@@ -750,10 +860,15 @@ export default function App() {
graphId,
prompts: { ...defaults.prompt_dict },
toolKeys: defaults.tool_keys || [],
toolKeysInput: (defaults.tool_keys || []).join(", "),
actBackend:
graphId === "deepagent"
? prev.actBackend || DEFAULT_DEEPAGENT_ACT_BACKEND
: DEFAULT_DEEPAGENT_ACT_BACKEND,
fileBackendConfig:
graphId === "deepagent"
? prev.fileBackendConfig || getDefaultFileBackendConfig(DEFAULT_DEEPAGENT_ACT_BACKEND)
: getDefaultFileBackendConfig(DEFAULT_DEEPAGENT_ACT_BACKEND),
};
if (next.isDraft) {
setDraftAgents((drafts) => drafts.map((draft) => (draft.id === next.id ? next : draft)));
@@ -787,6 +902,24 @@ export default function App() {
setEditorAndSyncDraft((prev) => ({ ...prev, [key]: value }));
}
function updateActBackend(newBackend: DeepAgentActBackend): void {
  // Switching backends resets the file backend config to that backend's
  // defaults, since the editable field set differs per backend.
  setEditorAndSyncDraft((prev) => {
    const freshConfig = getDefaultFileBackendConfig(newBackend);
    return { ...prev, actBackend: newBackend, fileBackendConfig: freshConfig };
  });
}
function updateFileBackendConfig(key: keyof FileBackendConfig, value: string): void {
  // Merge a single edited field into the current file backend config,
  // leaving all other editor state untouched.
  setEditorAndSyncDraft((prev) => {
    const mergedConfig = { ...prev.fileBackendConfig, [key]: value };
    return { ...prev, fileBackendConfig: mergedConfig };
  });
}
function updatePrompt(key: string, value: string): void {
setEditorAndSyncDraft((prev) => ({
...prev,
@@ -1116,6 +1249,7 @@ export default function App() {
async function sendAgentChatMessage(): Promise<void> {
const pipelineId = (chatPipelineId || "").trim();
const conversationId = chatConversationId;
const message = chatInput.trim();
if (!pipelineId || !message || chatSending) {
return;
@@ -1145,7 +1279,7 @@ export default function App() {
try {
await streamAgentChatResponse({
appId: pipelineId,
sessionId: chatConversationId,
sessionId: conversationId,
apiKey: authKey,
message,
signal: controller.signal,
@@ -1162,6 +1296,34 @@ export default function App() {
);
},
});
// Some runtimes namespace thread ids as "<pipeline_id>:<session_id>" when persisting.
// Try both IDs and fail soft so a successful streamed reply never turns into a UI error.
const candidateConversationIds = [
conversationId,
`${pipelineId}:${conversationId}`,
];
let reloaded = false;
for (const candidateId of candidateConversationIds) {
try {
const storedMessages = await getPipelineConversationMessages(
pipelineId,
candidateId
);
const normalizedMessages = storedMessages
.map(mapConversationMessageToAgentChatMessage)
.filter((item): item is AgentChatMessage => item !== null);
if (normalizedMessages.length > 0) {
setChatMessages(normalizedMessages);
reloaded = true;
break;
}
} catch {
// Ignore refresh failures; keep streamed content visible.
}
}
if (!reloaded) {
// Keep existing streamed messages without surfacing a false error state.
}
} catch (error) {
if ((error as Error).message === "Request cancelled") {
setChatMessages((prev) =>
@@ -1366,8 +1528,12 @@ export default function App() {
<label>
tool_keys (comma separated)
<input
value={editor.toolKeys.join(", ")}
onChange={(e) => updateEditor("toolKeys", parseToolCsv(e.target.value))}
value={editor.toolKeysInput}
onChange={(e) => {
const raw = e.target.value;
updateEditor("toolKeysInput", raw);
updateEditor("toolKeys", parseToolCsv(raw));
}}
placeholder="tool_a, tool_b"
disabled={busy}
/>
@@ -1397,25 +1563,69 @@ export default function App() {
</label>
{editor.graphId === "deepagent" ? (
<label>
act_bkend
<select
value={editor.actBackend}
onChange={(e) =>
updateEditor(
"actBackend",
normalizeDeepAgentActBackend(e.target.value)
)
}
disabled={busy}
>
{DEEPAGENT_BACKEND_OPTIONS.map((option) => (
<option key={option.value} value={option.value}>
{option.label}
</option>
))}
</select>
</label>
<>
<label>
act_bkend
<select
value={editor.actBackend}
onChange={(e) =>
updateActBackend(normalizeDeepAgentActBackend(e.target.value))
}
disabled={busy}
>
{DEEPAGENT_BACKEND_OPTIONS.map((option) => (
<option key={option.value} value={option.value}>
{option.label}
</option>
))}
</select>
</label>
<div className="file-backend-config">
<h3>File Backend Config</h3>
{editor.actBackend === "daytona_sandbox" ? (
<label>
api_key
<input
type="password"
value={editor.fileBackendConfig.api_key || ""}
onChange={(e) => updateFileBackendConfig("api_key", e.target.value)}
placeholder="Daytona API key"
disabled={busy}
/>
</label>
) : null}
{editor.actBackend === "local_shell" ? (
<label>
workspace_dir
<input
value={editor.fileBackendConfig.workspace_dir || ""}
onChange={(e) => updateFileBackendConfig("workspace_dir", e.target.value)}
placeholder="./workspace"
disabled={busy}
/>
</label>
) : null}
<label>
skills_dir
<input
value={editor.fileBackendConfig.skills_dir || ""}
onChange={(e) => updateFileBackendConfig("skills_dir", e.target.value)}
placeholder="./assets/skills"
disabled={busy}
/>
</label>
<label>
rt_skills_dir
<input
value={editor.fileBackendConfig.rt_skills_dir || ""}
onChange={(e) => updateFileBackendConfig("rt_skills_dir", e.target.value)}
placeholder="/skills"
disabled={busy}
/>
</label>
</div>
</>
) : null}
<div className="prompt-section">
@@ -1748,9 +1958,15 @@ export default function App() {
chatMessages.map((message) => (
<article
key={message.id}
className={`chat-modal-message ${message.role === "assistant" ? "assistant" : "user"}`}
className={`chat-modal-message ${message.role}`}
>
<strong>{message.role === "assistant" ? "Agent" : "You"}</strong>
<strong>
{message.role === "assistant"
? "Agent"
: message.role === "tool"
? "Tool"
: "You"}
</strong>
<div className="chat-message-content">
<ReactMarkdown remarkPlugins={[remarkGfm]}>
{message.content || (chatSending && message.role === "assistant" ? "..." : "")}

View File

@@ -0,0 +1,22 @@
import { describe, expect, it } from "vitest";

import { joinApiUrl } from "./frontApis";

// Unit tests for joinApiUrl, which combines the configured API base URL
// with a request path before fetch calls in frontApis.ts.
describe("joinApiUrl", () => {
  // A "/" base means same-origin deployment (Docker + nginx): return only the path.
  it("keeps same-origin paths when base url is slash", () => {
    expect(joinApiUrl("/", "/v1/pipelines")).toBe("/v1/pipelines");
  });

  // An absolute base must not produce a double slash at the join point.
  it("joins absolute host and trims trailing slash", () => {
    expect(joinApiUrl("http://127.0.0.1:8500/", "/v1/pipelines")).toBe(
      "http://127.0.0.1:8500/v1/pipelines"
    );
  });

  // A missing leading slash on the path is normalized before joining.
  it("accepts path without leading slash", () => {
    expect(joinApiUrl("http://127.0.0.1:8500", "v1/pipelines")).toBe(
      "http://127.0.0.1:8500/v1/pipelines"
    );
  });
});

View File

@@ -19,8 +19,19 @@ import type {
RuntimeAuthInfoResponse,
} from "../types";
const API_BASE_URL =
import.meta.env.VITE_FRONT_API_BASE_URL?.trim() || "http://127.0.0.1:8500";
const API_BASE_URL = import.meta.env.VITE_FRONT_API_BASE_URL?.trim() || "/";
export function joinApiUrl(baseUrl: string, path: string): string {
  // Guarantee exactly one leading slash on the request path.
  const suffix = path.startsWith("/") ? path : `/${path}`;
  const base = baseUrl.trim();
  // An empty or "/" base means same-origin (the Docker + nginx default):
  // return the bare path so the browser resolves it against the current host.
  if (base === "" || base === "/") {
    return suffix;
  }
  // Drop trailing slashes from the base so the join never emits "//".
  const trimmedBase = base.replace(/\/+$/, "");
  return `${trimmedBase}${suffix}`;
}
// Log which backend the frontend is targeting on startup, with file + line hint.
// This runs once when the module is loaded.
@@ -30,7 +41,8 @@ console.info(
);
async function fetchJson<T>(path: string, init?: RequestInit): Promise<T> {
const response = await fetch(`${API_BASE_URL}${path}`, {
const url = joinApiUrl(API_BASE_URL, path);
const response = await fetch(url, {
headers: {
"Content-Type": "application/json",
...(init?.headers || {}),
@@ -49,7 +61,24 @@ async function fetchJson<T>(path: string, init?: RequestInit): Promise<T> {
}
throw new Error(message);
}
return (await response.json()) as T;
if (response.status === 204) {
return undefined as T;
}
const bodyText = await response.text();
if (!bodyText.trim()) {
return undefined as T;
}
try {
return JSON.parse(bodyText) as T;
} catch {
const preview = bodyText.slice(0, 160).replace(/\s+/g, " ").trim();
throw new Error(
`Expected JSON response from ${url}, but received non-JSON content: ${preview || "<empty>"}`
);
}
}
export function listAvailableGraphs(): Promise<AvailableGraphsResponse> {
@@ -189,7 +218,10 @@ export async function streamAgentChatResponse(
): Promise<string> {
const { appId, sessionId, apiKey, message, onText, signal } = options;
const response = await fetch(
`${API_BASE_URL}/v1/apps/${encodeURIComponent(appId)}/sessions/${encodeURIComponent(sessionId)}/responses`,
joinApiUrl(
API_BASE_URL,
`/v1/apps/${encodeURIComponent(appId)}/sessions/${encodeURIComponent(sessionId)}/responses`
),
{
method: "POST",
headers: {

View File

@@ -224,6 +224,7 @@ button:disabled {
}
.prompt-section,
.file-backend-config,
.run-info {
border: 1px solid #dbe2ea;
border-radius: 10px;
@@ -232,6 +233,7 @@ button:disabled {
}
.prompt-section h3,
.file-backend-config h3,
.run-info h3 {
margin-top: 0;
}
@@ -609,6 +611,10 @@ button:disabled {
border-left: 3px solid #26a269;
}
.chat-modal-message.tool {
border-left: 3px solid #8e6bd8;
}
.chat-modal-message p {
margin: 6px 0 0 0;
white-space: pre-wrap;

View File

@@ -23,6 +23,7 @@ export type GraphConfigReadResponse = {
tool_keys: string[];
prompt_dict: Record<string, string>;
api_key: string;
graph_params?: Record<string, unknown>;
};
export type GraphConfigUpsertRequest = {

File diff suppressed because one or more lines are too long

View File

@@ -1 +1 @@
{"root":["./src/App.tsx","./src/activeConfigSelection.test.ts","./src/activeConfigSelection.ts","./src/main.tsx","./src/types.ts","./src/vite-env.d.ts","./src/api/frontApis.ts"],"version":"5.9.3"}
{"root":["./src/App.tsx","./src/activeConfigSelection.test.ts","./src/activeConfigSelection.ts","./src/main.tsx","./src/types.ts","./src/vite-env.d.ts","./src/api/frontApis.test.ts","./src/api/frontApis.ts"],"version":"5.9.3"}

View File

@@ -4,5 +4,15 @@ export default defineConfig({
plugins: [react()],
server: {
port: 5173,
proxy: {
"/v1": {
target: "http://127.0.0.1:8500",
changeOrigin: true,
},
"/apps": {
target: "http://127.0.0.1:8500",
changeOrigin: true,
},
},
},
});

View File

@@ -5,6 +5,16 @@ export default defineConfig({
plugins: [react()],
server: {
port: 5173,
proxy: {
"/v1": {
target: "http://127.0.0.1:8500",
changeOrigin: true,
},
"/apps": {
target: "http://127.0.0.1:8500",
changeOrigin: true,
},
},
},
});

View File

@@ -4,6 +4,7 @@ from lang_agent.config.core_config import (
LLMKeyConfig,
LLMNodeConfig,
load_tyro_conf,
resolve_llm_api_key,
)
from lang_agent.config.constants import (

View File

@@ -10,6 +10,20 @@ from dotenv import load_dotenv
load_dotenv()
def resolve_llm_api_key(api_key: Optional[str]) -> Optional[str]:
    """Resolve the API key for OpenAI-compatible providers.

    A concrete caller-supplied key wins; the placeholder values ``None``, ``""``
    and ``"wrong-key"`` fall back to the ``ALI_API_KEY`` then ``OPENAI_API_KEY``
    environment variables. Returns ``None`` when nothing resolves.
    """
    placeholders = (None, "", "wrong-key")
    if api_key in placeholders:
        resolved = os.environ.get("ALI_API_KEY") or os.environ.get("OPENAI_API_KEY")
    else:
        resolved = api_key
    # Some OpenAI-compatible integrations still read OPENAI_API_KEY from env,
    # so mirror the resolved key there when it is not already set.
    if resolved and not os.environ.get("OPENAI_API_KEY"):
        os.environ["OPENAI_API_KEY"] = resolved
    return resolved
## NOTE: base classes taken from nerfstudio
class PrintableConfig:
"""
@@ -99,12 +113,12 @@ class LLMKeyConfig(InstantiateConfig):
"""api key for llm"""
def __post_init__(self):
if self.api_key == "wrong-key" or self.api_key is None:
self.api_key = os.environ.get("ALI_API_KEY")
if self.api_key is None:
logger.error(f"no ALI_API_KEY provided for embedding")
else:
logger.info("ALI_API_KEY loaded from environ")
original_api_key = self.api_key
self.api_key = resolve_llm_api_key(self.api_key)
if self.api_key is None:
logger.error("no ALI_API_KEY or OPENAI_API_KEY provided for embedding")
elif original_api_key in (None, "", "wrong-key"):
logger.info("LLM API key loaded from environment")
@dataclass

View File

@@ -20,6 +20,7 @@ from lang_agent.config.constants import (
MCP_CONFIG_DEFAULT_CONTENT,
PIPELINE_REGISTRY_PATH,
)
from lang_agent.config.core_config import load_tyro_conf
from lang_agent.front_api.build_server_utils import (
GRAPH_BUILD_FNCS,
update_pipeline_registry,
@@ -55,6 +56,7 @@ class GraphConfigReadResponse(BaseModel):
tool_keys: List[str]
prompt_dict: Dict[str, str]
api_key: str = Field(default="")
graph_params: Dict[str, Any] = Field(default_factory=dict)
class GraphConfigListItem(BaseModel):
@@ -325,6 +327,81 @@ def _normalize_pipeline_spec(pipeline_id: str, spec: Dict[str, Any]) -> Pipeline
)
def _resolve_config_path(config_file: str) -> str:
if osp.isabs(config_file):
return config_file
return osp.join(_PROJECT_ROOT, config_file)
def _normalize_deepagent_backend_name(file_backend_config: Any) -> Optional[str]:
if file_backend_config is None:
return None
type_names = {
type(file_backend_config).__name__.lower(),
getattr(getattr(file_backend_config, "_target", None), "__name__", "").lower(),
}
if any("statebk" in name for name in type_names):
return "state_bk"
if any("localshell" in name for name in type_names):
return "local_shell"
if any("daytona" in name for name in type_names):
return "daytona_sandbox"
return None
def _extract_graph_params_from_config(graph_id: Optional[str], loaded_cfg: Any) -> Dict[str, Any]:
    """Serialize deepagent-specific graph params from a loaded pipeline config.

    Only ``deepagent`` graphs carry extra params; everything else returns {}.
    Missing attributes are tolerated at every level via ``getattr`` defaults.
    """
    if graph_id != "deepagent":
        return {}
    graph_config = getattr(loaded_cfg, "graph_config", None)
    file_backend_config = getattr(graph_config, "file_backend_config", None)
    if file_backend_config is None:
        return {}
    graph_params: Dict[str, Any] = {}
    backend_name = _normalize_deepagent_backend_name(file_backend_config)
    if backend_name:
        graph_params["act_bkend"] = backend_name
    # Only ship fields that are actually set so the frontend can fall back to defaults.
    serialized = {}
    for key in ("skills_dir", "rt_skills_dir", "workspace_dir", "api_key"):
        value = getattr(file_backend_config, key, None)
        if value is not None:
            serialized[key] = value
    if serialized:
        graph_params["file_backend_config"] = serialized
    return graph_params
def _load_graph_params_for_pipeline(
    pipeline_id: str, graph_id: Optional[str]
) -> Dict[str, Any]:
    """Best-effort lookup of persisted graph params for a pipeline.

    Config file resolution order: the pipeline registry entry, then the
    conventional ``configs/pipelines/<pipeline_id>.yaml`` fallback. Any failure
    (missing file, unreadable registry, config parse error) yields an empty
    dict so the API response is never blocked on this optional metadata.
    """
    try:
        registry = _read_pipeline_registry()
        spec = registry.get("pipelines", {}).get(pipeline_id, {})
        config_file = ""
        if isinstance(spec, dict):
            config_file = str(spec.get("config_file") or "").strip()
        if not config_file:
            # Fall back to the conventional per-pipeline config location.
            fallback = osp.join(_PROJECT_ROOT, "configs", "pipelines", f"{pipeline_id}.yaml")
            if osp.exists(fallback):
                config_file = fallback
        if not config_file:
            return {}
        config_path = _resolve_config_path(config_file)
        if not osp.exists(config_path):
            return {}
        return _extract_graph_params_from_config(graph_id, load_tyro_conf(config_path))
    except Exception:
        # Deliberate best-effort: graph params are optional, so swallow errors
        # rather than fail the whole config-read endpoint.
        return {}
def _normalize_api_key_policy(api_key: str, policy: Dict[str, Any]) -> ApiKeyPolicyItem:
if not isinstance(policy, dict):
raise ValueError(f"api key policy for '{api_key}' must be an object")
@@ -428,6 +505,9 @@ async def get_default_graph_config(pipeline_id: str):
tool_keys=tool_keys,
prompt_dict=prompt_dict,
api_key=(active.get("api_key") or ""),
graph_params=_load_graph_params_for_pipeline(
pipeline_id, active.get("graph_id")
),
)
@@ -466,6 +546,9 @@ async def get_graph_config(pipeline_id: str, prompt_set_id: str):
tool_keys=tool_keys,
prompt_dict=prompt_dict,
api_key=(meta.get("api_key") or ""),
graph_params=_load_graph_params_for_pipeline(
pipeline_id, meta.get("graph_id")
),
)

View File

@@ -1,4 +1,4 @@
from typing import Any, Dict, List, Literal
from typing import Any, Dict, List, Literal, Optional
import os
import os.path as osp
import subprocess
@@ -75,23 +75,42 @@ def build_route(
**_: Any,
):
cmd_opt = [
"--pipeline.pipeline-id", pipeline_id,
"--pipeline.pipeline-id",
pipeline_id,
"--pipeline.llm-name",
llm_name,
"route", # ------------
"--llm-name", llm_name,
"--api-key", api_key,
"--pipeline-id", pipeline_id,
"--prompt-set-id", prompt_set,
"tool_node", # ------------
"--llm-name", llm_name,
"--api-key", api_key,
"--pipeline-id", pipeline_id,
"--prompt-set-id", prompt_set,
"--llm-name",
llm_name,
"--api-key",
api_key,
"--pipeline-id",
pipeline_id,
"--prompt-set-id",
prompt_set,
]
if tool_keys:
cmd_opt.extend(
["--tool-manager-config.client-tool-manager.tool-keys", *tool_keys]
)
# Tyro parses list options greedily across positional subcommands; repeat a
# parent-level option to terminate list parsing before `tool_node`.
cmd_opt.extend(["--pipeline-id", pipeline_id])
cmd_opt.extend(
[
"tool_node", # ------------
"--llm-name",
llm_name,
"--api-key",
api_key,
"--pipeline-id",
pipeline_id,
"--prompt-set-id",
prompt_set,
]
)
return _build_and_load_pipeline_config(pipeline_id, pipeline_config_dir, cmd_opt)
@@ -106,13 +125,20 @@ def build_react(
**_: Any,
):
cmd_opt = [
"--pipeline.pipeline-id", pipeline_id,
"--pipeline.pipeline-id",
pipeline_id,
"--pipeline.llm-name",
llm_name,
"react", # ------------
"--llm-name", llm_name,
"--api-key", api_key,
"--pipeline-id", pipeline_id,
"--prompt-set-id", prompt_set,
]
"--llm-name",
llm_name,
"--api-key",
api_key,
"--pipeline-id",
pipeline_id,
"--prompt-set-id",
prompt_set,
]
if tool_keys:
cmd_opt.extend(
["--tool-manager-config.client-tool-manager.tool-keys", *tool_keys]
@@ -136,6 +162,7 @@ def build_deep_agent(
"daytona_sandbox",
"daytonasandbox",
] = "state_bk",
file_backend_config: Optional[Dict[str, Any]] = None,
**_: Any,
):
backend_subcommand = _DEEP_AGENT_BACKEND_ALIASES.get(act_bkend)
@@ -146,22 +173,48 @@ def build_deep_agent(
)
cmd_opt = [
"--pipeline.pipeline-id", pipeline_id,
"--pipeline.pipeline-id",
pipeline_id,
"--pipeline.llm-name",
llm_name,
"deepagent",
"--llm-name", llm_name,
"--api-key", api_key,
"--pipeline-id", pipeline_id,
"--prompt-set-id", prompt_set,
backend_subcommand,
"--llm-name",
llm_name,
"--api-key",
api_key,
"--pipeline-id",
pipeline_id,
"--prompt-set-id",
prompt_set,
]
if tool_keys:
cmd_opt.extend(
["--tool-manager-config.client-tool-manager.tool-keys", *tool_keys]
)
cmd_opt.extend(["--pipeline-id", pipeline_id])
cmd_opt.append(backend_subcommand)
if file_backend_config:
if "skills_dir" in file_backend_config and file_backend_config["skills_dir"]:
cmd_opt.extend(["--skills-dir", file_backend_config["skills_dir"]])
if (
"rt_skills_dir" in file_backend_config
and file_backend_config["rt_skills_dir"]
):
cmd_opt.extend(["--rt-skills-dir", file_backend_config["rt_skills_dir"]])
if (
"workspace_dir" in file_backend_config
and file_backend_config["workspace_dir"]
):
cmd_opt.extend(["--workspace-dir", file_backend_config["workspace_dir"]])
if "api_key" in file_backend_config and file_backend_config["api_key"]:
cmd_opt.extend(["--api-key", file_backend_config["api_key"]])
return _build_and_load_pipeline_config(pipeline_id, pipeline_config_dir, cmd_opt)
# {pipeline_id: build_function}
GRAPH_BUILD_FNCS = {
"routing": build_route,

View File

@@ -1,8 +1,10 @@
from dataclasses import dataclass, field, is_dataclass
import os
from dataclasses import dataclass
from typing import Any
import tyro
import os.path as osp
from abc import ABC, abstractmethod
from loguru import logger
from lang_agent.config import InstantiateConfig
class BaseFilesystemBackend(ABC):
@@ -25,4 +27,25 @@ class BaseFilesystemBackend(ABC):
if hasattr(self.config, "rt_skills_dir"):
return {"skills" : [self.config.rt_skills_dir]}
else:
return {}
return {}
@dataclass
class FilesystemBackendConfig(InstantiateConfig):
    """Shared filesystem backend config behavior.

    Subclasses that declare ``skills_dir`` and/or ``workspace_dir`` fields get
    those directories created automatically after dataclass initialization.
    """

    def _ensure_dir_if_present(self, attr_name: str) -> None:
        # Skip attributes that are absent, non-string, or blank/whitespace-only.
        candidate = getattr(self, attr_name, None)
        if isinstance(candidate, str) and candidate.strip():
            os.makedirs(candidate, exist_ok=True)
            logger.info(f"Ensured {attr_name} exists: {candidate}")

    def __post_init__(self) -> None:
        # Create any declared directory fields up front so backends can assume
        # they exist by the time they run.
        for attr_name in ("skills_dir", "workspace_dir"):
            self._ensure_dir_if_present(attr_name)

View File

@@ -8,13 +8,12 @@ from loguru import logger
from daytona import Daytona, DaytonaConfig, FileUpload
from langchain_daytona import DaytonaSandbox
from lang_agent.config import InstantiateConfig
from lang_agent.fs_bkends import BaseFilesystemBackend
from lang_agent.fs_bkends.base import BaseFilesystemBackend, FilesystemBackendConfig
@tyro.conf.configure(tyro.conf.SuppressFixed)
@dataclass
class DaytonaSandboxConfig(InstantiateConfig):
class DaytonaSandboxConfig(FilesystemBackendConfig):
_target: Type = field(default_factory=lambda: DaytonaSandboxBk)
api_key: Optional[str] = None
@@ -27,6 +26,7 @@ class DaytonaSandboxConfig(InstantiateConfig):
"""runtime skills path inside the sandbox (auto-set from sandbox workdir)"""
def __post_init__(self):
super().__post_init__()
if self.api_key is None:
self.api_key = os.environ.get("DAYTONA_API_KEY")
if self.api_key is None:

View File

@@ -1,21 +1,16 @@
from dataclasses import dataclass, field, is_dataclass
from typing import Type, TypedDict, Literal, Dict, List, Tuple, Optional
from dataclasses import dataclass, field
from typing import Type
import tyro
import os.path as osp
from abc import ABC, abstractmethod
import glob
from loguru import logger
from deepagents.backends.utils import create_file_data
from deepagents.backends import LocalShellBackend
from lang_agent.config import InstantiateConfig
from lang_agent.fs_bkends import BaseFilesystemBackend
from lang_agent.fs_bkends.base import BaseFilesystemBackend, FilesystemBackendConfig
@tyro.conf.configure(tyro.conf.SuppressFixed)
@dataclass
class LocalShellConfig(InstantiateConfig):
class LocalShellConfig(FilesystemBackendConfig):
_target:Type = field(default_factory=lambda:LocalShell)
workspace_dir:str = "./workspace"

View File

@@ -1,16 +1,14 @@
from dataclasses import dataclass, field, is_dataclass
from typing import Type, TypedDict, Literal, Dict, List, Tuple, Optional
from dataclasses import dataclass, field
from typing import Type
import tyro
import os.path as osp
from abc import ABC, abstractmethod
import glob
from loguru import logger
from deepagents.backends.utils import create_file_data
from deepagents.backends import StateBackend
from lang_agent.config import InstantiateConfig
from lang_agent.fs_bkends import BaseFilesystemBackend
from lang_agent.fs_bkends.base import BaseFilesystemBackend, FilesystemBackendConfig
def read_as_utf8(file_path:str):
with open(file_path, "r", encoding="utf-8") as f:
@@ -31,7 +29,7 @@ def build_skill_fs_dict(skill_dir:str, virt_path:str="/skills"):
@tyro.conf.configure(tyro.conf.SuppressFixed)
@dataclass
class StateBkConfig(InstantiateConfig):
class StateBkConfig(FilesystemBackendConfig):
_target:Type = field(default_factory=lambda:StateBk)
skills_dir:str = "./assets/skills"
@@ -40,10 +38,6 @@ class StateBkConfig(InstantiateConfig):
rt_skills_dir:str = "/skills"
"""path to directory with skills in runtime directory"""
def __post_init__(self):
err_msg = f"{self.skills_dir} does not exist"
assert osp.exists(self.skills_dir), err_msg
class StateBk(BaseFilesystemBackend):
def __init__(self, config:StateBkConfig):

View File

@@ -13,7 +13,7 @@ from langchain_core.messages import SystemMessage, HumanMessage, BaseMessage
from langchain.agents import create_agent
from langgraph.checkpoint.memory import MemorySaver
from lang_agent.config import LLMNodeConfig, load_tyro_conf
from lang_agent.config import LLMNodeConfig, load_tyro_conf, resolve_llm_api_key
from lang_agent.graphs import AnnotatedGraph, ReactGraphConfig, RoutingConfig
from lang_agent.base import GraphBase
from lang_agent.components import conv_store
@@ -104,7 +104,13 @@ class Pipeline:
if self.config.base_url is not None
else self.config.graph_config.base_url
)
self.config.graph_config.api_key = self.config.api_key
pipeline_api_key = resolve_llm_api_key(self.config.api_key)
graph_api_key = resolve_llm_api_key(
getattr(self.config.graph_config, "api_key", None)
)
resolved_api_key = pipeline_api_key or graph_api_key
self.config.api_key = resolved_api_key
self.config.graph_config.api_key = resolved_api_key
self.graph: GraphBase = self.config.graph_config.setup()

View File

@@ -21,6 +21,15 @@ http {
root /usr/share/nginx/html;
index index.html;
# Always revalidate the SPA entrypoint so clients pick up the latest
# hashed JS bundle after redeploys.
location = /index.html {
add_header Cache-Control "no-store, no-cache, must-revalidate, proxy-revalidate" always;
add_header Pragma "no-cache" always;
add_header Expires "0" always;
try_files $uri =404;
}
# Serve frontend static files
location / {
try_files $uri $uri/ /index.html;

View File

@@ -49,7 +49,7 @@ POSTGRES_PORT=5432
BACKEND_PORT=8500
# Frontend Configuration
FRONTEND_PORT=80
FRONTEND_PORT=8080
# Database Connection String (used by backend)
CONN_STR=postgresql://myapp_user:secure_password_123@postgres:5432/ai_conversations
@@ -63,7 +63,7 @@ EOF
# Build Docker images
build_images() {
echo -e "${YELLOW}Building Docker images...${NC}"
echo -e "${YELLOW}Building Docker images (including frontend)...${NC}"
cd "$PROJECT_ROOT"
# Check if docker-compose or docker compose