
Commit 1917fa3

Merge branch 'main' into fix_tool_info

2 parents: d18aa6f + a845fe1

File tree: 8 files changed, +755 −338 lines


backend/app/service/chat_service.py

Lines changed: 156 additions & 45 deletions
Large diffs are not rendered by default.

backend/app/service/task.py

Lines changed: 15 additions & 0 deletions
@@ -20,6 +20,8 @@ class Action(str, Enum):
     update_task = "update_task"  # user -> backend
     task_state = "task_state"  # backend -> user
     new_task_state = "new_task_state"  # backend -> user
+    decompose_progress = "decompose_progress"  # backend -> user (streaming decomposition)
+    decompose_text = "decompose_text"  # backend -> user (raw streaming text)
     start = "start"  # user -> backend
     create_agent = "create_agent"  # backend -> user
     activate_agent = "activate_agent"  # backend -> user
@@ -64,6 +66,17 @@ class ActionTaskStateData(BaseModel):
     action: Literal[Action.task_state] = Action.task_state
     data: dict[Literal["task_id", "content", "state", "result", "failure_count"], str | int]

+
+class ActionDecomposeProgressData(BaseModel):
+    action: Literal[Action.decompose_progress] = Action.decompose_progress
+    data: dict
+
+
+class ActionDecomposeTextData(BaseModel):
+    action: Literal[Action.decompose_text] = Action.decompose_text
+    data: dict
+
+
 class ActionNewTaskStateData(BaseModel):
     action: Literal[Action.new_task_state] = Action.new_task_state
     data: dict[Literal["task_id", "content", "state", "result", "failure_count"], str | int]
@@ -227,6 +240,8 @@ class ActionSkipTaskData(BaseModel):
     | ActionAddTaskData
     | ActionRemoveTaskData
     | ActionSkipTaskData
+    | ActionDecomposeTextData
+    | ActionDecomposeProgressData
 )
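Note: both new models carry an untyped `data` dict, so the payload shape is defined by the producer; the chat_service.py changes that emit them are in the diff not rendered above. A minimal sketch of how a handler might push these actions to the client queue, assuming the `task_lock.put_queue` coroutine used in agent.py below and purely illustrative payload keys ("text", "tasks", "is_final"):

# Hypothetical sketch only — not the actual chat_service.py wiring from this commit.
# Assumes task_lock.put_queue(...) is awaitable, as used in agent.py below; the
# payload keys ("text", "tasks", "is_final") are illustrative, since `data` is a plain dict.
from app.service.task import ActionDecomposeProgressData, ActionDecomposeTextData


async def forward_decompose_events(task_lock, text_chunk=None, subtasks=None, is_final=False):
    if text_chunk is not None:
        # Raw streaming text from the planner model.
        await task_lock.put_queue(ActionDecomposeTextData(data={"text": text_chunk}))
    if subtasks is not None:
        # A batch of decomposed subtasks, flagged when it is the final batch.
        await task_lock.put_queue(
            ActionDecomposeProgressData(
                data={
                    "tasks": [{"id": t.id, "content": t.content} for t in subtasks],
                    "is_final": is_final,
                }
            )
        )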

backend/app/utils/agent.py

Lines changed: 61 additions & 12 deletions
@@ -108,6 +108,7 @@ def __init__(
         prune_tool_calls_from_memory: bool = False,
         enable_snapshot_clean: bool = False,
         step_timeout: float | None = 900,
+        **kwargs: Any,
     ) -> None:
         super().__init__(
             system_message=system_message,
@@ -130,6 +131,7 @@ def __init__(
             prune_tool_calls_from_memory=prune_tool_calls_from_memory,
             enable_snapshot_clean=enable_snapshot_clean,
             step_timeout=step_timeout,
+            **kwargs,
         )
         self.api_task_id = api_task_id
         self.agent_name = agent_name
@@ -182,9 +184,49 @@ def step(
         total_tokens = 0

         if res is not None:
+            if isinstance(res, StreamingChatAgentResponse):
+                def _stream_with_deactivate():
+                    last_response: ChatAgentResponse | None = None
+                    try:
+                        for chunk in res:
+                            last_response = chunk
+                            yield chunk
+                    finally:
+                        final_message = ""
+                        total_tokens = 0
+                        if last_response:
+                            final_message = (
+                                last_response.msg.content if last_response.msg else ""
+                            )
+                            usage_info = (
+                                last_response.info.get("usage")
+                                or last_response.info.get("token_usage")
+                                or {}
+                            )
+                            if usage_info:
+                                total_tokens = usage_info.get("total_tokens", 0)
+                        asyncio.create_task(
+                            task_lock.put_queue(
+                                ActionDeactivateAgentData(
+                                    data={
+                                        "agent_name": self.agent_name,
+                                        "process_task_id": self.process_task_id,
+                                        "agent_id": self.agent_id,
+                                        "message": final_message,
+                                        "tokens": total_tokens,
+                                    },
+                                )
+                            )
+                        )
+
+                return StreamingChatAgentResponse(_stream_with_deactivate())
+
             message = res.msg.content if res.msg else ""
-            total_tokens = res.info["usage"]["total_tokens"]
-            traceroot_logger.info(f"Agent {self.agent_name} completed step, tokens used: {total_tokens}")
+            usage_info = res.info.get("usage") or res.info.get("token_usage") or {}
+            total_tokens = usage_info.get("total_tokens", 0) if usage_info else 0
+            traceroot_logger.info(
+                f"Agent {self.agent_name} completed step, tokens used: {total_tokens}"
+            )

         assert message is not None

@@ -533,6 +575,21 @@ def agent_model(
             )
         )

+    # Build model config, defaulting to streaming for planner
+    extra_params = options.extra_params or {}
+    model_config: dict[str, Any] = {}
+    if options.is_cloud():
+        model_config["user"] = str(options.project_id)
+    model_config.update(
+        {
+            k: v
+            for k, v in extra_params.items()
+            if k not in ["model_platform", "model_type", "api_key", "url"]
+        }
+    )
+    if agent_name == Agents.task_agent:
+        model_config["stream"] = True
+
     return ListenChatAgent(
         options.project_id,
         agent_name,
@@ -542,23 +599,15 @@ def agent_model(
             model_type=options.model_type,
             api_key=options.api_key,
             url=options.api_url,
-            model_config_dict={
-                "user": str(options.project_id),
-            }
-            if options.is_cloud()
-            else None,
-            **{
-                k: v
-                for k, v in (options.extra_params or {}).items()
-                if k not in ["model_platform", "model_type", "api_key", "url"]
-            },
+            model_config_dict=model_config or None,
         ),
         # output_language=options.language,
         tools=tools,
         agent_id=agent_id,
         prune_tool_calls_from_memory=prune_tool_calls_from_memory,
         toolkits_to_register_agent=toolkits_to_register_agent,
         enable_snapshot_clean=enable_snapshot_clean,
+        stream_accumulate=False,
     )
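The `_stream_with_deactivate` generator above leans on a `finally` block so that the deactivate event is emitted exactly once, whether the stream is fully consumed or closed early. A self-contained sketch of the same wrap-and-finalize pattern, independent of camel's `StreamingChatAgentResponse` (all names here are illustrative):

# Stand-alone illustration of the pattern; fake_stream/emit_done are made-up names.
from typing import Iterator, Optional


def fake_stream() -> Iterator[str]:
    yield "planning step 1"
    yield "planning step 2"


def emit_done(final_chunk: Optional[str]) -> None:
    print(f"stream finished, last chunk: {final_chunk!r}")


def wrap_with_finalizer(chunks: Iterator[str]) -> Iterator[str]:
    last = None
    try:
        for chunk in chunks:
            last = chunk
            yield chunk
    finally:
        # Runs when the stream is exhausted or the generator is closed early,
        # mirroring how the agent's deactivate message is sent exactly once.
        emit_done(last)


for piece in wrap_with_finalizer(fake_stream()):
    print(piece)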

backend/app/utils/workforce.py

Lines changed: 72 additions & 5 deletions
@@ -12,6 +12,7 @@
 from camel.societies.workforce.utils import TaskAssignResult
 from camel.societies.workforce.workforce_metrics import WorkforceMetrics
 from camel.societies.workforce.events import WorkerCreatedEvent
+from camel.societies.workforce.prompts import TASK_DECOMPOSE_PROMPT
 from camel.tasks.task import Task, TaskState, validate_task_content
 from app.component import code
 from app.exception.exception import UserException
@@ -65,14 +66,22 @@ def __init__(
         )
         logger.info(f"[WF-LIFECYCLE] ✅ Workforce.__init__ COMPLETED, id={id(self)}")

-    def eigent_make_sub_tasks(self, task: Task, coordinator_context: str = ""):
+    def eigent_make_sub_tasks(
+        self,
+        task: Task,
+        coordinator_context: str = "",
+        on_stream_batch=None,
+        on_stream_text=None,
+    ):
         """
         Split process_task method to eigent_make_sub_tasks and eigent_start method.

         Args:
             task: The main task to decompose
             coordinator_context: Optional context ONLY for coordinator agent during decomposition.
                 This context will NOT be passed to subtasks or worker agents.
+            on_stream_batch: Optional callback for streaming batches, signature (List[Task], bool)
+            on_stream_text: Optional callback for raw streaming text chunks
         """
         logger.info("=" * 80)
         logger.info("🧩 [DECOMPOSE] eigent_make_sub_tasks CALLED", extra={
@@ -103,7 +112,15 @@ def eigent_make_sub_tasks(self, task: Task, coordinator_context: str = ""):
         logger.info(f"[DECOMPOSE] Workforce reset complete, state: {self._state.name}")

         logger.info(f"[DECOMPOSE] Calling handle_decompose_append_task")
-        subtasks = asyncio.run(self.handle_decompose_append_task(task, reset=False, coordinator_context=coordinator_context))
+        subtasks = asyncio.run(
+            self.handle_decompose_append_task(
+                task,
+                reset=False,
+                coordinator_context=coordinator_context,
+                on_stream_batch=on_stream_batch,
+                on_stream_text=on_stream_text,
+            )
+        )
         logger.info("=" * 80)
         logger.info(f"✅ [DECOMPOSE] Task decomposition COMPLETED", extra={
             "api_task_id": self.api_task_id,
@@ -142,8 +159,45 @@ async def eigent_start(self, subtasks: list[Task]):
         self._state = WorkforceState.IDLE
         logger.info(f"[WF-LIFECYCLE] Workforce state set to IDLE")

+    def _decompose_task(self, task: Task, stream_callback=None):
+        """Decompose task with optional streaming text callback."""
+
+        decompose_prompt = str(
+            TASK_DECOMPOSE_PROMPT.format(
+                content=task.content,
+                child_nodes_info=self._get_child_nodes_info(),
+                additional_info=task.additional_info,
+            )
+        )
+        self.task_agent.reset()
+        result = task.decompose(
+            self.task_agent, decompose_prompt, stream_callback=stream_callback
+        )
+
+        if isinstance(result, Generator):
+            def streaming_with_dependencies():
+                all_subtasks = []
+                for new_tasks in result:
+                    all_subtasks.extend(new_tasks)
+                    if new_tasks:
+                        self._update_dependencies_for_decomposition(
+                            task, all_subtasks
+                        )
+                    yield new_tasks
+            return streaming_with_dependencies()
+        else:
+            subtasks = result
+            if subtasks:
+                self._update_dependencies_for_decomposition(task, subtasks)
+            return subtasks
+
     async def handle_decompose_append_task(
-        self, task: Task, reset: bool = True, coordinator_context: str = ""
+        self,
+        task: Task,
+        reset: bool = True,
+        coordinator_context: str = "",
+        on_stream_batch=None,
+        on_stream_text=None,
     ) -> List[Task]:
         """
         Override to support coordinator_context parameter.
@@ -153,6 +207,8 @@ async def handle_decompose_append_task(
             task: The task to be processed
             reset: Should trigger workforce reset (Workforce must not be running)
             coordinator_context: Optional context ONLY for coordinator during decomposition
+            on_stream_batch: Optional callback for streaming batches, signature (List[Task], bool)
+            on_stream_text: Optional callback for raw streaming text chunks

         Returns:
             List[Task]: The decomposed subtasks or the original task
@@ -186,18 +242,23 @@ async def handle_decompose_append_task(
             task.content = task_with_context

             logger.info(f"[DECOMPOSE] Calling _decompose_task with context")
-            subtasks_result = self._decompose_task(task)
+            subtasks_result = self._decompose_task(task, stream_callback=on_stream_text)

             task.content = original_content
         else:
             logger.info(f"[DECOMPOSE] Calling _decompose_task without context")
-            subtasks_result = self._decompose_task(task)
+            subtasks_result = self._decompose_task(task, stream_callback=on_stream_text)

         logger.info(f"[DECOMPOSE] _decompose_task returned, processing results")
         if isinstance(subtasks_result, Generator):
             subtasks = []
             for new_tasks in subtasks_result:
                 subtasks.extend(new_tasks)
+                if on_stream_batch:
+                    try:
+                        on_stream_batch(new_tasks, False)
+                    except Exception as e:
+                        logger.warning(f"Streaming callback failed: {e}")
             logger.info(f"[DECOMPOSE] Collected {len(subtasks)} subtasks from generator")
         else:
             subtasks = subtasks_result
@@ -218,6 +279,12 @@ async def handle_decompose_append_task(
             subtasks = [fallback_task]
             logger.info(f"[DECOMPOSE] Created fallback task: {fallback_task.id}")

+        if on_stream_batch:
+            try:
+                on_stream_batch(subtasks, True)
+            except Exception as e:
+                logger.warning(f"Final streaming callback failed: {e}")
+
         return subtasks

     async def _find_assignee(self, tasks: List[Task]) -> TaskAssignResult:
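For callers, the two new hooks are plain callables: `on_stream_text` receives raw text chunks (presumably `str`) and `on_stream_batch` receives `(List[Task], is_final)`, with the final call carrying the complete subtask list. A hedged usage sketch — `workforce` stands in for an already-constructed Workforce, and the print-based callbacks are placeholders for whatever queue forwarding the real caller does:

# Illustrative only; the real caller would forward these events to the client queue.
from typing import List

from camel.tasks.task import Task


def decompose_with_streaming(workforce, task: Task) -> List[Task]:
    def on_stream_text(chunk: str) -> None:
        # Raw planner output as it streams.
        print(chunk, end="", flush=True)

    def on_stream_batch(new_tasks: List[Task], is_final: bool) -> None:
        # Each batch of freshly decomposed subtasks; the last call gets the
        # complete list with is_final=True.
        label = "final" if is_final else "partial"
        print(f"\n[{label}] {len(new_tasks)} subtask(s)")

    return workforce.eigent_make_sub_tasks(
        task,
        coordinator_context="",
        on_stream_batch=on_stream_batch,
        on_stream_text=on_stream_text,
    )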

src/components/ChatBox/ProjectSection.tsx

Lines changed: 45 additions & 4 deletions
@@ -23,7 +23,39 @@ export const ProjectSection = React.forwardRef<HTMLDivElement, ProjectSectionPro
   onSkip,
   isPauseResumeLoading
 }, ref) => {
-  const chatState = chatStore.getState();
+  // Subscribe to store changes with throttling to prevent excessive re-renders
+  const [chatState, setChatState] = React.useState(() => chatStore.getState());
+
+  React.useEffect(() => {
+    let timeoutId: NodeJS.Timeout | null = null;
+    let latestState: any = null;
+
+    const unsubscribe = chatStore.subscribe((state) => {
+      latestState = state;
+
+      // Throttle updates to max once per 100ms
+      if (!timeoutId) {
+        timeoutId = setTimeout(() => {
+          if (latestState) {
+            setChatState(latestState);
+          }
+          timeoutId = null;
+        }, 100);
+      }
+    });
+
+    return () => {
+      unsubscribe();
+      if (timeoutId) {
+        clearTimeout(timeoutId);
+        // Apply final state on cleanup
+        if (latestState) {
+          setChatState(latestState);
+        }
+      }
+    };
+  }, [chatStore]);
+
   const activeTaskId = chatState.activeTaskId;

   if (!activeTaskId || !chatState.tasks[activeTaskId]) {
@@ -33,8 +65,17 @@ export const ProjectSection = React.forwardRef<HTMLDivElement, ProjectSectionPro
   const task = chatState.tasks[activeTaskId];
   const messages = task.messages || [];

-  // Group messages by query cycles and show in chronological order (oldest first)
-  const queryGroups = groupMessagesByQuery(messages);
+  // Create a stable key based on messages content to prevent excessive re-renders
+  const lastMessage = messages[messages.length - 1];
+  const messagesKey = React.useMemo(() => {
+    // Only re-compute when message count or last message changes
+    return `${messages.length}-${lastMessage?.id || ''}-${lastMessage?.content?.length || 0}`;
+  }, [messages.length, lastMessage?.id, lastMessage?.content?.length]);
+
+  // Memoize grouping to prevent re-creating objects on every render
+  const queryGroups = React.useMemo(() => {
+    return groupMessagesByQuery(messages);
+  }, [messagesKey]);

   return (
     <motion.div
@@ -152,7 +193,7 @@ function groupMessagesByQuery(messages: any[]) {
         otherMessages: []
       };
     }
-    } else {
+  } else {
     // Other messages (assistant responses, errors, etc.)
     if (currentGroup) {
       currentGroup.otherMessages.push(message);
