Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 7 additions & 1 deletion camel/agents/chat_agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -495,7 +495,13 @@ def __init__(
self.model_backend.token_counter,
self.model_backend.token_limit,
)

# check the memory is a class type, then instantiate it
if memory is not None and inspect.isclass(memory):
memory = memory(
context_creator,
window_size=message_window_size,
agent_id=self.agent_id,
)
self._memory: AgentMemory = memory or ChatHistoryMemory(
context_creator,
window_size=message_window_size,
Expand Down
2 changes: 2 additions & 0 deletions camel/memories/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
ChatHistoryMemory,
LongtermAgentMemory,
VectorDBMemory,
BrowserChatHistoryMemory,
)
from .base import AgentMemory, BaseContextCreator, MemoryBlock
from .blocks.chat_history_block import ChatHistoryBlock
Expand All @@ -32,6 +33,7 @@
'ScoreBasedContextCreator',
'ChatHistoryMemory',
'VectorDBMemory',
'BrowserChatHistoryMemory',
'ChatHistoryBlock',
'VectorDBBlock',
'LongtermAgentMemory',
Expand Down
170 changes: 170 additions & 0 deletions camel/memories/agent_memories.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,13 +14,15 @@

import warnings
from typing import List, Optional
import re

from camel.memories.base import AgentMemory, BaseContextCreator
from camel.memories.blocks import ChatHistoryBlock, VectorDBBlock
from camel.memories.records import ContextRecord, MemoryRecord
from camel.storages.key_value_storages.base import BaseKeyValueStorage
from camel.storages.vectordb_storages.base import BaseVectorStorage
from camel.types import OpenAIBackendRole
from camel.messages.func_message import FunctionCallingMessage


class ChatHistoryMemory(AgentMemory):
Expand Down Expand Up @@ -319,3 +321,171 @@ def remove_records_by_indices(
) -> List[MemoryRecord]:
r"""Removes records at specified indices from chat history."""
return self.chat_history_block.remove_records_by_indices(indices)


class BrowserChatHistoryMemory(AgentMemory):
    r"""A browser-agent memory wrapper of :obj:`ChatHistoryBlock`.

    Compared to a plain chat-history memory, this memory can optionally
    prune noisy web-page content from past tool-call results when building
    the context, to save tokens and improve context quality.

    Args:
        context_creator (BaseContextCreator): A model context creator.
        storage (BaseKeyValueStorage, optional): A storage backend for storing
            chat history. If `None`, an :obj:`InMemoryKeyValueStorage`
            will be used. (default: :obj:`None`)
        window_size (int, optional): The number of recent chat messages to
            retrieve. If not provided, the entire chat history will be
            retrieved. (default: :obj:`None`)
        agent_id (str, optional): The ID of the agent associated with the chat
            history.
        prune_history_tool_calls (bool, optional): Whether to clean the web
            content of past tool-call results during :meth:`retrieve`.
            (default: :obj:`True`)
    """

    def __init__(
        self,
        context_creator: BaseContextCreator,
        storage: Optional[BaseKeyValueStorage] = None,
        window_size: Optional[int] = None,
        agent_id: Optional[str] = None,
        prune_history_tool_calls: bool = True,
    ) -> None:
        if window_size is not None and not isinstance(window_size, int):
            raise TypeError("`window_size` must be an integer or None.")
        if window_size is not None and window_size < 0:
            raise ValueError("`window_size` must be non-negative.")
        self._context_creator = context_creator
        self._window_size = window_size
        self._chat_history_block = ChatHistoryBlock(storage=storage)
        self._agent_id = agent_id
        self.prune_history_tool_calls = prune_history_tool_calls

    @property
    def agent_id(self) -> Optional[str]:
        return self._agent_id

    @agent_id.setter
    def agent_id(self, val: Optional[str]) -> None:
        self._agent_id = val

    def clean_lines(self, content: str) -> str:
        r"""Keep only the meaningful web content.

        Each line is stripped of bracketed annotations, leading labels,
        and surrounding punctuation noise; empty results are dropped and
        duplicates are removed while preserving the original line order.

        Args:
            content (str): The raw web content from the browser toolkit to
                be cleaned.

        Returns:
            str: The cleaned, de-duplicated content joined by newlines.
        """
        if not content:
            return ""
        # dict keys de-duplicate while preserving insertion order; a plain
        # set would scramble the order of the page content.
        seen: "dict[str, None]" = {}
        for line in content.splitlines():
            # Trim whitespace.
            line = line.strip()
            # Remove everything in square brackets, including the brackets.
            line = re.sub(r'\[.*?\]', '', line)
            # If the line starts with "- /url:", keep only the URL part.
            m = re.match(r'^-\s*/url:\s*(.+)', line)
            if m:
                line = m.group(1)
            else:
                # Remove labels starting with "-" and any non-space,
                # non-colon chars, followed by an optional space or colon.
                line = re.sub(r'^-\s*[^:\s]+\s*:? ?', '', line)
            # Remove trailing colons and spaces.
            line = re.sub(r'[:\s]+$', '', line)
            # Remove leading and trailing non-alphanumeric characters.
            line = re.sub(r'^[^A-Za-z0-9]+|[^A-Za-z0-9]+$', '', line)
            if line:
                seen[line] = None
        return '\n'.join(seen)

    def retrieve(self) -> List[ContextRecord]:
        r"""Retrieve context records from chat history memory.

        When ``prune_history_tool_calls`` is enabled, the web content of
        past tool-call results is cleaned via :meth:`clean_lines`; the most
        recent record is always kept untouched so the model sees the latest
        page in full.

        Returns:
            List[ContextRecord]: The (possibly pruned) context records.
        """
        records = self._chat_history_block.retrieve(self._window_size)
        if (
            self._window_size is not None
            and len(records) == self._window_size
        ):
            warnings.warn(
                f"Chat history window size limit ({self._window_size}) "
                f"reached. Some earlier messages will not be included in "
                f"the context. Consider increasing window_size if you need "
                f"a longer context.",
                UserWarning,
                stacklevel=2,
            )
        # Without pruning (or with no history at all) the records are
        # returned as-is.  NOTE: the original code only defined
        # `pruned_records` inside the pruning branch, raising NameError
        # when pruning was disabled, and indexed `records[-1]` on an empty
        # list — both fixed here.
        if not self.prune_history_tool_calls or not records:
            return records

        # Remove history tool-call noise from past messages only, to save
        # token usage and improve context quality.
        pruned_records: List[ContextRecord] = []
        for record in records[:-1]:
            is_tool_result = (
                record.memory_record.role_at_backend
                == OpenAIBackendRole.FUNCTION
            )
            if is_tool_result and isinstance(
                record.memory_record.message, FunctionCallingMessage
            ):
                result = record.memory_record.message.result
                if isinstance(result, dict):
                    # We keep (and clean) the snapshot content only.
                    web_content = result.get("snapshot", "")
                    result["snapshot"] = self.clean_lines(web_content)
                elif isinstance(result, str):
                    record.memory_record.message.result = self.clean_lines(
                        result
                    )
                else:
                    raise ValueError(
                        "Unsupported type for FunctionCallingMessage.result"
                    )
            pruned_records.append(record)
        # Keep the last message as-is.
        pruned_records.append(records[-1])
        return pruned_records

    def write_records(self, records: List[MemoryRecord]) -> None:
        r"""Write records to the chat history storage.

        Records without an agent id are stamped with this memory's
        ``agent_id`` before being persisted.
        """
        for record in records:
            # Assign the agent_id to records that lack one.
            if record.agent_id == "" and self.agent_id is not None:
                record.agent_id = self.agent_id
        self._chat_history_block.write_records(records)

    def get_context_creator(self) -> BaseContextCreator:
        r"""Return the context creator used by this memory."""
        return self._context_creator

    def clear(self) -> None:
        r"""Remove all records from the chat history."""
        self._chat_history_block.clear()

    def clean_tool_calls(self) -> None:
        r"""Removes tool call messages from memory.

        This method removes all FUNCTION/TOOL role messages and any
        ASSISTANT messages that contain tool_calls in their meta_dict to
        save token usage.
        """
        # OpenAIBackendRole is already imported at module level; the
        # original redundant local import has been removed.
        record_dicts = self._chat_history_block.storage.load()
        if not record_dicts:
            return

        # Roles that are always tool-related and should be dropped.
        tool_roles = {
            OpenAIBackendRole.FUNCTION.value,
            OpenAIBackendRole.TOOL.value,
        }

        # Identify indices of tool-related messages.
        indices_to_remove = []
        for i, record in enumerate(record_dicts):
            role = record.get('role_at_backend')
            if role in tool_roles:
                indices_to_remove.append(i)
            # ASSISTANT messages that issued tool calls are also removed.
            elif role == OpenAIBackendRole.ASSISTANT.value:
                meta_dict = record.get('meta_dict', {})
                if meta_dict and 'tool_calls' in meta_dict:
                    indices_to_remove.append(i)

        # Delete from the end so earlier indices stay valid.
        for i in reversed(indices_to_remove):
            del record_dicts[i]

        # Save the modified records back to storage.
        self._chat_history_block.storage.save(record_dicts)
Loading
Loading