# NOTE(review): removed non-code scraping artifacts (page header, file-size
# line, commit hash, and a line-number gutter) that are not valid Python and
# would prevent this module from being imported.
from __future__ import annotations
import logging
import pdb
from typing import List, Optional, Type, Dict
from browser_use.agent.message_manager.service import MessageManager
from browser_use.agent.message_manager.views import MessageHistory
from browser_use.agent.prompts import SystemPrompt, AgentMessagePrompt
from browser_use.agent.views import ActionResult, AgentStepInfo, ActionModel
from browser_use.browser.views import BrowserState
from browser_use.agent.message_manager.service import MessageManagerSettings
from browser_use.agent.views import ActionResult, AgentOutput, AgentStepInfo, MessageManagerState
from langchain_core.language_models import BaseChatModel
from langchain_anthropic import ChatAnthropic
from langchain_core.language_models import BaseChatModel
from langchain_core.messages import (
AIMessage,
BaseMessage,
HumanMessage,
ToolMessage,
SystemMessage
)
from langchain_openai import ChatOpenAI
from ..utils.llm import DeepSeekR1ChatOpenAI
from .custom_prompts import CustomAgentMessagePrompt
logger = logging.getLogger(__name__)
class CustomMessageManagerSettings(MessageManagerSettings):
    """Settings for :class:`CustomMessageManager`.

    Extends the upstream ``MessageManagerSettings`` with a configurable
    prompt class used to render browser state into a user message.
    """

    # Prompt class used by add_state_message() to format the browser state.
    # Defaults to the upstream AgentMessagePrompt; pass
    # CustomAgentMessagePrompt (imported above) to use the custom formatting.
    agent_prompt_class: Type[AgentMessagePrompt] = AgentMessagePrompt
class CustomMessageManager(MessageManager):
    """Message manager that collapses all optional task context into a single
    context message and supports token-budget trimming plus targeted removal
    of state (Human) messages from the history."""

    def __init__(
        self,
        task: str,
        system_message: SystemMessage,
        settings: MessageManagerSettings = MessageManagerSettings(),
        state: MessageManagerState = MessageManagerState(),
    ):
        # NOTE(review): mutable default arguments are shared across calls;
        # kept as-is for signature compatibility with the upstream
        # MessageManager — callers should pass fresh instances.
        super().__init__(
            task=task,
            system_message=system_message,
            settings=settings,
            state=state,
        )

    def _init_messages(self) -> None:
        """Initialize the history with the system prompt and, when any
        optional context exists, one combined context message.

        The task context, sensitive-data placeholder notice, and available
        file paths are concatenated into ``self.context_content`` so the
        history starts with at most two messages (system + context).
        """
        self._add_message_with_tokens(self.system_prompt)

        self.context_content = ""
        if self.settings.message_context:
            self.context_content += 'Context for the task' + self.settings.message_context
        if self.settings.sensitive_data:
            info = f'Here are placeholders for sensitive data: {list(self.settings.sensitive_data.keys())}'
            info += 'To use them, write <secret>the placeholder name</secret>'
            self.context_content += info
        if self.settings.available_file_paths:
            filepaths_msg = f'Here are file paths you can use: {self.settings.available_file_paths}'
            self.context_content += filepaths_msg
        # Only materialize a HumanMessage when there is actual context text.
        if self.context_content:
            context_message = HumanMessage(content=self.context_content)
            self._add_message_with_tokens(context_message)

    def cut_messages(self) -> None:
        """Trim the oldest non-protected messages until the history fits
        within ``settings.max_input_tokens``.

        The system prompt (index 0) is always preserved; the combined
        context message (index 1) is preserved only when one was added.
        """
        diff = self.state.history.current_tokens - self.settings.max_input_tokens
        # BUGFIX: the original used `is not None`, which is always true
        # because context_content is initialized to "" in _init_messages —
        # that wrongly protected history slot 1 even when no context message
        # exists. A truthiness check matches the condition under which the
        # context message was actually appended.
        min_message_len = 2 if self.context_content else 1
        while diff > 0 and len(self.state.history.messages) > min_message_len:
            # Pop the oldest trimmable message and refund its token count.
            msg = self.state.history.messages.pop(min_message_len)
            self.state.history.current_tokens -= msg.metadata.tokens
            diff = self.state.history.current_tokens - self.settings.max_input_tokens

    def add_state_message(
        self,
        state: BrowserState,
        actions: Optional[List[ActionModel]] = None,
        result: Optional[List[ActionResult]] = None,
        step_info: Optional[AgentStepInfo] = None,
        use_vision: bool = True,
    ) -> None:
        """Render the current browser state (plus recent actions/results)
        through ``settings.agent_prompt_class`` and append it to the history
        as a human message.

        Args:
            state: Current browser state to render.
            actions: Actions taken in the previous step, if any.
            result: Results of those actions, if any.
            step_info: Step counter/limit info for the prompt, if any.
            use_vision: Whether the rendered message may include screenshots.
        """
        state_message = self.settings.agent_prompt_class(
            state,
            actions,
            result,
            include_attributes=self.settings.include_attributes,
            step_info=step_info,
        ).get_user_message(use_vision)
        self._add_message_with_tokens(state_message)

    def _remove_state_message_by_index(self, remove_ind: int = -1) -> None:
        """Remove the ``abs(remove_ind)``-th most recent HumanMessage from
        the history (``-1`` removes the latest) and refund its token count.

        No-op when the history contains fewer than ``abs(remove_ind)``
        human messages.
        """
        i = len(self.state.history.messages) - 1
        remove_cnt = 0
        # Walk backwards so remove_ind counts from the most recent message.
        while i >= 0:
            if isinstance(self.state.history.messages[i].message, HumanMessage):
                remove_cnt += 1
                if remove_cnt == abs(remove_ind):
                    msg = self.state.history.messages.pop(i)
                    self.state.history.current_tokens -= msg.metadata.tokens
                    break
            i -= 1