-
Notifications
You must be signed in to change notification settings - Fork 88
Open
Description
Some of the APIs have been changed; here is the latest version (I believe it works):
import os
import autogen
import memgpt.autogen.memgpt_agent as memgpt_autogen
#import memgpt.autogen.interface as autogen_interface
import memgpt.agent as agent
import memgpt.system as system
import memgpt.utils as utils
import memgpt.presets.presets as presets
import memgpt.constants as constants
from memgpt.autogen.memgpt_agent import create_memgpt_autogen_agent_from_config
# --- Local LLM endpoint configuration --------------------------------------
# Point the OpenAI-compatible client libraries at a locally hosted server.
BASE_URL = 'http://0.0.0.0:5000'

os.environ.update({
    'OPENAI_API_HOST': BASE_URL,
    'OPENAI_API_KEY': 'none',
    'OPENAI_API_BASE': f"{BASE_URL}/v1",
})

# Model configuration consumed by both the AutoGen and MemGPT agents below.
config_list = [
    {
        "model": "dolphin-2.6-mixtral-8x7b",
        "model_endpoint_type": "webui",
        # "openai_key":"none", # add this if you use "openai" as "model_endpoint_type"
        "model_endpoint": os.environ['OPENAI_API_HOST'],
        "model_wrapper": "dolphin-2.1-mistral-7b",
    },
]

# A fixed seed keeps AutoGen's response caching deterministic across runs.
llm_config = {"config_list": config_list, "seed": 42}

# Toggle between a plain AutoGen coder and a MemGPT-backed coder further down.
USE_MEMGPT = True
# Human-admin proxy: supplies auto-replies and executes generated code locally.
_auto_reply = (
    "You are going to figure all out by your own. "
    "Work by yourself, the user won't reply until you output `TERMINATE` to end the conversation."
)
user_proxy = autogen.UserProxyAgent(
    name="User_proxy",
    system_message="A human admin.",
    # Run code found in the last two messages inside ./groupchat.
    code_execution_config={"work_dir": "groupchat", "last_n_messages": 2},
    human_input_mode="TERMINATE",  # only prompt the human when the chat terminates
    default_auto_reply=_auto_reply,
)
# Persona/human profiles intended for the MemGPT agent's core memory.
persona = "I am a 10x engineer, trained in Python. I was the first engineer at Uber."
human = "Im a team manager at this company"

# NOTE: the older construction path via presets.use_preset(...) plus an
# autogen_interface.AutoGenInterface() is superseded in recent MemGPT
# releases by create_memgpt_autogen_agent_from_config.
memgpt_agent = create_memgpt_autogen_agent_from_config(
    "MemGPT_coder",
    llm_config=llm_config,
    # Plain string literal: the original used an f-string with no
    # placeholders (ruff F541); the string bytes are unchanged.
    system_message="I am a 10x engineer, trained in Python. I was the first engineer at Uber ",
    default_auto_reply="...",  # non-empty auto-reply is required for LM Studio
)
if USE_MEMGPT:
    # Swap the plain AutoGen coder for a MemGPT agent: it plays the same role
    # in the chat but gains MemGPT's persistent, self-editing memory.
    print("\nMemGPT Agent at work\n")
    coder = memgpt_autogen.MemGPTAgent(
        name="MemGPT_coder",
        agent=memgpt_agent,
    )
else:
    # As in the stock AutoGen example: a plain AssistantAgent plays the coder.
    # (Useless f-prefix on the system_message removed — ruff F541; string
    # bytes unchanged.)
    coder = autogen.AssistantAgent(
        name="Coder",
        llm_config=llm_config,
        system_message="I am a 10x engineer, trained in Python. I was the first engineer at Uber",
        human_input_mode="TERMINATE",
    )
# Begin the group chat with a message from the user
user_proxy.initiate_chat(
memgpt_agent,
message="Write a Function to print Numbers 1 to 10"
)I'm getting an error for another reason I guess with memgpt and mixtral support.
Metadata
Metadata
Assignees
Labels
No labels