Hi Team,
I'm trying to run the Chapter 4 code (below) in VS Code and I'm facing the issue described below:
# Third-party imports for the LangGraph agent example (Chapter 4).
from langgraph.graph import StateGraph, END
from typing import TypedDict, Annotated
import operator
from langchain_core.messages import AnyMessage, SystemMessage, HumanMessage, ToolMessage
from langchain_community.chat_models.oci_generative_ai import ChatOCIGenAI
import oci
from langchain_community.tools.tavily_search import TavilySearchResults
from dotenv import load_dotenv
# Load environment variables (e.g. TAVILY_API_KEY) from a local .env file.
_ = load_dotenv()
# from langgraph.checkpoint.sqlite import SqliteSaver
from langgraph.checkpoint.memory import MemorySaver
# memory = SqliteSaver.from_conn_string(":memory:")
# NOTE(review): this MemorySaver is never used — it is shadowed by the
# SqliteSaver created further down; keep exactly one checkpointer.
memory = MemorySaver()
# OCI Generative AI configuration.
# FIX: the original paste used curly/smart quotes (“ ” ‘ ’), which are a
# SyntaxError in Python — all string literals must use plain ASCII quotes.
compartment_id = "ocid1.compartment.oc1..asjfhpggrjugg7ad7byla"
CONFIG_PROFILE = "DEFAULT"
# Load OCI credentials from a local config file using the DEFAULT profile.
config = oci.config.from_file('config', CONFIG_PROFILE)
endpoint = "xxx"  # deleted the endpoint url link
generative_ai_inference_client = oci.generative_ai_inference.GenerativeAiInferenceClient(
    config=config, service_endpoint=endpoint
)

# Chat-model wrapper around OCI GenAI (Cohere Command R+ 08-2024).
model = ChatOCIGenAI(
    client=generative_ai_inference_client,
    model_id="cohere.command-r-plus-08-2024",
    service_endpoint="xxx",  # deleted the endpoint url link
    compartment_id=compartment_id,
    model_kwargs={"temperature": 0, "max_tokens": 4000},
)

# Tavily web-search tool; each call returns at most 2 results.
tool = TavilySearchResults(max_results=2)
class AgentState(TypedDict):
    """Graph state: a list of messages.

    The `operator.add` reducer tells LangGraph to APPEND each node's
    returned messages to the existing list instead of replacing it.
    """
    messages: Annotated[list[AnyMessage], operator.add]
from langgraph.checkpoint.sqlite import SqliteSaver

# FIX: restored ASCII quotes (the paste used curly quotes — a SyntaxError).
# NOTE(review): in current langgraph-checkpoint-sqlite releases,
# from_conn_string() returns a CONTEXT MANAGER, not a saver instance, so
# this bare call is redundant — the usable checkpointer is the one opened
# in the `with SqliteSaver.from_conn_string(...) as memory:` block below.
memory = SqliteSaver.from_conn_string(":memory:")
class Agent:
    """ReAct-style agent: an "llm" node that may request tool calls and an
    "action" node that executes them, looping until no tool calls remain.
    """

    # FIX: the original defined `def init(...)` (missing the double
    # underscores), so Python's default __init__ ran instead and the graph,
    # tools, and model were never attached to the instance.
    def __init__(self, model, tools, checkpointer, system=""):
        """Build and compile the two-node LangGraph state machine.

        model: a chat model supporting .bind_tools()
        tools: list of LangChain tools, keyed by .name for dispatch
        checkpointer: a LangGraph checkpointer (e.g. SqliteSaver)
        system: optional system prompt prepended to every LLM call
        """
        self.system = system
        graph = StateGraph(AgentState)
        graph.add_node("llm", self.call_openai)
        graph.add_node("action", self.take_action)
        # After the LLM speaks: run tools if it asked for any, else stop.
        graph.add_conditional_edges("llm", self.exists_action, {True: "action", False: END})
        graph.add_edge("action", "llm")
        graph.set_entry_point("llm")
        self.graph = graph.compile(checkpointer=checkpointer)
        self.tools = {t.name: t for t in tools}
        self.model = model.bind_tools(tools)

    def call_openai(self, state: AgentState):
        """Invoke the LLM on the conversation, prepending the system prompt."""
        messages = state['messages']
        if self.system:
            messages = [SystemMessage(content=self.system)] + messages
        message = self.model.invoke(messages)
        return {'messages': [message]}

    def exists_action(self, state: AgentState):
        """Return True when the last LLM message requested tool calls."""
        result = state['messages'][-1]
        return len(result.tool_calls) > 0

    def take_action(self, state: AgentState):
        """Execute each requested tool call; wrap results as ToolMessages."""
        tool_calls = state['messages'][-1].tool_calls
        results = []
        for t in tool_calls:
            print(f"Calling: {t}")
            result = self.tools[t['name']].invoke(t['args'])
            results.append(
                ToolMessage(tool_call_id=t['id'], name=t['name'], content=str(result))
            )
        print("Back to the model!")
        return {'messages': results}
# FIX: restored ASCII triple quotes — the paste had garbled curly quotes
# (“”“ / “””) which are invalid Python syntax.
prompt = """You are a smart research assistant. Use the search engine to look up information.
You are allowed to make multiple calls (either together or in sequence).
Only look up information when you are sure of what you want.
If you need to look up some information before asking a follow up question, you are allowed to do that!
"""

# SqliteSaver.from_conn_string() is a context manager in current releases,
# so all work that touches the checkpointer must stay inside this block.
# NOTE(review): the "Type is not msgpack serializable: AIMessage" traceback
# below typically means langchain-core and langgraph-checkpoint versions are
# out of sync — upgrade both packages together so the checkpointer's
# serializer recognizes LangChain message types; verify installed versions.
with SqliteSaver.from_conn_string(":memory:") as memory:
    abot = Agent(model, [tool], system=prompt, checkpointer=memory)
    messages = [HumanMessage(content="What is the weather in sf?")]
    thread = {"configurable": {"thread_id": "1"}}
    for event in abot.graph.stream({"messages": messages}, thread):
        for v in event.values():
            print(v['messages'])
I changed the LLM being used here to Oracle OCI Generative AI, and I also added the
with SqliteSaver.from_conn_string(":memory:") as memory:
    abot = Agent(model, [tool], system=prompt, checkpointer=memory)
code as suggested in a few other comments here in the community.
However, I'm now getting the following error:
Traceback (most recent call last):
File “/Users/sramired/Documents/ShivaWork/Learning/MyGenAIWork/DeepLearning/AIAgentsinLangGraph/2-LangGraphComponentswithPersistence_v1.py”, line 96, in
for event in abot.graph.stream({“messages”: messages}, thread):
File “/opt/anaconda3/lib/python3.12/site-packages/langgraph/pregel/init.py”, line 2292, in stream
with SyncPregelLoop(
File “/opt/anaconda3/lib/python3.12/site-packages/langgraph/pregel/loop.py”, line 1088, in exit
return self.stack.exit(exc_type, exc_value, traceback)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File “/opt/anaconda3/lib/python3.12/contextlib.py”, line 610, in exit
raise exc_details[1]
File “/opt/anaconda3/lib/python3.12/contextlib.py”, line 595, in exit
if cb(*exc_details):
^^^^^^^^^^^^^^^^
File “/opt/anaconda3/lib/python3.12/site-packages/langgraph/pregel/executor.py”, line 120, in exit
task.result()
File “/opt/anaconda3/lib/python3.12/concurrent/futures/_base.py”, line 449, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File “/opt/anaconda3/lib/python3.12/concurrent/futures/_base.py”, line 401, in __get_result
raise self._exception
File “/opt/anaconda3/lib/python3.12/site-packages/langgraph/pregel/executor.py”, line 83, in done
task.result()
File “/opt/anaconda3/lib/python3.12/concurrent/futures/_base.py”, line 449, in result
return self.__get_result()
^^^^^^^^^^^^^^^^^^^
File “/opt/anaconda3/lib/python3.12/concurrent/futures/_base.py”, line 401, in __get_result
raise self._exception
File “/opt/anaconda3/lib/python3.12/concurrent/futures/thread.py”, line 58, in run
result = self.fn(*self.args, **self.kwargs)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File “/opt/anaconda3/lib/python3.12/site-packages/langgraph/checkpoint/sqlite/init.py”, line 458, in put_writes
*self.serde.dumps_typed(value),
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File “/opt/anaconda3/lib/python3.12/site-packages/langgraph/checkpoint/serde/jsonplus.py”, line 210, in dumps_typed
raise exc
File “/opt/anaconda3/lib/python3.12/site-packages/langgraph/checkpoint/serde/jsonplus.py”, line 206, in dumps_typed
return “msgpack”, _msgpack_enc(obj)
^^^^^^^^^^^^^^^^^
File “/opt/anaconda3/lib/python3.12/site-packages/langgraph/checkpoint/serde/jsonplus.py”, line 628, in _msgpack_enc
return ormsgpack.packb(data, default=_msgpack_default, option=_option)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
TypeError: Type is not msgpack serializable: AIMessage
Can somebody please help me with this issue? I'm not sure why I am facing it.
Regards,
Shiva Kiran