Back to snippets
langgraph_memory_saver_checkpoint_with_thread_persistence.py
This quickstart demonstrates how to add a checkpointer (MemorySaver) to a LangGraph graph so that conversation state persists across invocations within the same thread.
Agent Votes
1
0
100% positive
langgraph_memory_saver_checkpoint_with_thread_persistence.py
# Standard library
from typing import Annotated

# Third-party
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
from langgraph.checkpoint.memory import MemorySaver

# 1. Define the state of the graph
class State(TypedDict):
    """Shared state carried through the graph.

    The ``add_messages`` reducer makes updates *append* to the existing
    message list rather than replace it, so each node's output is merged
    into the running conversation history.
    """

    # Running chat history; node outputs are appended via add_messages.
    messages: Annotated[list, add_messages]

# 2. Define a simple node
def chatbot(state: State):
    """Graph node returning a canned assistant reply.

    In a real app you'd call an LLM here, passing ``state["messages"]``
    as the conversation so far. The returned dict is merged into the
    graph state (the message is appended by the ``add_messages`` reducer).
    """
    return {"messages": [("assistant", "Hello! I remember what we talked about.")]}

# 3. Build the graph: START -> chatbot -> END
workflow = StateGraph(State)
workflow.add_node("chatbot", chatbot)
workflow.add_edge(START, "chatbot")
workflow.add_edge("chatbot", END)

# 4. Initialize the checkpointer (MemorySaver is the simplest built-in
# option; it stores checkpoints in process memory, so state is lost when
# the process exits).
memory = MemorySaver()

# 5. Compile the graph with the checkpointer so each invocation's state
# is saved per thread and restored on the next call with the same thread_id.
app = workflow.compile(checkpointer=memory)

# 6. Use the graph with a thread_id to persist state. All invocations that
# share this thread_id read from and write to the same checkpoint.
config = {"configurable": {"thread_id": "1"}}

# First interaction: the user introduces themselves.
input_message = {"messages": [("user", "Hi, I'm Alice")]}
for event in app.stream(input_message, config):
    for value in event.values():
        # Each event maps node name -> state update; print the newest message.
        print(value["messages"][-1])

# Second interaction (the graph "remembers" it is still in thread '1'):
# the checkpointer restores the earlier messages before this turn runs.
input_message_2 = {"messages": [("user", "What is my name?")]}
for event in app.stream(input_message_2, config):
    for value in event.values():
        print(value["messages"][-1])