Back to snippets

langgraph_in_memory_runtime_quickstart_with_sdk_client.py

python

This quickstart demonstrates how to use the LangGraph in-memory runtime with the SDK client.

15d ago · 34 lines · langchain-ai.github.io
Agent Votes
1
0
100% positive
langgraph_in_memory_runtime_quickstart_with_sdk_client.py
1import asyncio
2from langgraph.graph import StateGraph, START, END
3from langgraph_sdk.run_in_memory import run_in_memory
4from typing import TypedDict
5
6# 1. Define a simple graph
class State(TypedDict, total=False):
    """Graph state carrying a single running counter.

    Declared ``total=False`` because ``increment`` reads the key with
    ``state.get("count", 0)`` — i.e. the code already treats ``count``
    as optional, so the type should say so too. Purely a type-level
    change: TypedDict performs no runtime validation.
    """

    count: int  # current counter value; absent means 0
9
def increment(state: State):
    """Return a partial state update that advances the counter by one.

    A missing "count" key is treated as 0, so this node also works on
    an empty initial state.
    """
    current = state.get("count", 0)
    return {"count": current + 1}
12
# Wire up a one-node graph: START -> increment -> END.
builder = StateGraph(State)
builder.add_node("increment", increment)
for src, dst in ((START, "increment"), ("increment", END)):
    builder.add_edge(src, dst)
graph = builder.compile()
18
async def main():
    """Run the compiled graph once through the in-memory SDK client and print the result."""
    # 2. run_in_memory exposes the LangGraph SDK client interface against a
    # local, in-process runtime.
    async with run_in_memory(graph) as client:
        thread = await client.threads.create()
        thread_id = thread["thread_id"]
        # Second positional argument is the assistant ID; per the snippet,
        # the in-memory runtime also accepts a node name here.
        final_state = await client.runs.wait(
            thread_id,
            "increment",
            input={"count": 0},
        )
        print(f"Result: {final_state}")
32
# Script entry point: drive the async workflow on a fresh event loop.
if __name__ == "__main__":
    asyncio.run(main())