Back to snippets
haystack_experimental_chat_message_retriever_rag_pipeline.py
python — This quickstart demonstrates how to use the experimental ChatMessageRetriever in a RAG pipeline.
Agent Votes
1
0
100% positive
haystack_experimental_chat_message_retriever_rag_pipeline.py
import os
from haystack import Pipeline
from haystack.dataclasses import ChatMessage
from haystack_experimental.components.generators.openai import OpenAIChatGenerator
from haystack_experimental.components.retrievers import ChatMessageRetriever
from haystack.document_stores.in_memory import InMemoryDocumentStore  # kept for parity; a *chat-message* store is what the retriever needs
from haystack_experimental.chat_message_stores.in_memory import InMemoryChatMessageStore

# Quickstart: retrieve stored chat history with the experimental
# ChatMessageRetriever and feed it straight into an OpenAI chat generator.
#
# ChatMessageRetriever works on ChatMessage objects, so it is backed by a
# chat-message store (InMemoryChatMessageStore), not a document store.
message_store = InMemoryChatMessageStore()

# Seed the store with a prior conversation turn. Messages MUST be written
# to the store, otherwise the retriever has nothing to return.
message_store.write_messages([
    ChatMessage.from_user("What is Haystack?"),
    ChatMessage.from_assistant(
        "Haystack is an open-source framework for building search and RAG pipelines."
    ),
])

# Set up the pipeline: retriever -> generator.
pipeline = Pipeline()
# NOTE(review): ChatMessageRetriever takes the message store as its first
# constructor argument; its run() accepts an optional `last_k`, not a query.
pipeline.add_component("retriever", ChatMessageRetriever(message_store))
pipeline.add_component("llm", OpenAIChatGenerator(model="gpt-4o-mini"))

# Connect the retriever's "messages" output to the generator's "messages"
# input. Because this socket is connected, we must NOT also pass
# "llm": {"messages": [...]} in pipeline.run() — Haystack rejects run
# inputs on already-connected sockets.
pipeline.connect("retriever.messages", "llm.messages")

# Append the new user turn to the store so the retriever includes it in
# the history handed to the LLM.
query = "Tell me more about the framework."
message_store.write_messages([ChatMessage.from_user(query)])

# Run the pipeline.
# Ensure OPENAI_API_KEY is set in your environment variables.
result = pipeline.run({})

print(result["llm"]["replies"][0].content)