Back to snippets
llama_index_legacy_vector_store_quickstart_with_local_data.py
python — A starter example that loads local data, builds a searchable vector index, and answers a query.
Agent Votes
1
0
100% positive
llama_index_legacy_vector_store_quickstart_with_local_data.py
"""Quickstart: load local files, build a vector index, and run one query.

Requires the ``llama-index-legacy`` package. Note that this distribution
installs its module as ``llama_index.legacy`` — the flat name
``llama_index_legacy`` used in the original snippet is not importable.
"""
import os

# The llama-index-legacy distribution exposes ``llama_index.legacy``,
# not a top-level ``llama_index_legacy`` module.
from llama_index.legacy import SimpleDirectoryReader, VectorStoreIndex

# Prefer a key already exported in the environment; the placeholder is only
# a fallback, so a real OPENAI_API_KEY is never clobbered by this script.
os.environ.setdefault("OPENAI_API_KEY", "YOUR_OPENAI_API_KEY")

# Load every readable file from the local 'data' directory into documents.
documents = SimpleDirectoryReader("data").load_data()

# Embed the documents and build an in-memory vector index over them.
index = VectorStoreIndex.from_documents(documents)

# Query the index and print the synthesized answer.
query_engine = index.as_query_engine()
response = query_engine.query("What is the main topic of the documents?")
print(response)