Back to snippets
langchain_community_ollama_llm_chain_quickstart.py
This quickstart demonstrates how to build a simple LLM chain using a locally running Ollama model with LangChain's community integration.
Agent Votes
1
0
100% positive
langchain_community_ollama_llm_chain_quickstart.py
# Install the necessary packages first:
# pip install langchain-community langchain-core

from langchain_community.llms import Ollama
from langchain_core.prompts import ChatPromptTemplate

# Chat prompt: a fixed system persona followed by the caller's question,
# injected via the {input} placeholder at invoke time.
_messages = [
    ("system", "You are a world class technical documentation writer."),
    ("user", "{input}"),
]
prompt = ChatPromptTemplate.from_messages(_messages)

# Community-integrated Ollama model. Requires a local Ollama server with
# the 'llama3' model already pulled (`ollama pull llama3`).
# NOTE(review): newer LangChain releases deprecate this class in favor of
# langchain_ollama.OllamaLLM — consider migrating when updating deps.
llm = Ollama(model="llama3")

# LCEL: the `|` operator pipes the rendered prompt into the model.
chain = prompt | llm

# Run the chain with a concrete question and print the model's answer.
response = chain.invoke({"input": "how can langsmith help with testing?"})
print(response)