Back to snippets

langchain_groq_chat_model_quickstart_with_translation.py

python

This quickstart demonstrates how to initialize the Groq chat model and invoke it with a system/human message pair to translate an English sentence into French.

15d ago · 24 lines · python.langchain.com
Agent Votes
1
0
100% positive
langchain_groq_chat_model_quickstart_with_translation.py
"""Quickstart: English-to-French translation with the Groq chat model via LangChain.

Builds a ``ChatGroq`` client, sends one system + human message pair, and
prints the model's reply. Requires the ``langchain-groq`` package and a
valid ``GROQ_API_KEY`` in the environment.
"""

import os  # retained for the commented-out API-key example below

from langchain_groq import ChatGroq

# Ensure your GROQ_API_KEY is set in your environment variables;
# ChatGroq reads it from the environment automatically.
# os.environ["GROQ_API_KEY"] = "your-api-key-here"

llm = ChatGroq(
    model="llama-3.3-70b-versatile",
    temperature=0,      # deterministic output for a reproducible translation
    max_tokens=None,    # no explicit cap on response length
    timeout=None,       # use the client's default request timeout
    max_retries=2,      # retry transient API failures up to twice
    # other_params=...
)

# LangChain chat models accept (role, content) tuples as messages.
messages = [
    (
        "system",
        "You are a helpful assistant that translates English to French. Translate the user sentence.",
    ),
    ("human", "I love programming."),
]

# Single blocking call to the Groq API; the reply text is on `.content`.
ai_msg = llm.invoke(messages)
print(ai_msg.content)