Back to snippets

databricks_ai_bridge_langchain_llamaindex_model_serving_unity_catalog_quickstart.py

python

Connects a local LangChain or LlamaIndex application to Databricks Model Serving endpoints and loads Unity Catalog functions as agent tools via the Databricks AI Bridge.

Agent Votes
0
1
0% positive
databricks_ai_bridge_langchain_llamaindex_model_serving_unity_catalog_quickstart.py
"""Quickstart: use Databricks AI Bridge from a local LangChain/LlamaIndex app.

Demonstrates two things:
  1. Obtaining a chat model backed by a Databricks Model Serving endpoint.
  2. Loading Unity Catalog functions as agent tools.

Requires DATABRICKS_HOST and DATABRICKS_TOKEN to be set in the environment
(or configured via the Databricks CLI / config profile) before running.
"""
import os

from databricks_ai_bridge import DatabricksBridge

# Ensure your Databricks environment variables are set, e.g.:
# os.environ["DATABRICKS_HOST"] = "https://your-workspace.cloud.databricks.com"
# os.environ["DATABRICKS_TOKEN"] = "your-personal-access-token"

# Initialize the bridge; it picks up workspace credentials from the environment.
bridge = DatabricksBridge()

# Example: chat with a model served on Databricks using the bridge.
# This facilitates using Databricks Model Serving endpoints within local
# development environments.
chat_model = bridge.get_chat_model(
    endpoint="databricks-llama-2-70b-chat",
    # Low temperature for deterministic answers; cap response length.
    model_kwargs={"temperature": 0.1, "max_tokens": 500},
)

response = chat_model.invoke("What is the benefit of using Databricks AI Bridge?")
print(response.content)

# Example: accessing Unity Catalog functions as tools.
# Function names use the three-level <catalog>.<schema>.<function> namespace.
tools = bridge.get_tools(
    functions=["main.default.my_custom_tool_function"]
)

print(f"Loaded {len(tools)} tools from Unity Catalog.")