-
Notifications
You must be signed in to change notification settings - Fork 217
Expand file tree
/
Copy pathapp.yaml.example
More file actions
117 lines (107 loc) · 4.46 KB
/
app.yaml.example
File metadata and controls
117 lines (107 loc) · 4.46 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
---
# Databricks Apps configuration for Builder App
# Copy this file to app.yaml and customize for your deployment
#
# Prerequisites:
# 1. Create the app: databricks apps create <your-app-name>
# 2. Add Lakebase as a resource (see instructions below)
# 3. Configure your LLM provider settings

# Entry point: uvicorn serving the FastAPI app on the port Databricks injects
# via $DATABRICKS_APP_PORT at runtime.
command:
  - "uvicorn"
  - "server.app:app"
  - "--host"
  - "0.0.0.0"
  - "--port"
  - "$DATABRICKS_APP_PORT"

env:
  # =============================================================================
  # Application Settings
  # =============================================================================
  - name: ENV
    value: "production"
  - name: PROJECTS_BASE_DIR
    value: "./projects"
  - name: PYTHONPATH
    value: "/app/python/source_code/packages"

  # =============================================================================
  # Skills Configuration
  # =============================================================================
  # Comma-separated list of skills to enable.
  # AUTO-POPULATED by deploy.sh at deploy time based on installed skills.
  # To override, set a specific list here before deploying.
  - name: ENABLED_SKILLS
    value: ""
  - name: SKILLS_ONLY_MODE
    value: "false"

  # =============================================================================
  # Database Configuration (Lakebase)
  # =============================================================================
  # Choose ONE of the two options below.
  #
  # --- Option A: Autoscale Lakebase (recommended) ---
  # Scales to zero when idle. No add-resource step needed — connects via OAuth.
  # Find endpoint name in: Catalog → Lakebase → your project → Branches → Endpoints
  #
  # - name: LAKEBASE_ENDPOINT
  #   value: "projects/<project-name>/branches/production/endpoints/<endpoint>"
  # - name: LAKEBASE_DATABASE_NAME
  #   value: "databricks_postgres"
  #
  # --- Option B: Provisioned Lakebase ---
  # Fixed-capacity instance. Must add as an app resource:
  #   databricks apps add-resource <app-name> \
  #     --resource-type database \
  #     --resource-name lakebase \
  #     --database-instance <your-lakebase-instance-name>
  #
  - name: LAKEBASE_INSTANCE_NAME
    value: "<your-lakebase-instance-name>"
  - name: LAKEBASE_DATABASE_NAME
    value: "databricks_postgres"

  # =============================================================================
  # LLM Provider Configuration
  # =============================================================================
  # Option 1: Databricks Foundation Models (default)
  - name: LLM_PROVIDER
    value: "DATABRICKS"
  - name: DATABRICKS_MODEL
    value: "databricks-meta-llama-3-3-70b-instruct"
  # NOTE(review): "databricks-gemini-3-flash" does not look like a standard
  # Databricks Foundation Model endpoint name — confirm against your
  # workspace's serving endpoints before deploying.
  - name: DATABRICKS_MODEL_MINI
    value: "databricks-gemini-3-flash"
  # Option 2: Anthropic Claude (uncomment and add your key)
  # - name: ANTHROPIC_API_KEY
  #   value: "<your-anthropic-api-key>"
  # Option 3: Azure OpenAI (uncomment and configure)
  # - name: LLM_PROVIDER
  #   value: "AZURE"
  # - name: AZURE_OPENAI_API_KEY
  #   value: "<your-azure-api-key>"
  # - name: AZURE_OPENAI_ENDPOINT
  #   value: "https://<your-resource>.cognitiveservices.azure.com/"
  # - name: AZURE_OPENAI_API_VERSION
  #   value: "2024-08-01-preview"
  # - name: AZURE_OPENAI_DEPLOYMENT
  #   value: "gpt-4o"
  # - name: AZURE_OPENAI_DEPLOYMENT_MINI
  #   value: "gpt-4o-mini"

  # =============================================================================
  # Claude SDK Configuration
  # =============================================================================
  # Stream close timeout in milliseconds (3600000 ms = 1 hour).
  - name: CLAUDE_CODE_STREAM_CLOSE_TIMEOUT
    value: "3600000"

  # =============================================================================
  # MLflow Tracing Configuration
  # =============================================================================
  # Enable MLflow tracing for Claude Code conversations
  # Traces are automatically sent to your Databricks workspace
  # See: https://docs.databricks.com/aws/en/mlflow3/genai/tracing/integrations/claude-code
  - name: MLFLOW_TRACKING_URI
    value: "databricks"
  # Optional: Specify a custom experiment for traces
  # - name: MLFLOW_EXPERIMENT_NAME
  #   value: "/Users/your-email@company.com/claude-code-traces"

  # =============================================================================
  # Permission Configuration
  # =============================================================================
  # Grant created resources to this principal (e.g., "account users" for all)
  - name: AUTO_GRANT_PERMISSIONS_TO
    value: "account users"