Skip to content

Commit 1e6ae37

Browse files
authored
Merge pull request #5 from databricks-industry-solutions/feature/modernize-to-2025-standards
feat: Modernize demand forecasting solution to 2025 standards
2 parents 4c6ec22 + bf79386 commit 1e6ae37

25 files changed

+2207
-2152
lines changed

.github/workflows/databricks-ci.yml

Lines changed: 120 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,120 @@
---
# CI for the demand-forecasting Databricks Asset Bundle:
# validate -> deploy -> run the workflow -> destroy (best-effort cleanup).
name: Databricks Asset Bundles CI

# NOTE: generic YAML 1.1 parsers read the bare `on` key as boolean true;
# GitHub's loader handles it, so it stays unquoted by Actions convention.
on:
  pull_request:
    branches:
      - main
      - feature/dabsdeploy
  push:
    branches:
      - main
      - feature/dabsdeploy

jobs:
  validate-and-test:
    # NOTE(review): "html_publisher" is not a GitHub-hosted image name
    # (e.g. ubuntu-latest) — presumably a self-hosted runner label; confirm
    # such a runner is registered for this repository.
    runs-on: html_publisher

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Set up Databricks CLI
        uses: databricks/setup-cli@main
        env:
          DATABRICKS_HOST: 'https://e2-demo-field-eng.cloud.databricks.com'
          DATABRICKS_TOKEN: ${{ secrets.DEPLOY_NOTEBOOK_TOKEN }}

      # Writes a plaintext profile so the later `databricks bundle` steps
      # authenticate without per-step env vars.
      # NOTE(review): the token lands on the runner's disk in cleartext;
      # acceptable on an ephemeral runner, risky on a persistent one.
      - name: Configure Databricks CLI authentication
        run: |
          echo "[DEFAULT]" > ~/.databrickscfg
          echo "host = https://e2-demo-field-eng.cloud.databricks.com" >> ~/.databrickscfg
          echo "token = ${{ secrets.DEPLOY_NOTEBOOK_TOKEN }}" >> ~/.databrickscfg

      # Looks up a serverless SQL warehouse by name via the REST API,
      # creates one if absent, and exports WAREHOUSE_ID / DEPLOY_ENV for
      # the bundle steps below.
      - name: Get or Create serverless SQL warehouse
        env:
          DATABRICKS_HOST: 'https://e2-demo-field-eng.cloud.databricks.com'
          DATABRICKS_TOKEN: ${{ secrets.DEPLOY_NOTEBOOK_TOKEN }}
        run: |
          WAREHOUSE_NAME="Shared Unity Catalog Serverless"
          echo "Looking for warehouse named: $WAREHOUSE_NAME"

          echo "Fetching existing warehouses..."
          EXISTING_WAREHOUSE=$(curl -s -H "Authorization: Bearer $DATABRICKS_TOKEN" \
            "$DATABRICKS_HOST/api/2.0/sql/warehouses")

          # Quote the payload so it is not word-split/globbed by the shell;
          # redact token-like fields before logging.
          echo "Warehouse List Response: $(echo "$EXISTING_WAREHOUSE" | sed 's/\"token\":\"[^\"]*\"/\"token\":\"***\"/g')"

          # Parse the listing with Python rather than grep so nested JSON
          # cannot produce false matches; prints the id or an empty string.
          WAREHOUSE_ID=$(echo "$EXISTING_WAREHOUSE" | python3 -c "
          import sys, json
          import argparse

          parser = argparse.ArgumentParser()
          parser.add_argument('--name', required=True)
          args = parser.parse_args()

          try:
              data = json.load(sys.stdin)
              name = args.name
              if 'warehouses' in data:
                  warehouses = data['warehouses']
                  matching = [w for w in warehouses if w['name'] == name]
                  if matching:
                      print(matching[0]['id'])
                  else:
                      print('')
              else:
                  print('')
          except Exception as e:
              print(f'Error parsing response: {str(e)}', file=sys.stderr)
              print('')
          " --name "$WAREHOUSE_NAME")

          if [ -z "$WAREHOUSE_ID" ]; then
            echo "Creating new warehouse..."
            RESPONSE=$(curl -s -X POST -H "Authorization: Bearer $DATABRICKS_TOKEN" \
              -H "Content-Type: application/json" \
              "$DATABRICKS_HOST/api/2.0/sql/warehouses" \
              -d "{
                \"name\": \"${WAREHOUSE_NAME}\",
                \"cluster_size\": \"2X-Small\",
                \"enable_serverless_compute\": true,
                \"auto_stop_mins\": 10,
                \"max_num_clusters\": 1
              }")

            echo "Create API Response: $(echo "$RESPONSE" | sed 's/\"token\":\"[^\"]*\"/\"token\":\"***\"/g')"
            WAREHOUSE_ID=$(echo "$RESPONSE" | python3 -c "import sys, json; print(json.load(sys.stdin).get('id', ''))")
          else
            echo "Found existing warehouse with ID: $WAREHOUSE_ID"
          fi

          if [ -z "$WAREHOUSE_ID" ]; then
            echo "Error: Failed to get warehouse ID"
            exit 1
          fi

          echo "Using warehouse with ID: $WAREHOUSE_ID"
          echo "WAREHOUSE_ID=$WAREHOUSE_ID" >> "$GITHUB_ENV"
          # Set default environment to dev
          echo "DEPLOY_ENV=dev" >> "$GITHUB_ENV"

      - name: Validate bundle
        run: databricks bundle validate --var="environment=${{ env.DEPLOY_ENV }}"

      # NOTE(review): validate/deploy omit --target while run/destroy pass
      # --target dev — confirm the bundle's default target is dev, otherwise
      # deploy and run operate on different targets.
      - name: Deploy bundle
        run: databricks bundle deploy --var="environment=${{ env.DEPLOY_ENV }}"

      - name: Run and monitor workflow
        run: |
          echo "Starting workflow execution..."
          databricks bundle run demand_forecasting_workflow --target dev --var="environment=${{ env.DEPLOY_ENV }}"
          echo "Workflow execution completed"

      # Fix: `if: always()` so cleanup runs even when an earlier step fails
      # (previously a failed run left the deployment behind), and
      # `--auto-approve` so destroy does not block on an interactive
      # confirmation prompt in CI. `|| true` keeps cleanup best-effort.
      - name: Cleanup PR deployment
        if: always()
        run: |
          databricks bundle destroy --auto-approve --target dev --var="environment=${{ env.DEPLOY_ENV }}" || true

.github/workflows/integration-test-aws-pr.yml

Lines changed: 0 additions & 46 deletions
This file was deleted.

.github/workflows/integration-test-aws-push.yml

Lines changed: 0 additions & 49 deletions
This file was deleted.

.github/workflows/integration-test-gcp-pr.yml

Lines changed: 0 additions & 45 deletions
This file was deleted.

.github/workflows/integration-test-gcp-push.yml

Lines changed: 0 additions & 49 deletions
This file was deleted.

.github/workflows/integration-test-msa-pr.yml

Lines changed: 0 additions & 45 deletions
This file was deleted.

0 commit comments

Comments (0)