-
Notifications
You must be signed in to change notification settings - Fork 7
Expand file tree
/
Copy pathdatabricks.yml
More file actions
59 lines (51 loc) · 1.7 KB
/
databricks.yml
File metadata and controls
59 lines (51 loc) · 1.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# Databricks Asset Bundle configuration for the demo workflow.
# Structure: bundle metadata -> reusable variables -> deployment targets.
bundle:
  name: demo_workflow

variables:
  # Project name - replace with your solution accelerator name
  project_name:
    description: "Python Data Sources"
    default: "python-data-sources"
  # The "warehouse_id" variable is used to reference the warehouse used by the dashboard.
  warehouse_id:
    lookup:
      # Replace this with the name of your SQL warehouse.
      warehouse: "Shared Unity Catalog Serverless"
  # Environment variable used for deployment paths
  # NOTE(review): not referenced by any resource below — confirm it is used elsewhere.
  environment:
    description: "Deployment environment (dev, staging, prod)"
    default: "dev"

targets:
  dev:
    default: true
    mode: development
    # Customize the resources below based on your solution accelerator needs
    resources:
      # Example job workflow - customize based on your notebooks
      jobs:
        demo_workflow:
          name: "${var.project_name} - Pytest Workflow"
          max_concurrent_runs: 4
          tasks:
            - task_key: dbrunner
              notebook_task:
                # NOTE(review): hard-coded personal workspace path — consider
                # "${workspace.file_path}/..." so the bundle is portable across users.
                notebook_path: /Workspace/Users/douglas.moore@databricks.com/python-data-sources-x/zipdcm/db_runner
                source: WORKSPACE
              # NOTE(review): pinned to a specific interactive cluster; a job
              # cluster definition would make this deployable to other workspaces.
              existing_cluster_id: 0519-014005-pr11dvi3
              libraries:
                - pypi:
                    package: pyspark==4.0.0.dev1
          tags:
            dev: douglas_moore
            owner: douglas.moore@databricks.com
            solacc: pixels
          queue:
            enabled: true
          environments:
            - environment_key: zipdcm_pytest_environment
              spec:
                # Serverless environment version; quoted so it stays a string.
                client: "3"
                dependencies:
                  - pydicom==3.0.1
                  - pytest==8.3.5
          budget_policy_id: d8e5830d-97cb-40b9-bd65-063434295162
# For more options and schema, see: https://docs.databricks.com/aws/en/dev-tools/bundles/settings