# This is a Databricks asset bundle definition.
# The Databricks extension requires a databricks.yml configuration file.
# See https://docs.databricks.com/dev-tools/bundles/index.html for documentation.
# https://docs.databricks.com/en/dev-tools/bundles/settings.html
bundle:
  name: mlops-databricks-credit-default

## Required for production bundles.
## Alternatively, remove "mode: production" below in the targets and set a default, or pass the flag "-t prod".
# run_as:
#   user_name: "service_principal_email"
include:
  - bundle_monitoring.yml

# These are the default artifact settings if not otherwise overridden in
# the following "targets" top-level mapping.
artifacts:
  default:
    type: whl
    build: uv build
    path: .
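# Note: "uv build" typically writes the wheel to ./dist/, which is where the job
# tasks below pick it up via "libraries: - whl: ./dist/*.whl".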
# These are for any custom variables for use throughout the bundle.
variables:
  root_path:
    description: root_path for the bundle files
    default: /Workspace/Users/${workspace.current_user.userName}/.bundle/${bundle.name}/${bundle.target}/files
  git_sha:
    description: git_sha
    default: 1aa0037267af83a7685bea002a332739563fa3c9
  schedule_pause_status:
    description: schedule pause status
    default: UNPAUSED
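# These defaults can be overridden per deployment without editing this file,
# for example (illustrative values):
#   databricks bundle deploy -t dev --var="git_sha=<commit_sha>" --var="schedule_pause_status=PAUSED"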
# These are the default job and pipeline settings if not otherwise overridden in
# the following "targets" top-level mapping.
# Recommended: only jobs and pipelines.
resources:
  jobs:
    credit-default:
      name: credit-default-workflow-test-demo
      schedule:
        quartz_cron_expression: "0 0 6 ? * MON"
        timezone_id: "Europe/Amsterdam"
        pause_status: ${var.schedule_pause_status}
      tags:
        project_name: "credit-default"
      job_clusters:
        - job_cluster_key: "credit-default-cluster"
          new_cluster:
            spark_version: "15.4.x-scala2.12"
            data_security_mode: "SINGLE_USER"
            node_type_id: "i3.xlarge"
            driver_node_type_id: "i3.xlarge"
            autoscale:
              min_workers: 1
              max_workers: 1
      tasks:
        - task_key: "preprocessing"
          job_cluster_key: "credit-default-cluster"
          existing_cluster_id: 1109-205408-dob4qyuc
          spark_python_task:
            python_file: "workflows/preprocess.py"
            parameters:
              - "--root_path"
              - ${var.root_path}
          libraries:
            - whl: ./dist/*.whl
        - task_key: if_refreshed
          condition_task:
            op: "EQUAL_TO"
            left: "{{tasks.preprocessing.values.refreshed}}"
            right: "1"
          depends_on:
            - task_key: "preprocessing"
        - task_key: "train_model"
          depends_on:
            - task_key: "if_refreshed"
              outcome: "true"
          job_cluster_key: "credit-default-cluster"
          existing_cluster_id: 1109-205408-dob4qyuc
          spark_python_task:
            python_file: "workflows/train_model.py"
            parameters:
              - "--root_path"
              - ${var.root_path}
              - "--git_sha"
              - ${var.git_sha}
              - "--job_run_id"
              - "{{job.id}}"
          libraries:
            - whl: ./dist/*.whl
        - task_key: "evaluate_model"
          depends_on:
            - task_key: "train_model"
          job_cluster_key: "credit-default-cluster"
          existing_cluster_id: 1109-205408-dob4qyuc
          spark_python_task:
            python_file: "workflows/evaluate_model.py"
            parameters:
              - "--root_path"
              - ${var.root_path}
              - "--new_model_uri"
              - "{{tasks.train_model.values.new_model_uri}}"
              - "--job_run_id"
              - "{{job.id}}"
              - "--git_sha"
              - ${var.git_sha}
          libraries:
            - whl: ./dist/*.whl
        - task_key: model_update
          condition_task:
            op: "EQUAL_TO"
            left: "{{tasks.evaluate_model.values.model_update}}"
            right: "1"
          depends_on:
            - task_key: "evaluate_model"
        - task_key: "deploy_model"
          depends_on:
            - task_key: "model_update"
              outcome: "true"
          job_cluster_key: "credit-default-cluster"
          existing_cluster_id: 1109-205408-dob4qyuc
          spark_python_task:
            python_file: "workflows/deploy_model.py"
            parameters:
              - "--root_path"
              - ${var.root_path}
          libraries:
            - whl: ./dist/*.whl
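# The "if_refreshed" and "model_update" condition tasks above branch on task values
# that the upstream scripts are expected to publish. A minimal sketch, assuming the
# scripts use Databricks task values:
#   dbutils.jobs.taskValues.set(key="refreshed", value="1")      # in preprocess.py
#   dbutils.jobs.taskValues.set(key="model_update", value="1")   # in evaluate_model.py
# Downstream tasks read them via "{{tasks.<task_key>.values.<key>}}", as in the
# condition and parameter references above.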
# Two targets
targets:
  dev: # AWS West Region workspace
    mode: development
    default: true
    cluster_id: 1109-205408-dob4qyuc
    workspace:
      host: https://dbc-46617658-3f2b.cloud.databricks.com
  prod:
    mode: production # Production mode expects "run_as" above; comment this line out if you do not want to set it
    default: false
    workspace:
      host: https://dbc-46617658-3f2b.cloud.databricks.com
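# A typical workflow for this bundle with the Databricks CLI (sketch):
#   databricks bundle validate -t dev            # check the configuration
#   databricks bundle deploy -t dev              # build the wheel and deploy the resources
#   databricks bundle run credit-default -t dev  # trigger the job defined above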