Commit 77e4492

[Python] Add pipelines to template (#2706)
## Changes

Add pipelines to the `experimental-jobs-as-code` template.

## Why

This makes the template consistent with the capabilities of the `default-python` template.

## Tests

Acceptance tests.
1 parent dc624fa commit 77e4492

File tree: 6 files changed, +144 −4 lines changed

Lines changed: 2 additions & 1 deletion

```diff
@@ -1,5 +1,6 @@
 {
   "project_name": "my_jobs_as_code",
   "include_notebook": "yes",
-  "include_python": "yes"
+  "include_python": "yes",
+  "include_dlt": "yes"
 }
```
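These answers drive the acceptance test's non-interactive template render. As a rough sketch (not part of the acceptance harness, which has its own runner), an answers file like this can be passed to `databricks bundle init` via its `--config-file` flag, assuming the Databricks CLI is on `PATH`:

```python
# Hedged sketch: render the template locally with the answers above.
# Not part of this commit; --config-file / --output-dir are documented
# flags of `databricks bundle init`.
import subprocess

subprocess.run(
    [
        "databricks", "bundle", "init",
        "experimental-jobs-as-code",
        "--config-file", "input.json",
        "--output-dir", "out",
    ],
    check=True,
)
```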

acceptance/bundle/templates/experimental-jobs-as-code/output.txt

Lines changed: 23 additions & 0 deletions

```diff
@@ -79,6 +79,29 @@ Warning: Ignoring Databricks CLI version constraint for development build. Requi
           }
         }
       }
+    },
+    "pipelines": {
+      "my_jobs_as_code_pipeline": {
+        "catalog": "catalog_name",
+        "configuration": {
+          "bundle.sourcePath": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/files/src"
+        },
+        "deployment": {
+          "kind": "BUNDLE",
+          "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/state/metadata.json"
+        },
+        "development": true,
+        "libraries": [
+          {
+            "notebook": {
+              "path": "/Workspace/Users/[USERNAME]/.bundle/my_jobs_as_code/dev/files/src/dlt_pipeline"
+            }
+          }
+        ],
+        "name": "[dev [USERNAME]] my_jobs_as_code_pipeline",
+        "permissions": [],
+        "target": "my_jobs_as_code_dev"
+      }
     }
   }
```
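The `"development": true` flag and the `[dev [USERNAME]]` prefix in the pipeline name above come from the template's `dev` target running in development mode, where the bundle prepends a marker to resource names. A minimal illustrative reimplementation of that naming preset (the real logic lives in the CLI's mutators, not here):

```python
# Illustrative only: approximates the dev-mode name prefix visible in the
# expected output above.
def dev_name(resource_name: str, username: str) -> str:
    return f"[dev {username}] {resource_name}"

assert (
    dev_name("my_jobs_as_code_pipeline", "[USERNAME]")
    == "[dev [USERNAME]] my_jobs_as_code_pipeline"
)
```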

Lines changed: 20 additions & 0 deletions

```diff
@@ -0,0 +1,20 @@
+from databricks.bundles.pipelines import Pipeline
+
+my_jobs_as_code_pipeline = Pipeline.from_dict(
+    {
+        "name": "my_jobs_as_code_pipeline",
+        "target": "my_jobs_as_code_${bundle.target}",
+        ## Specify the 'catalog' field to configure this pipeline to make use of Unity Catalog:
+        "catalog": "catalog_name",
+        "libraries": [
+            {
+                "notebook": {
+                    "path": "src/dlt_pipeline.ipynb",
+                },
+            },
+        ],
+        "configuration": {
+            "bundle.sourcePath": "${workspace.file_path}/src",
+        },
+    }
+)
```
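Because the pipeline definition is plain Python rather than YAML, the dict can be assembled programmatically before it reaches `Pipeline.from_dict`. A small sketch under that assumption; `catalog_for_target` and `build_pipeline` are hypothetical helpers for illustration, not part of the template or the `databricks-bundles` package:

```python
# Hedged sketch: derive the Unity Catalog name from the bundle target
# instead of hard-coding "catalog_name".
from databricks.bundles.pipelines import Pipeline


def catalog_for_target(target: str) -> str:
    # Hypothetical mapping; adjust to your workspace's catalogs.
    return {"dev": "dev_catalog", "prod": "main"}.get(target, "catalog_name")


def build_pipeline(target: str) -> Pipeline:
    return Pipeline.from_dict(
        {
            "name": "my_jobs_as_code_pipeline",
            "target": f"my_jobs_as_code_{target}",
            "catalog": catalog_for_target(target),
            "libraries": [{"notebook": {"path": "src/dlt_pipeline.ipynb"}}],
            "configuration": {"bundle.sourcePath": "${workspace.file_path}/src"},
        }
    )
```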
Lines changed: 90 additions & 0 deletions

```diff
@@ -0,0 +1,90 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {
+    "application/vnd.databricks.v1+cell": {
+     "cellMetadata": {},
+     "inputWidgets": {},
+     "nuid": "[UUID]",
+     "showTitle": false,
+     "title": ""
+    }
+   },
+   "source": [
+    "# DLT pipeline\n",
+    "\n",
+    "This Delta Live Tables (DLT) definition is executed using a pipeline defined in resources/my_jobs_as_code.pipeline.yml."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 0,
+   "metadata": {
+    "application/vnd.databricks.v1+cell": {
+     "cellMetadata": {},
+     "inputWidgets": {},
+     "nuid": "[UUID]",
+     "showTitle": false,
+     "title": ""
+    }
+   },
+   "outputs": [],
+   "source": [
+    "# Import DLT and src/my_jobs_as_code\n",
+    "import dlt\n",
+    "import sys\n",
+    "\n",
+    "sys.path.append(spark.conf.get(\"bundle.sourcePath\", \".\"))\n",
+    "from pyspark.sql.functions import expr\n",
+    "from my_jobs_as_code import main"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 0,
+   "metadata": {
+    "application/vnd.databricks.v1+cell": {
+     "cellMetadata": {},
+     "inputWidgets": {},
+     "nuid": "[UUID]",
+     "showTitle": false,
+     "title": ""
+    }
+   },
+   "outputs": [],
+   "source": [
+    "@dlt.view\n",
+    "def taxi_raw():\n",
+    "  return main.get_taxis(spark)\n",
+    "\n",
+    "\n",
+    "@dlt.table\n",
+    "def filtered_taxis():\n",
+    "  return dlt.read(\"taxi_raw\").filter(expr(\"fare_amount < 30\"))"
+   ]
+  }
+ ],
+ "metadata": {
+  "application/vnd.databricks.v1+notebook": {
+   "dashboards": [],
+   "language": "python",
+   "notebookMetadata": {
+    "pythonIndentUnit": 2
+   },
+   "notebookName": "dlt_pipeline",
+   "widgets": {}
+  },
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "name": "python",
+   "version": "3.11.4"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
```
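The notebook imports `main` from the project package and calls `main.get_taxis(spark)`. That helper is not part of this diff; presumably it mirrors the `default-python` template's sample code, along these lines:

```python
# Hedged sketch of src/my_jobs_as_code/main.py (not shown in this commit);
# the default-python template reads the NYC taxi sample dataset like this.
from pyspark.sql import DataFrame, SparkSession


def get_taxis(spark: SparkSession) -> DataFrame:
    return spark.read.table("samples.nyctaxi.trips")
```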

libs/template/templates/experimental-jobs-as-code/databricks_template_schema.json

Lines changed: 8 additions & 1 deletion

```diff
@@ -16,12 +16,19 @@
       "description": "Include a stub (sample) notebook in '{{.project_name}}{{path_separator}}src'",
       "order": 2
     },
+    "include_dlt": {
+      "type": "string",
+      "default": "yes",
+      "enum": ["yes", "no"],
+      "description": "Include a stub (sample) Delta Live Tables pipeline in '{{.project_name}}{{path_separator}}src'",
+      "order": 3
+    },
     "include_python": {
       "type": "string",
       "default": "yes",
       "enum": ["yes", "no"],
       "description": "Include a stub (sample) Python package in '{{.project_name}}/src'",
-      "order": 3
+      "order": 4
     }
   },
   "success_message": "Workspace to use (auto-detected, edit in '{{.project_name}}/databricks.yml'): {{workspace_host}}\n\n✨ Your new project has been created in the '{{.project_name}}' directory!\n\nPlease refer to the README.md file for \"getting started\" instructions.\nSee also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html."
```

libs/template/templates/experimental-jobs-as-code/template/__preamble.tmpl

Lines changed: 1 addition & 2 deletions

```diff
@@ -4,8 +4,7 @@ This file only template directives; it is skipped for the actual output.
 
 {{skip "__preamble"}}
 
-# TODO add DLT support, placeholder for now
-{{$notDLT := true }}
+{{$notDLT := not (eq .include_dlt "yes")}}
 {{$notNotebook := not (eq .include_notebook "yes")}}
 {{$notPython := not (eq .include_python "yes")}}
```