Skip to content

Commit 9640525

Browse files
Pretty print telemetry payload in acceptance tests (#2552)
## Why Before, the telemetry payload was a single line of encoded JSON in `out.requests.txt`. This made it hard to review and to catch regressions. After, the JSON payload is filtered and pretty printed in `output.txt`, which will be a lot easier to review. ## Tests N/A
1 parent c0b1a88 commit 9640525

File tree

18 files changed

+442
-0
lines changed

18 files changed

+442
-0
lines changed
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,24 @@
11

22
>>> [CLI] bundle init . --config-file input.json --output-dir output
33
✨ Successfully initialized template
4+
5+
>>> cat out.requests.txt
6+
{
7+
"frontend_log_event_id": "[UUID]",
8+
"entry": {
9+
"databricks_cli_log": {
10+
"execution_context": {
11+
"cmd_exec_id": "[CMD-EXEC-ID]",
12+
"version": "[DEV_VERSION]",
13+
"command": "bundle_init",
14+
"operating_system": "[OS]",
15+
"execution_time_ms": SMALL_INT,
16+
"exit_code": 0
17+
},
18+
"bundle_init_event": {
19+
"bundle_uuid": "[BUNDLE-UUID]",
20+
"template_name": "custom"
21+
}
22+
}
23+
}
24+
}

acceptance/bundle/templates/telemetry/custom-template/script

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,3 +12,6 @@ bundle_uuid=$(cat out.databricks.yml | grep -o 'uuid: [^\n]*' | cut -d ' ' -f2)
1212
update_file.py out.requests.txt $cmd_exec_id '[CMD-EXEC-ID]'
1313
update_file.py out.requests.txt $bundle_uuid '[BUNDLE-UUID]'
1414
update_file.py out.databricks.yml $bundle_uuid '[BUNDLE-UUID]'
15+
16+
# pretty print the telemetry payload.
17+
trace cat out.requests.txt | jq 'select(has("path") and .path == "/telemetry-ext") | .body.protoLogs.[] | fromjson'

acceptance/bundle/templates/telemetry/dbt-sql/output.txt

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,3 +8,30 @@ workspace_host: [DATABRICKS_URL]
88
If you already have dbt installed, just type 'cd my_dbt_sql; dbt init' to get started.
99
Refer to the README.md file for full "getting started" guide and production setup instructions.
1010

11+
12+
>>> cat out.requests.txt
13+
{
14+
"frontend_log_event_id": "[UUID]",
15+
"entry": {
16+
"databricks_cli_log": {
17+
"execution_context": {
18+
"cmd_exec_id": "[CMD-EXEC-ID]",
19+
"version": "[DEV_VERSION]",
20+
"command": "bundle_init",
21+
"operating_system": "[OS]",
22+
"execution_time_ms": SMALL_INT,
23+
"exit_code": 0
24+
},
25+
"bundle_init_event": {
26+
"bundle_uuid": "[BUNDLE-UUID]",
27+
"template_name": "dbt-sql",
28+
"template_enum_args": [
29+
{
30+
"key": "personal_schemas",
31+
"value": "yes, use a schema based on the current user name during development"
32+
}
33+
]
34+
}
35+
}
36+
}
37+
}

acceptance/bundle/templates/telemetry/dbt-sql/script

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,3 +12,6 @@ bundle_uuid=$(cat out.databricks.yml | grep -o 'uuid: [^\n]*' | cut -d ' ' -f2)
1212
update_file.py out.requests.txt $cmd_exec_id '[CMD-EXEC-ID]'
1313
update_file.py out.requests.txt $bundle_uuid '[BUNDLE-UUID]'
1414
update_file.py out.databricks.yml $bundle_uuid '[BUNDLE-UUID]'
15+
16+
# pretty print the telemetry payload.
17+
trace cat out.requests.txt | jq 'select(has("path") and .path == "/telemetry-ext") | .body.protoLogs.[] | fromjson'

acceptance/bundle/templates/telemetry/default-python/output.txt

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,3 +6,42 @@ Workspace to use (auto-detected, edit in 'my_default_python/databricks.yml'): [D
66

77
Please refer to the README.md file for "getting started" instructions.
88
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
9+
10+
>>> cat out.requests.txt
11+
{
12+
"frontend_log_event_id": "[UUID]",
13+
"entry": {
14+
"databricks_cli_log": {
15+
"execution_context": {
16+
"cmd_exec_id": "[CMD-EXEC-ID]",
17+
"version": "[DEV_VERSION]",
18+
"command": "bundle_init",
19+
"operating_system": "[OS]",
20+
"execution_time_ms": SMALL_INT,
21+
"exit_code": 0
22+
},
23+
"bundle_init_event": {
24+
"bundle_uuid": "[BUNDLE-UUID]",
25+
"template_name": "default-python",
26+
"template_enum_args": [
27+
{
28+
"key": "include_dlt",
29+
"value": "no"
30+
},
31+
{
32+
"key": "include_notebook",
33+
"value": "yes"
34+
},
35+
{
36+
"key": "include_python",
37+
"value": "yes"
38+
},
39+
{
40+
"key": "serverless",
41+
"value": "no"
42+
}
43+
]
44+
}
45+
}
46+
}
47+
}

acceptance/bundle/templates/telemetry/default-python/script

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,3 +12,6 @@ bundle_uuid=$(cat out.databricks.yml | grep -o 'uuid: [^\n]*' | cut -d ' ' -f2)
1212
update_file.py out.requests.txt $cmd_exec_id '[CMD-EXEC-ID]'
1313
update_file.py out.requests.txt $bundle_uuid '[BUNDLE-UUID]'
1414
update_file.py out.databricks.yml $bundle_uuid '[BUNDLE-UUID]'
15+
16+
# pretty print the telemetry payload.
17+
trace cat out.requests.txt | jq 'select(has("path") and .path == "/telemetry-ext") | .body.protoLogs.[] | fromjson'

acceptance/bundle/templates/telemetry/default-sql/output.txt

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,3 +8,30 @@ workspace_host: [DATABRICKS_URL]
88

99
Please refer to the README.md file for "getting started" instructions.
1010
See also the documentation at https://docs.databricks.com/dev-tools/bundles/index.html.
11+
12+
>>> cat out.requests.txt
13+
{
14+
"frontend_log_event_id": "[UUID]",
15+
"entry": {
16+
"databricks_cli_log": {
17+
"execution_context": {
18+
"cmd_exec_id": "[CMD-EXEC-ID]",
19+
"version": "[DEV_VERSION]",
20+
"command": "bundle_init",
21+
"operating_system": "[OS]",
22+
"execution_time_ms": SMALL_INT,
23+
"exit_code": 0
24+
},
25+
"bundle_init_event": {
26+
"bundle_uuid": "[BUNDLE-UUID]",
27+
"template_name": "default-sql",
28+
"template_enum_args": [
29+
{
30+
"key": "personal_schemas",
31+
"value": "yes, automatically use a schema based on the current user name during development"
32+
}
33+
]
34+
}
35+
}
36+
}
37+
}

acceptance/bundle/templates/telemetry/default-sql/script

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,3 +12,6 @@ bundle_uuid=$(cat out.databricks.yml | grep -o 'uuid: [^\n]*' | cut -d ' ' -f2)
1212
update_file.py out.requests.txt $cmd_exec_id '[CMD-EXEC-ID]'
1313
update_file.py out.requests.txt $bundle_uuid '[BUNDLE-UUID]'
1414
update_file.py out.databricks.yml $bundle_uuid '[BUNDLE-UUID]'
15+
16+
# pretty print the telemetry payload.
17+
trace cat out.requests.txt | jq 'select(has("path") and .path == "/telemetry-ext") | .body.protoLogs.[] | fromjson'

acceptance/bundle/templates/telemetry/test.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,10 @@ New = '[OS]'
1313
Old = 'execution_time_ms\\\":\d{1,5},'
1414
New = 'execution_time_ms\":\"SMALL_INT\",'
1515

16+
[[Repls]]
17+
Old = '"execution_time_ms": \d{1,5},'
18+
New = '"execution_time_ms": SMALL_INT,'
19+
1620
[[Repls]]
1721
Old = " upstream/[A-Za-z0-9.-]+"
1822
New = ""

acceptance/telemetry/failure/output.txt

Lines changed: 110 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -51,3 +51,113 @@ HH:MM:SS Debug: POST /telemetry-ext
5151
HH:MM:SS Debug: non-retriable error: Endpoint not implemented. pid=PID sdk=true
5252
HH:MM:SS Info: Attempt 3 failed due to a server side error. Retrying status code: 501 pid=PID
5353
HH:MM:SS Info: telemetry upload failed: failed to upload telemetry logs after three attempts pid=PID
54+
55+
>>> cat out.requests.txt
56+
{
57+
"frontend_log_event_id": "[UUID]",
58+
"entry": {
59+
"databricks_cli_log": {
60+
"execution_context": {
61+
"cmd_exec_id": "[UUID]",
62+
"version": "[DEV_VERSION]",
63+
"command": "selftest_send-telemetry",
64+
"operating_system": "[OS]",
65+
"execution_time_ms": SMALL_INT,
66+
"exit_code": 0
67+
},
68+
"cli_test_event": {
69+
"name": "VALUE1"
70+
}
71+
}
72+
}
73+
}
74+
{
75+
"frontend_log_event_id": "[UUID]",
76+
"entry": {
77+
"databricks_cli_log": {
78+
"execution_context": {
79+
"cmd_exec_id": "[UUID]",
80+
"version": "[DEV_VERSION]",
81+
"command": "selftest_send-telemetry",
82+
"operating_system": "[OS]",
83+
"execution_time_ms": SMALL_INT,
84+
"exit_code": 0
85+
},
86+
"cli_test_event": {
87+
"name": "VALUE2"
88+
}
89+
}
90+
}
91+
}
92+
{
93+
"frontend_log_event_id": "[UUID]",
94+
"entry": {
95+
"databricks_cli_log": {
96+
"execution_context": {
97+
"cmd_exec_id": "[UUID]",
98+
"version": "[DEV_VERSION]",
99+
"command": "selftest_send-telemetry",
100+
"operating_system": "[OS]",
101+
"execution_time_ms": SMALL_INT,
102+
"exit_code": 0
103+
},
104+
"cli_test_event": {
105+
"name": "VALUE1"
106+
}
107+
}
108+
}
109+
}
110+
{
111+
"frontend_log_event_id": "[UUID]",
112+
"entry": {
113+
"databricks_cli_log": {
114+
"execution_context": {
115+
"cmd_exec_id": "[UUID]",
116+
"version": "[DEV_VERSION]",
117+
"command": "selftest_send-telemetry",
118+
"operating_system": "[OS]",
119+
"execution_time_ms": SMALL_INT,
120+
"exit_code": 0
121+
},
122+
"cli_test_event": {
123+
"name": "VALUE2"
124+
}
125+
}
126+
}
127+
}
128+
{
129+
"frontend_log_event_id": "[UUID]",
130+
"entry": {
131+
"databricks_cli_log": {
132+
"execution_context": {
133+
"cmd_exec_id": "[UUID]",
134+
"version": "[DEV_VERSION]",
135+
"command": "selftest_send-telemetry",
136+
"operating_system": "[OS]",
137+
"execution_time_ms": SMALL_INT,
138+
"exit_code": 0
139+
},
140+
"cli_test_event": {
141+
"name": "VALUE1"
142+
}
143+
}
144+
}
145+
}
146+
{
147+
"frontend_log_event_id": "[UUID]",
148+
"entry": {
149+
"databricks_cli_log": {
150+
"execution_context": {
151+
"cmd_exec_id": "[UUID]",
152+
"version": "[DEV_VERSION]",
153+
"command": "selftest_send-telemetry",
154+
"operating_system": "[OS]",
155+
"execution_time_ms": SMALL_INT,
156+
"exit_code": 0
157+
},
158+
"cli_test_event": {
159+
"name": "VALUE2"
160+
}
161+
}
162+
}
163+
}

0 commit comments

Comments
 (0)