
Commit 3dddc6d

fix: loose ends and cleanups (#900)
* group kerberos code together
* clean up bootstrap listener ports
* update metrics.py to actually test for the existence of metrics
* read jmx config from server.yaml instead of broker.yaml
* update docs
* update changelog
1 parent 6cfd7aa commit 3dddc6d

7 files changed, +50 -55 lines changed


CHANGELOG.md

Lines changed: 6 additions & 0 deletions
@@ -16,11 +16,17 @@ All notable changes to this project will be documented in this file.
 - Deprecate support for Kafka `3.7.2` ([#892]).
 - BREAKING: The `<cluster>-<role>-<rolegroup>` rolegroup service was replaced with a `<cluster>-<role>-<rolegroup>-headless`
   and `<cluster>-<role>-<rolegroup>-metrics` rolegroup service ([#897]).
+- Small cleanups and updates ([#900])
+  - remove the metrics port from services that don't need it
+  - use the new `server.yaml` for jmx configuration
+  - update metrics tests
+  - update monitoring doc

 [#889]: https://github.com/stackabletech/kafka-operator/pull/889
 [#890]: https://github.com/stackabletech/kafka-operator/pull/890
 [#892]: https://github.com/stackabletech/kafka-operator/pull/892
 [#897]: https://github.com/stackabletech/kafka-operator/pull/897
+[#900]: https://github.com/stackabletech/kafka-operator/pull/900

 ## [25.7.0] - 2025-07-23

Lines changed: 3 additions & 1 deletion
@@ -1,5 +1,7 @@
 = Monitoring
 :description: The managed Kafka instances are automatically configured to export Prometheus metrics.

-The managed Kafka instances are automatically configured to export Prometheus metrics.
+The operator sets up all Kafka server instances with a JMX exporter agent.
+The operator also sets up a dedicated metrics service for each role group.
+The name of this service follows the schema `<stacklet name>-<role name>-<group name>-metrics`.
 See xref:operators:monitoring.adoc[] for more details.
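
For illustration, a minimal sketch of scraping such a metrics service from inside the cluster; it assumes a stacklet named `test-kafka` with the `broker` role and `default` role group (the same names the smoke tests in this commit use) and the JMX exporter port 9606 seen elsewhere in the diff:

import requests

# Service name follows <stacklet name>-<role name>-<group name>-metrics;
# "test-kafka", "broker" and "default" are example values from the smoke tests.
METRICS_URL = "http://test-kafka-broker-default-metrics:9606/metrics"

response = requests.get(METRICS_URL, timeout=10)
response.raise_for_status()

# The JMX exporter serves Prometheus text format; show the first few lines.
for line in response.text.splitlines()[:10]:
    print(line)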

rust/operator-binary/src/config/jvm.rs

Lines changed: 3 additions & 3 deletions
@@ -51,7 +51,7 @@ fn construct_jvm_args<ConfigFragment>(
         format!("-Xms{java_heap}"),
         format!("-Djava.security.properties={STACKABLE_CONFIG_DIR}/{JVM_SECURITY_PROPERTIES_FILE}"),
         format!(
-            "-javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar={METRICS_PORT}:/stackable/jmx/broker.yaml"
+            "-javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar={METRICS_PORT}:/stackable/jmx/server.yaml"
         ),
     ];

@@ -130,7 +130,7 @@ mod tests {
         assert_eq!(
             non_heap_jvm_args,
             "-Djava.security.properties=/stackable/config/security.properties \
-            -javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar=9606:/stackable/jmx/broker.yaml"
+            -javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar=9606:/stackable/jmx/server.yaml"
         );
         assert_eq!(heap_jvm_args, "-Xmx1638m -Xms1638m");
     }

@@ -177,7 +177,7 @@ mod tests {
         assert_eq!(
             non_heap_jvm_args,
             "-Djava.security.properties=/stackable/config/security.properties \
-            -javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar=9606:/stackable/jmx/broker.yaml \
+            -javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar=9606:/stackable/jmx/server.yaml \
             -Dhttps.proxyHost=proxy.my.corp \
             -Djava.net.preferIPv4Stack=true \
             -Dhttps.proxyPort=1234"
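
The net effect of this change is that the JMX Prometheus agent now loads its rules from server.yaml. A rough Python sketch of the flag that ends up in the non-heap JVM arguments (the paths and the 9606 port are taken from the diff above; this is illustrative, not the operator code):

STACKABLE_JMX_DIR = "/stackable/jmx"
METRICS_PORT = 9606


def jmx_agent_flag(config_file: str = "server.yaml") -> str:
    # The exporter config is now server.yaml instead of broker.yaml.
    return (
        f"-javaagent:{STACKABLE_JMX_DIR}/jmx_prometheus_javaagent.jar"
        f"={METRICS_PORT}:{STACKABLE_JMX_DIR}/{config_file}"
    )


print(jmx_agent_flag())
# -> -javaagent:/stackable/jmx/jmx_prometheus_javaagent.jar=9606:/stackable/jmx/server.yaml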

rust/operator-binary/src/crd/security.rs

Lines changed: 10 additions & 14 deletions
@@ -484,6 +484,16 @@ impl KafkaTlsSecurity {
                 KafkaListenerName::Bootstrap.listener_ssl_truststore_type(),
                 "PKCS12".to_string(),
             );
+            config.insert("sasl.enabled.mechanisms".to_string(), "GSSAPI".to_string());
+            config.insert(
+                "sasl.kerberos.service.name".to_string(),
+                KafkaRole::Broker.kerberos_service_name().to_string(),
+            );
+            config.insert(
+                "sasl.mechanism.inter.broker.protocol".to_string(),
+                "GSSAPI".to_string(),
+            );
+            tracing::debug!("Kerberos configs added: [{:#?}]", config);
         }

         // Internal TLS

@@ -545,20 +555,6 @@ impl KafkaTlsSecurity {
             );
         }

-        // Kerberos
-        if self.has_kerberos_enabled() {
-            config.insert("sasl.enabled.mechanisms".to_string(), "GSSAPI".to_string());
-            config.insert(
-                "sasl.kerberos.service.name".to_string(),
-                KafkaRole::Broker.kerberos_service_name().to_string(),
-            );
-            config.insert(
-                "sasl.mechanism.inter.broker.protocol".to_string(),
-                "GSSAPI".to_string(),
-            );
-            tracing::debug!("Kerberos configs added: [{:#?}]", config);
-        }
-
         // common
         config.insert(
             Self::INTER_BROKER_LISTENER_NAME.to_string(),
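
When Kerberos is enabled, the grouped block above contributes the SASL properties shown below to the generated broker configuration. This is a minimal Python sketch of the resulting entries; the "kafka" service name is an assumed placeholder for whatever KafkaRole::Broker.kerberos_service_name() actually returns:

# Assumed placeholder for KafkaRole::Broker.kerberos_service_name().
KERBEROS_SERVICE_NAME = "kafka"

kerberos_config = {
    "sasl.enabled.mechanisms": "GSSAPI",
    "sasl.kerberos.service.name": KERBEROS_SERVICE_NAME,
    "sasl.mechanism.inter.broker.protocol": "GSSAPI",
}

# Render as server.properties-style key=value lines.
for key, value in kerberos_config.items():
    print(f"{key}={value}")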

rust/operator-binary/src/resource/listener.rs

Lines changed: 12 additions & 21 deletions
@@ -5,10 +5,7 @@ use stackable_operator::{
 };

 use crate::{
-    crd::{
-        METRICS_PORT, METRICS_PORT_NAME, role::broker::BrokerConfig, security::KafkaTlsSecurity,
-        v1alpha1,
-    },
+    crd::{role::broker::BrokerConfig, security::KafkaTlsSecurity, v1alpha1},
     kafka_controller::KAFKA_CONTROLLER_NAME,
     utils::build_recommended_labels,
 };

@@ -53,33 +50,27 @@ pub fn build_broker_rolegroup_bootstrap_listener(
             .build(),
         spec: listener::v1alpha1::ListenerSpec {
             class_name: Some(merged_config.bootstrap_listener_class.clone()),
-            ports: Some(listener_ports(kafka_security)),
+            ports: Some(bootstrap_listener_ports(kafka_security)),
             ..listener::v1alpha1::ListenerSpec::default()
         },
         status: None,
     })
 }

-/// We only expose client HTTP / HTTPS and Metrics ports.
-fn listener_ports(kafka_security: &KafkaTlsSecurity) -> Vec<listener::v1alpha1::ListenerPort> {
-    let mut ports = vec![
+fn bootstrap_listener_ports(
+    kafka_security: &KafkaTlsSecurity,
+) -> Vec<listener::v1alpha1::ListenerPort> {
+    vec![if kafka_security.has_kerberos_enabled() {
         listener::v1alpha1::ListenerPort {
-            name: METRICS_PORT_NAME.to_string(),
-            port: METRICS_PORT.into(),
+            name: kafka_security.bootstrap_port_name().to_string(),
+            port: kafka_security.bootstrap_port().into(),
             protocol: Some("TCP".to_string()),
-        },
+        }
+    } else {
         listener::v1alpha1::ListenerPort {
             name: kafka_security.client_port_name().to_string(),
             port: kafka_security.client_port().into(),
             protocol: Some("TCP".to_string()),
-        },
-    ];
-    if kafka_security.has_kerberos_enabled() {
-        ports.push(listener::v1alpha1::ListenerPort {
-            name: kafka_security.bootstrap_port_name().to_string(),
-            port: kafka_security.bootstrap_port().into(),
-            protocol: Some("TCP".to_string()),
-        });
-    }
-    ports
+        }
+    }]
 }
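
The refactored helper now exposes exactly one port on the bootstrap listener and no longer publishes the metrics port there. A Python sketch of that selection logic follows; the port names and numbers are placeholder assumptions standing in for KafkaTlsSecurity::bootstrap_port()/client_port() and their name counterparts:

from typing import TypedDict


class ListenerPort(TypedDict):
    name: str
    port: int
    protocol: str


def bootstrap_listener_ports(kerberos_enabled: bool) -> list[ListenerPort]:
    # Exactly one port per bootstrap listener; names/numbers are placeholders.
    if kerberos_enabled:
        return [{"name": "bootstrap", "port": 9095, "protocol": "TCP"}]
    return [{"name": "kafka", "port": 9093, "protocol": "TCP"}]


print(bootstrap_listener_ports(kerberos_enabled=False))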

tests/templates/kuttl/smoke-kraft/metrics.py

Lines changed: 8 additions & 8 deletions
@@ -3,19 +3,19 @@
 import requests

 if __name__ == "__main__":
-    result = 0
-
     LOG_LEVEL = "DEBUG"  # if args.debug else 'INFO'
     logging.basicConfig(
         level=LOG_LEVEL,
         format="%(asctime)s %(levelname)s: %(message)s",
         stream=sys.stdout,
     )

-    http_code = requests.get(
-        "http://test-kafka-broker-default-metrics:9606"
-    ).status_code
-    if http_code != 200:
-        result = 1
+    response = requests.get("http://test-kafka-broker-default-metrics:9606/metrics")
+
+    assert response.status_code == 200, (
+        f"Expected HTTP return code 200 from the metrics endpoint but got [{response.status_code}]"
+    )

-    sys.exit(result)
+    assert "jmx_scrape_error" in response.text, (
+        "Expected metric [jmx_scrape_error] not found"
+    )

tests/templates/kuttl/smoke/metrics.py

Lines changed: 8 additions & 8 deletions
@@ -3,19 +3,19 @@
 import requests

 if __name__ == "__main__":
-    result = 0
-
     LOG_LEVEL = "DEBUG"  # if args.debug else 'INFO'
     logging.basicConfig(
         level=LOG_LEVEL,
         format="%(asctime)s %(levelname)s: %(message)s",
         stream=sys.stdout,
     )

-    http_code = requests.get(
-        "http://test-kafka-broker-default-metrics:9606/metrics"
-    ).status_code
-    if http_code != 200:
-        result = 1
+    response = requests.get("http://test-kafka-broker-default-metrics:9606/metrics")
+
+    assert response.status_code == 200, (
+        f"Expected HTTP return code 200 from the metrics endpoint but got [{response.status_code}]"
+    )

-    sys.exit(result)
+    assert "jmx_scrape_error" in response.text, (
+        "Expected metric [jmx_scrape_error] not found"
+    )
