Commit c719e55

Merge pull request #617 from fede1024/scanterog/event-based-client
Use rdkafka event API instead of the callback API
2 parents c87c1e7 + 978c964
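In short, the per-callback registrations on the client config (rd_kafka_conf_set_log_cb, rd_kafka_conf_set_stats_cb, rd_kafka_conf_set_error_cb, rd_kafka_conf_set_oauthbearer_token_refresh_cb) are removed; instead the client polls a librdkafka event queue and dispatches on the event type. The sketch below illustrates that dispatch pattern against the raw rdkafka-sys bindings; it is an illustration rather than code from this PR, it assumes a valid queue pointer (e.g. obtained via rd_kafka_queue_get_main), and it stubs out the context forwarding that the real Client::poll_event performs.

// Illustrative sketch of the event-based dispatch pattern (not code from this PR).
// Assumes `queue` is a valid librdkafka queue; forwarding to the ClientContext is stubbed out.
use rdkafka_sys as rdsys;
use rdkafka_sys::types::{RDKafkaEvent, RDKafkaQueue};

unsafe fn dispatch_one_event(queue: *mut RDKafkaQueue) {
    // Wait up to 100 ms for the next event; a NULL pointer means the poll timed out.
    let ev: *mut RDKafkaEvent = rdsys::rd_kafka_queue_poll(queue, 100);
    if ev.is_null() {
        return;
    }
    match rdsys::rd_kafka_event_type(ev) {
        rdsys::RD_KAFKA_EVENT_LOG => { /* forward to ClientContext::log */ }
        rdsys::RD_KAFKA_EVENT_STATS => { /* forward to ClientContext::stats_raw */ }
        rdsys::RD_KAFKA_EVENT_ERROR => { /* forward to ClientContext::error */ }
        _ => { /* fetch results, delivery reports, etc. go back to the caller */ }
    }
    // Every event returned by the queue must be destroyed; in the real code the
    // NativeEvent wrapper does this in its Drop impl.
    rdsys::rd_kafka_event_destroy(ev);
}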

File tree: 12 files changed (+600 −317 lines)


src/admin.rs

Lines changed: 1 addition & 1 deletion
@@ -403,7 +403,7 @@ fn start_poll_thread(queue: Arc<NativeQueue>, should_stop: Arc<AtomicBool>) -> J
         .expect("Failed to start polling thread")
 }
 
-type NativeEvent = NativePtr<RDKafkaEvent>;
+pub(crate) type NativeEvent = NativePtr<RDKafkaEvent>;
 
 unsafe impl KafkaDrop for RDKafkaEvent {
     const TYPE: &'static str = "event";

src/client.rs

Lines changed: 130 additions & 114 deletions
@@ -11,19 +11,19 @@
 //! [`consumer`]: crate::consumer
 //! [`producer`]: crate::producer
 
-use std::convert::TryFrom;
 use std::error::Error;
 use std::ffi::{CStr, CString};
 use std::mem::ManuallyDrop;
-use std::os::raw::{c_char, c_void};
+use std::os::raw::c_char;
 use std::ptr;
-use std::slice;
 use std::string::ToString;
 use std::sync::Arc;
 
+use libc::c_void;
 use rdkafka_sys as rdsys;
 use rdkafka_sys::types::*;
 
+use crate::admin::NativeEvent;
 use crate::config::{ClientConfig, NativeClientConfig, RDKafkaLogLevel};
 use crate::consumer::RebalanceProtocol;
 use crate::error::{IsError, KafkaError, KafkaResult};
@@ -239,21 +239,6 @@ impl<C: ClientContext> Client<C> {
                 Arc::as_ptr(&context) as *mut c_void,
             )
         };
-        unsafe { rdsys::rd_kafka_conf_set_log_cb(native_config.ptr(), Some(native_log_cb::<C>)) };
-        unsafe {
-            rdsys::rd_kafka_conf_set_stats_cb(native_config.ptr(), Some(native_stats_cb::<C>))
-        };
-        unsafe {
-            rdsys::rd_kafka_conf_set_error_cb(native_config.ptr(), Some(native_error_cb::<C>))
-        };
-        if C::ENABLE_REFRESH_OAUTH_TOKEN {
-            unsafe {
-                rdsys::rd_kafka_conf_set_oauthbearer_token_refresh_cb(
-                    native_config.ptr(),
-                    Some(native_oauth_refresh_cb::<C>),
-                )
-            };
-        }
 
         let client_ptr = unsafe {
             let native_config = ManuallyDrop::new(native_config);
@@ -293,6 +278,128 @@ impl<C: ClientContext> Client<C> {
         &self.context
     }
 
+    pub(crate) fn poll_event(&self, queue: &NativeQueue, timeout: Timeout) -> Option<NativeEvent> {
+        let event = unsafe { NativeEvent::from_ptr(queue.poll(timeout)) };
+        if let Some(ev) = event {
+            let evtype = unsafe { rdsys::rd_kafka_event_type(ev.ptr()) };
+            match evtype {
+                rdsys::RD_KAFKA_EVENT_LOG => self.handle_log_event(ev.ptr()),
+                rdsys::RD_KAFKA_EVENT_STATS => self.handle_stats_event(ev.ptr()),
+                rdsys::RD_KAFKA_EVENT_ERROR => {
+                    // rdkafka reports consumer errors via RD_KAFKA_EVENT_ERROR but producer errors gets
+                    // embedded on the ack returned via RD_KAFKA_EVENT_DR. Hence we need to return this event
+                    // for the consumer case in order to return the error to the user.
+                    self.handle_error_event(ev.ptr());
+                    return Some(ev);
+                }
+                rdsys::RD_KAFKA_EVENT_OAUTHBEARER_TOKEN_REFRESH => {
+                    if C::ENABLE_REFRESH_OAUTH_TOKEN {
+                        self.handle_oauth_refresh_event(ev.ptr());
+                    }
+                }
+                _ => {
+                    return Some(ev);
+                }
+            }
+        }
+        None
+    }
+
+    fn handle_log_event(&self, event: *mut RDKafkaEvent) {
+        let mut fac: *const c_char = std::ptr::null();
+        let mut str_: *const c_char = std::ptr::null();
+        let mut level: i32 = 0;
+        let result = unsafe { rdsys::rd_kafka_event_log(event, &mut fac, &mut str_, &mut level) };
+        if result == 0 {
+            let fac = unsafe { CStr::from_ptr(fac).to_string_lossy() };
+            let log_message = unsafe { CStr::from_ptr(str_).to_string_lossy() };
+            self.context().log(
+                RDKafkaLogLevel::from_int(level),
+                fac.trim(),
+                log_message.trim(),
+            );
+        }
+    }
+
+    fn handle_stats_event(&self, event: *mut RDKafkaEvent) {
+        let json = unsafe { CStr::from_ptr(rdsys::rd_kafka_event_stats(event)) };
+        self.context().stats_raw(json.to_bytes());
+    }
+
+    fn handle_error_event(&self, event: *mut RDKafkaEvent) {
+        let rdkafka_err = unsafe { rdsys::rd_kafka_event_error(event) };
+        let error = KafkaError::Global(rdkafka_err.into());
+        let reason =
+            unsafe { CStr::from_ptr(rdsys::rd_kafka_event_error_string(event)).to_string_lossy() };
+        self.context().error(error, reason.trim());
+    }
+
+    fn handle_oauth_refresh_event(&self, event: *mut RDKafkaEvent) {
+        let oauthbearer_config = unsafe { rdsys::rd_kafka_event_config_string(event) };
+        let res: Result<_, Box<dyn Error>> = (|| {
+            let oauthbearer_config = match oauthbearer_config.is_null() {
+                true => None,
+                false => unsafe { Some(util::cstr_to_owned(oauthbearer_config)) },
+            };
+            let token_info = self
+                .context()
+                .generate_oauth_token(oauthbearer_config.as_deref())?;
+            let token = CString::new(token_info.token)?;
+            let principal_name = CString::new(token_info.principal_name)?;
+            Ok((token, principal_name, token_info.lifetime_ms))
+        })();
+        match res {
+            Ok((token, principal_name, lifetime_ms)) => {
+                let mut err_buf = ErrBuf::new();
+                let code = unsafe {
+                    rdkafka_sys::rd_kafka_oauthbearer_set_token(
+                        self.native_ptr(),
+                        token.as_ptr(),
+                        lifetime_ms,
+                        principal_name.as_ptr(),
+                        ptr::null_mut(),
+                        0,
+                        err_buf.as_mut_ptr(),
+                        err_buf.capacity(),
+                    )
+                };
+                if code == RDKafkaRespErr::RD_KAFKA_RESP_ERR_NO_ERROR {
+                    debug!("successfully set refreshed OAuth token");
+                } else {
+                    debug!(
+                        "failed to set refreshed OAuth token (code {:?}): {}",
+                        code, err_buf
+                    );
+                    unsafe {
+                        rdkafka_sys::rd_kafka_oauthbearer_set_token_failure(
+                            self.native_ptr(),
+                            err_buf.as_mut_ptr(),
+                        )
+                    };
+                }
+            }
+            Err(e) => {
+                debug!("failed to refresh OAuth token: {}", e);
+                let message = match CString::new(e.to_string()) {
+                    Ok(message) => message,
+                    Err(e) => {
+                        error!("error message generated while refreshing OAuth token has embedded null character: {}", e);
+                        CString::new(
+                            "error while refreshing OAuth token has embedded null character",
+                        )
+                        .expect("known to be a valid CString")
+                    }
+                };
+                unsafe {
+                    rdkafka_sys::rd_kafka_oauthbearer_set_token_failure(
+                        self.native_ptr(),
+                        message.as_ptr(),
+                    )
+                };
+            }
+        }
+    }
+
     /// Returns the metadata information for the specified topic, or for all topics in the cluster
     /// if no topic is specified.
     pub fn fetch_metadata<T: Into<Timeout>>(
@@ -442,6 +549,11 @@ impl<C: ClientContext> Client<C> {
     pub(crate) fn consumer_queue(&self) -> Option<NativeQueue> {
         unsafe { NativeQueue::from_ptr(rdsys::rd_kafka_queue_get_consumer(self.native_ptr())) }
     }
+
+    /// Returns a NativeQueue for the main librdkafka event queue from the current client.
+    pub(crate) fn main_queue(&self) -> NativeQueue {
+        unsafe { NativeQueue::from_ptr(rdsys::rd_kafka_queue_get_main(self.native_ptr())).unwrap() }
+    }
 }
 
 pub(crate) type NativeTopic = NativePtr<RDKafkaTopic>;
@@ -471,48 +583,6 @@ impl NativeQueue {
     }
 }
 
-pub(crate) unsafe extern "C" fn native_log_cb<C: ClientContext>(
-    client: *const RDKafka,
-    level: i32,
-    fac: *const c_char,
-    buf: *const c_char,
-) {
-    let fac = CStr::from_ptr(fac).to_string_lossy();
-    let log_message = CStr::from_ptr(buf).to_string_lossy();
-
-    let context = &mut *(rdsys::rd_kafka_opaque(client) as *mut C);
-    context.log(
-        RDKafkaLogLevel::from_int(level),
-        fac.trim(),
-        log_message.trim(),
-    );
-}
-
-pub(crate) unsafe extern "C" fn native_stats_cb<C: ClientContext>(
-    _conf: *mut RDKafka,
-    json: *mut c_char,
-    json_len: usize,
-    opaque: *mut c_void,
-) -> i32 {
-    let context = &mut *(opaque as *mut C);
-    context.stats_raw(slice::from_raw_parts(json as *mut u8, json_len));
-    0 // librdkafka will free the json buffer
-}
-
-pub(crate) unsafe extern "C" fn native_error_cb<C: ClientContext>(
-    _client: *mut RDKafka,
-    err: i32,
-    reason: *const c_char,
-    opaque: *mut c_void,
-) {
-    let err = RDKafkaRespErr::try_from(err).expect("global error not an rd_kafka_resp_err_t");
-    let error = KafkaError::Global(err.into());
-    let reason = CStr::from_ptr(reason).to_string_lossy();
-
-    let context = &mut *(opaque as *mut C);
-    context.error(error, reason.trim());
-}
-
 /// A generated OAuth token and its associated metadata.
 ///
 /// When using the `OAUTHBEARER` SASL authentication method, this type is
@@ -529,60 +599,6 @@ pub struct OAuthToken {
     pub lifetime_ms: i64,
 }
 
-pub(crate) unsafe extern "C" fn native_oauth_refresh_cb<C: ClientContext>(
-    client: *mut RDKafka,
-    oauthbearer_config: *const c_char,
-    opaque: *mut c_void,
-) {
-    let res: Result<_, Box<dyn Error>> = (|| {
-        let context = &mut *(opaque as *mut C);
-        let oauthbearer_config = match oauthbearer_config.is_null() {
-            true => None,
-            false => Some(util::cstr_to_owned(oauthbearer_config)),
-        };
-        let token_info = context.generate_oauth_token(oauthbearer_config.as_deref())?;
-        let token = CString::new(token_info.token)?;
-        let principal_name = CString::new(token_info.principal_name)?;
-        Ok((token, principal_name, token_info.lifetime_ms))
-    })();
-    match res {
-        Ok((token, principal_name, lifetime_ms)) => {
-            let mut err_buf = ErrBuf::new();
-            let code = rdkafka_sys::rd_kafka_oauthbearer_set_token(
-                client,
-                token.as_ptr(),
-                lifetime_ms,
-                principal_name.as_ptr(),
-                ptr::null_mut(),
-                0,
-                err_buf.as_mut_ptr(),
-                err_buf.capacity(),
-            );
-            if code == RDKafkaRespErr::RD_KAFKA_RESP_ERR_NO_ERROR {
-                debug!("successfully set refreshed OAuth token");
-            } else {
-                debug!(
-                    "failed to set refreshed OAuth token (code {:?}): {}",
-                    code, err_buf
-                );
-                rdkafka_sys::rd_kafka_oauthbearer_set_token_failure(client, err_buf.as_mut_ptr());
-            }
-        }
-        Err(e) => {
-            debug!("failed to refresh OAuth token: {}", e);
-            let message = match CString::new(e.to_string()) {
-                Ok(message) => message,
-                Err(e) => {
-                    error!("error message generated while refreshing OAuth token has embedded null character: {}", e);
-                    CString::new("error while refreshing OAuth token has embedded null character")
-                        .expect("known to be a valid CString")
-                }
-            };
-            rdkafka_sys::rd_kafka_oauthbearer_set_token_failure(client, message.as_ptr());
-        }
-    }
-}
-
 #[cfg(test)]
 mod tests {
     // Just call everything to test there no panics by default, behavior

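For orientation, the following is a hypothetical sketch of how crate-internal code (such as a producer or consumer poll loop) could drive the two methods added in src/client.rs, main_queue() and poll_event(). The function name, the stop flag, and the 100 ms interval are illustrative assumptions and not part of this change.

// Hypothetical crate-internal usage sketch (not part of this diff).
use std::sync::atomic::{AtomicBool, Ordering};
use std::time::Duration;

use crate::client::{Client, ClientContext};
use crate::util::Timeout;

pub(crate) fn drive_events<C: ClientContext>(client: &Client<C>, should_stop: &AtomicBool) {
    // Grab the main librdkafka event queue once, then poll it in a loop.
    let queue = client.main_queue();
    while !should_stop.load(Ordering::Relaxed) {
        // poll_event() routes log, stats, error, and OAuth-refresh events to the
        // ClientContext; error events and everything else (delivery reports,
        // fetch results, ...) are returned to the caller.
        if let Some(_event) = client.poll_event(&queue, Timeout::After(Duration::from_millis(100))) {
            // Hand the event to the producer- or consumer-specific handler here.
        }
    }
}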