Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
20 changes: 20 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ prost = "0.14"
prost-types = "0.14"
rand = "0.8.5"
rdkafka = { version = "0.37.0", features = ["cmake-build", "ssl"] }
rmp-serde = "1.3"
sentry = { version = "0.41.0", default-features = false, features = [
# default features, except `release-health` is disabled
"backtrace",
Expand Down
21 changes: 21 additions & 0 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -301,6 +301,22 @@ pub struct Config {

/// Maps every application to its worker endpoint, both represented as strings.
pub worker_map: BTreeMap<String, String>,

/// Enable passthrough mode for consuming raw bytes from legacy topics.
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
/// Enable passthrough mode for consuming raw bytes from legacy topics.
/// Enable passthrough mode for consuming raw bytes from raw topics.

/// In passthrough mode, raw Kafka message bytes are wrapped into TaskActivation.
pub passthrough_mode: bool,

/// The namespace to assign to passthrough activations.
pub passthrough_namespace: Option<String>,

/// The application to assign to passthrough activations.
pub passthrough_application: Option<String>,

/// The taskname to assign to passthrough activations.
pub passthrough_taskname: Option<String>,

/// Processing deadline duration in seconds for passthrough activations.
pub passthrough_processing_deadline_duration: u64,
}

impl Default for Config {
Expand Down Expand Up @@ -386,6 +402,11 @@ impl Default for Config {
callback_addr: "0.0.0.0".into(),
callback_port: 50051,
worker_map: [("sentry".into(), "http://127.0.0.1:50052".into())].into(),
passthrough_mode: false,
passthrough_namespace: None,
passthrough_application: None,
passthrough_taskname: None,
passthrough_processing_deadline_duration: 30,
}
}
}
Expand Down
42 changes: 42 additions & 0 deletions src/kafka/deserialize.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
use std::sync::Arc;

use anyhow::Error;
use rdkafka::message::OwnedMessage;

use crate::config::Config;
use crate::store::activation::InflightActivation;

use super::deserialize_activation::{self, DeserializeActivationConfig};
use super::deserialize_passthrough::{self, PassthroughConfig};

/// Bundles the settings for both deserialization modes so a single
/// deserializer closure can be built via [`new`].
pub struct DeserializeConfig {
    // Settings for the normal mode: decoding protobuf TaskActivation messages.
    activation_config: DeserializeActivationConfig,
    // Present only when passthrough mode is enabled in the app config;
    // `None` means messages are decoded as TaskActivation protos.
    passthrough_config: Option<PassthroughConfig>,
}

impl DeserializeConfig {
    /// Derive deserializer settings from the application [`Config`].
    ///
    /// The passthrough half is populated only when passthrough mode is
    /// enabled; otherwise only the normal activation settings are used.
    pub fn from_config(config: &Config) -> Self {
        let activation_config = DeserializeActivationConfig::from_config(config);
        let passthrough_config = PassthroughConfig::from_config(config);
        Self {
            activation_config,
            passthrough_config,
        }
    }
}

/// Create a unified deserializer that handles both normal and passthrough modes.
/// In passthrough mode, raw Kafka bytes are wrapped into a TaskActivation.
/// In normal mode, Kafka messages are expected to contain encoded TaskActivation protos.
pub fn new(
    config: DeserializeConfig,
) -> impl Fn(Arc<OwnedMessage>) -> Result<InflightActivation, Error> {
    // Build both deserializers up front; the mode is fixed for the lifetime
    // of the returned closure, so no per-message configuration checks happen.
    let passthrough = config.passthrough_config.map(deserialize_passthrough::new);
    let activation = deserialize_activation::new(config.activation_config);

    move |msg: Arc<OwnedMessage>| match &passthrough {
        Some(deserialize) => deserialize(msg),
        None => activation(msg),
    }
}
227 changes: 227 additions & 0 deletions src/kafka/deserialize_passthrough.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,227 @@
use std::collections::HashMap;
use std::sync::Arc;

use anyhow::{Error, anyhow};
use chrono::Utc;
use prost::Message as _;
use rdkafka::Message;
use rdkafka::message::OwnedMessage;
use sentry_protos::taskbroker::v1::{OnAttemptsExceeded, TaskActivation};
use uuid::Uuid;

use crate::config::Config;
use crate::store::activation::{InflightActivation, InflightActivationStatus};

use super::deserialize_activation::bucket_from_id;

/// Settings stamped onto every activation synthesized in passthrough mode.
pub struct PassthroughConfig {
    /// Namespace assigned to each synthesized activation.
    pub namespace: String,
    /// Application assigned to each synthesized activation.
    pub application: String,
    /// Taskname assigned to each synthesized activation.
    pub taskname: String,
    /// Processing deadline, in seconds, for each synthesized activation.
    pub processing_deadline_duration: u64,
}

impl PassthroughConfig {
    /// Build passthrough settings from the application [`Config`].
    ///
    /// Returns `None` when passthrough mode is disabled. When the mode is
    /// enabled, the namespace/application/taskname options are mandatory and
    /// a missing one panics — this is intended to fail fast at startup.
    pub fn from_config(config: &Config) -> Option<Self> {
        config.passthrough_mode.then(|| Self {
            namespace: config
                .passthrough_namespace
                .clone()
                .expect("passthrough_namespace required when passthrough_mode is enabled"),
            application: config
                .passthrough_application
                .clone()
                .expect("passthrough_application required when passthrough_mode is enabled"),
            taskname: config
                .passthrough_taskname
                .clone()
                .expect("passthrough_taskname required when passthrough_mode is enabled"),
            processing_deadline_duration: config.passthrough_processing_deadline_duration,
        })
    }
}

/// Encode raw bytes into msgpack format: {"args": [raw_bytes], "kwargs": {}}
fn encode_passthrough_params(raw_bytes: &[u8]) -> Result<Vec<u8>, Error> {
use serde::Serialize;

#[derive(Serialize)]
struct Params<'a> {
args: (&'a [u8],),
kwargs: HashMap<(), ()>,
}
Comment on lines +51 to +55
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

For my own understanding, why define this struct here vs outside of the function?


let params = Params {
args: (raw_bytes,),
kwargs: HashMap::new(),
};

rmp_serde::to_vec_named(&params).map_err(|e| anyhow!("Failed to encode msgpack: {}", e))
}

/// Create a deserializer closure for passthrough mode.
/// Wraps raw Kafka message bytes into a TaskActivation with msgpack-encoded parameters_bytes.
pub fn new(
    config: PassthroughConfig,
) -> impl Fn(Arc<OwnedMessage>) -> Result<InflightActivation, Error> {
    // InflightActivation stores the deadline as i32 while the config carries
    // a u64; a plain `as` cast would silently wrap values above i32::MAX to a
    // negative duration. Clamp once here instead of per message.
    let deadline_i32 = i32::try_from(config.processing_deadline_duration).unwrap_or(i32::MAX);

    move |msg: Arc<OwnedMessage>| {
        // A record with no payload carries nothing to wrap; reject it.
        let Some(payload) = msg.payload() else {
            return Err(anyhow!("Message has no payload"));
        };

        // Raw passthrough payloads carry no activation id, so synthesize one.
        let id = Uuid::new_v4().to_string();
        let parameters_bytes = encode_passthrough_params(payload)?;
        let now = Utc::now();
        // received_at is truncated to whole seconds intentionally (nanos: 0).
        let received_at = prost_types::Timestamp {
            seconds: now.timestamp(),
            nanos: 0,
        };

        let activation = TaskActivation {
            id: id.clone(),
            application: Some(config.application.clone()),
            namespace: config.namespace.clone(),
            taskname: config.taskname.clone(),
            // The deprecated string field is left empty; the payload travels
            // in `parameters_bytes`.
            #[allow(deprecated)]
            parameters: String::new(),
            parameters_bytes,
            headers: HashMap::new(),
            received_at: Some(received_at),
            retry_state: None,
            processing_deadline_duration: config.processing_deadline_duration,
            expires: None,
            delay: None,
        };

        let activation_bytes = activation.encode_to_vec();
        let bucket = bucket_from_id(&id);

        // Track incoming payload sizes per namespace/taskname.
        metrics::histogram!(
            "consumer.passthrough.payload_size_bytes",
            "namespace" => config.namespace.clone(),
            "taskname" => config.taskname.clone()
        )
        .record(payload.len() as f64);

        Ok(InflightActivation {
            id,
            activation: activation_bytes,
            status: InflightActivationStatus::Pending,
            partition: msg.partition(),
            offset: msg.offset(),
            added_at: now,
            received_at: now,
            processing_deadline: None,
            claim_expires_at: None,
            processing_deadline_duration: deadline_i32,
            processing_attempts: 0,
            expires_at: None,
            delay_until: None,
            at_most_once: false,
            application: config.application.clone(),
            namespace: config.namespace.clone(),
            taskname: config.taskname.clone(),
            on_attempts_exceeded: OnAttemptsExceeded::Discard,
            bucket,
        })
    }
}

#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use rdkafka::Timestamp;
    use rdkafka::message::OwnedMessage;

    use super::*;

    /// Passthrough settings shared by the deserializer tests.
    fn fixture_config() -> PassthroughConfig {
        PassthroughConfig {
            namespace: "test-namespace".to_string(),
            application: "test-app".to_string(),
            taskname: "test-task".to_string(),
            processing_deadline_duration: 60,
        }
    }

    /// A Kafka message with the given payload at the given offset.
    fn fixture_message(payload: Option<Vec<u8>>, offset: i64) -> OwnedMessage {
        OwnedMessage::new(
            payload,
            None,
            "legacy-topic".into(),
            Timestamp::now(),
            0,
            offset,
            None,
        )
    }

    #[test]
    fn test_encode_passthrough_params() {
        use serde::Deserialize;

        // Mirror of the serialization envelope, for round-tripping.
        #[derive(Deserialize, Debug)]
        struct Params {
            args: (Vec<u8>,),
            kwargs: HashMap<(), ()>,
        }

        let input = b"hello world";
        let encoded = encode_passthrough_params(input).unwrap();

        // Decode back out of msgpack and check both halves of the envelope.
        let decoded: Params = rmp_serde::from_slice(&encoded).unwrap();
        assert_eq!(decoded.args.0, input);
        assert!(decoded.kwargs.is_empty());
    }

    #[test]
    fn test_passthrough_deserializer() {
        let deserialize = new(fixture_config());
        let raw_payload = b"raw kafka message bytes";

        let result = deserialize(Arc::new(fixture_message(Some(raw_payload.to_vec()), 42)));
        assert!(result.is_ok());

        let inflight = result.unwrap();
        assert_eq!(inflight.namespace, "test-namespace");
        assert_eq!(inflight.application, "test-app");
        assert_eq!(inflight.taskname, "test-task");
        assert_eq!(inflight.processing_deadline_duration, 60);
        assert_eq!(inflight.offset, 42);
        assert_eq!(inflight.status, InflightActivationStatus::Pending);

        // The stored bytes must decode back into a matching TaskActivation.
        let activation = TaskActivation::decode(inflight.activation.as_slice()).unwrap();
        assert_eq!(activation.namespace, "test-namespace");
        assert_eq!(activation.application, Some("test-app".to_string()));
        assert_eq!(activation.taskname, "test-task");
        assert!(!activation.parameters_bytes.is_empty());
    }

    #[test]
    fn test_passthrough_deserializer_empty_payload() {
        let deserialize = new(fixture_config());

        // Messages without a payload must be rejected rather than wrapped.
        let result = deserialize(Arc::new(fixture_message(None, 0)));
        assert!(result.is_err());
        assert!(result.unwrap_err().to_string().contains("no payload"));
    }
}
2 changes: 2 additions & 0 deletions src/kafka/mod.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
pub mod admin;
pub mod consumer;
pub mod deserialize;
pub mod deserialize_activation;
pub mod deserialize_passthrough;
pub mod inflight_activation_batcher;
pub mod inflight_activation_writer;
pub mod os_stream_writer;
5 changes: 2 additions & 3 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,8 +20,7 @@ use taskbroker::grpc::metrics_middleware::MetricsLayer;
use taskbroker::grpc::server::TaskbrokerServer;
use taskbroker::kafka::admin::create_missing_topics;
use taskbroker::kafka::consumer::start_consumer;
use taskbroker::kafka::deserialize_activation;
use taskbroker::kafka::deserialize_activation::DeserializeActivationConfig;
use taskbroker::kafka::deserialize::{self, DeserializeConfig};
use taskbroker::kafka::inflight_activation_batcher::{
ActivationBatcherConfig, InflightActivationBatcher,
};
Expand Down Expand Up @@ -174,7 +173,7 @@ async fn main() -> Result<(), Error> {
),

map:
deserialize_activation::new(DeserializeActivationConfig::from_config(&consumer_config)),
deserialize::new(DeserializeConfig::from_config(&consumer_config)),

reduce:
InflightActivationBatcher::new(
Expand Down
Loading