Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 22 additions & 21 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ nix = { version = "0.29", features = ["ioctl"] }
# Enable vendored OpenSSL for cross-compilation to musl targets
# This ensures OpenSSL builds from source with musl compatibility
openssl = { version = "0.10", features = ["vendored"] }
xz2 = "0.1"
liblzma = { version = "0.4", features = ["parallel"] }

[dev-dependencies]
http-body = "1.0.1"
Expand Down
77 changes: 77 additions & 0 deletions src/fls/decompress.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
use crate::fls::byte_channel::ByteBoundedReceiver;
use crate::fls::compression::Compression;
use crate::fls::stream_utils::ChannelReader;
use bytes::Bytes;
use std::io::Read;
use tokio::io::AsyncReadExt;
use tokio::process::{Child, Command};
use tokio::sync::mpsc;
Expand Down Expand Up @@ -75,6 +80,78 @@ fn spawn_decompressor(
Ok((process, cmd))
}

/// Infer the compression codec of a remote file from the extension in `url`.
///
/// Query strings and fragments are stripped before inspecting the path, and
/// the extension comparison is case-insensitive. Anything unrecognized (or a
/// path with no extension) maps to `Compression::None`.
pub(crate) fn get_compression_from_url(url: &str) -> Compression {
    // Cut off the query string first, then any fragment, so neither can
    // masquerade as part of the file extension.
    let path = match url.split_once('?') {
        Some((head, _)) => head,
        None => url,
    };
    let path = match path.split_once('#') {
        Some((head, _)) => head,
        None => path,
    };
    // `rsplit` always yields at least one item, so the fallback never fires.
    let ext = path.rsplit('.').next().unwrap_or(path).to_lowercase();
    if ext == "gz" {
        Compression::Gzip
    } else if ext == "xz" {
        Compression::Xz
    } else if ext == "zst" || ext == "zstd" {
        Compression::Zstd
    } else {
        Compression::None
    }
}

/// Output of [`start_inprocess_decompressor`]: a bounded receiver of
/// decompressed chunks, plus the worker thread's join handle. The handle
/// resolves to `Err(message)` when decompression fails mid-stream.
type DecompressorResult = (
    mpsc::Receiver<Vec<u8>>,
    std::thread::JoinHandle<Result<(), String>>,
);

/// Spawn a dedicated OS thread that decompresses bytes arriving on
/// `buffer_rx` and forwards the plaintext chunks over a bounded channel.
///
/// * `buffer_rx` — byte-bounded stream of compressed input chunks.
/// * `compression` — codec to decode; Zstd is not supported in-process and
///   surfaces as an `Err` on the returned join handle.
/// * `consumed_progress_tx` — receives running counts of consumed input
///   bytes for progress reporting.
///
/// Returns the decompressed-chunk receiver and the worker's join handle, or
/// an error if the thread could not be spawned.
pub(crate) fn start_inprocess_decompressor(
    buffer_rx: ByteBoundedReceiver<Bytes>,
    compression: Compression,
    consumed_progress_tx: mpsc::UnboundedSender<u64>,
) -> Result<DecompressorResult, Box<dyn std::error::Error>> {
    // Small bounded channel: applies backpressure when the downstream
    // consumer falls behind.
    let (decompressed_tx, decompressed_rx) = mpsc::channel::<Vec<u8>>(8);

    // Entire decompression pipeline runs inside this closure on its own
    // thread, since the decoders perform blocking reads.
    let worker = move || -> Result<(), String> {
        let reader =
            ChannelReader::new_byte_bounded(buffer_rx).with_progress(consumed_progress_tx);

        let mut decoder: Box<dyn Read + Send> = match compression {
            Compression::Xz => {
                let num_threads = std::thread::available_parallelism()
                    .map(|n| n.get() as u32)
                    .unwrap_or(2);
                eprintln!("XZ decompression: using {} threads", num_threads);
                // Multi-threaded xz decoder; memory limits are disabled so
                // threading is never silently downgraded.
                let stream = liblzma::stream::MtStreamBuilder::new()
                    .threads(num_threads)
                    .memlimit_threading(u64::MAX)
                    .memlimit_stop(u64::MAX)
                    .decoder()
                    .map_err(|e| format!("Failed to create MT XZ decoder: {}", e))?;
                Box::new(liblzma::read::XzDecoder::new_stream(reader, stream))
            }
            Compression::Gzip => Box::new(flate2::read::GzDecoder::new(reader)),
            Compression::None => Box::new(reader),
            Compression::Zstd => {
                return Err("Zstd in-process decompression is not supported".to_string());
            }
        };

        // Reuse one 8 MiB scratch buffer; each chunk sent downstream is an
        // owned copy of the bytes actually read.
        let mut chunk = vec![0u8; 8 * 1024 * 1024];
        loop {
            match decoder.read(&mut chunk) {
                // EOF: clean end of the compressed stream.
                Ok(0) => break,
                Ok(n) => {
                    if decompressed_tx.blocking_send(chunk[..n].to_vec()).is_err() {
                        return Err("Writer task closed, stopping decompression".to_string());
                    }
                }
                Err(e) => return Err(format!("Decompression error: {}", e)),
            }
        }
        Ok(())
    };

    let handle = std::thread::Builder::new()
        .name("decompressor".to_string())
        .spawn(worker)
        .map_err(|e| -> Box<dyn std::error::Error> {
            format!("Failed to spawn decompressor thread: {}", e).into()
        })?;

    Ok((decompressed_rx, handle))
}

pub(crate) async fn spawn_stderr_reader(
mut stderr: tokio::process::ChildStderr,
error_tx: mpsc::UnboundedSender<String>,
Expand Down
Loading
Loading