diff --git a/src/cli.rs b/src/cli.rs
index dde71420..9a43e77a 100644
--- a/src/cli.rs
+++ b/src/cli.rs
@@ -843,6 +843,42 @@ pub fn cli() -> Command {
                     .help("Set timeout for download in seconds")
                     .value_parser(clap::value_parser!(u64))
                 )
+
+                .arg(Arg::new("recursive")
+                    .action(ArgAction::SetTrue)
+                    .required(false)
+                    .long("recursive")
+                    .help("Download the sources and all the dependency sources")
+                )
+
+                .arg(Arg::new("image")
+                    .required(false)
+                    .value_name("IMAGE NAME")
+                    .short('I')
+                    .long("image")
+                    .help("Name of the Docker image to use")
+                    .long_help(indoc::indoc!(r#"
+                        Name of the Docker image to use.
+
+                        Required because tree might look different on different images because of
+                        conditions on dependencies.
+                    "#))
+                )
+
+                .arg(Arg::new("env")
+                    .required(false)
+                    .action(ArgAction::Append)
+                    .short('E')
+                    .long("env")
+                    .value_parser(env_pass_validator)
+                    .help("Additional env to be passed when building packages")
+                    .long_help(indoc::indoc!(r#"
+                        Additional env to be passed when building packages.
+
+                        Required because tree might look different on different images because of
+                        conditions on dependencies.
+                    "#))
+                )
             )
             .subcommand(Command::new("of")
                 .about("Get the paths of the sources of a package")
diff --git a/src/commands/source/download.rs b/src/commands/source/download.rs
index 73244eac..ed21d632 100644
--- a/src/commands/source/download.rs
+++ b/src/commands/source/download.rs
@@ -8,30 +8,52 @@
 // SPDX-License-Identifier: EPL-2.0
 //
 
+use std::collections::HashSet;
 use std::concat;
+use std::fmt;
 use std::path::PathBuf;
 use std::sync::Arc;
 
-use anyhow::anyhow;
-use anyhow::Context;
-use anyhow::Error;
-use anyhow::Result;
+use anyhow::{anyhow, Context, Error, Result};
+use ascii_table::{Align, AsciiTable};
 use clap::ArgMatches;
+use futures::stream::{FuturesOrdered, StreamExt};
+use regex::Regex;
 use tokio::io::AsyncWriteExt;
-use tokio::sync::Mutex;
-use tokio_stream::StreamExt;
-use tracing::{info, trace, warn};
+use tokio::sync::{Mutex, Semaphore};
+use tracing::{debug, error, info, trace, warn};
 
-use crate::config::*;
-use crate::package::PackageName;
-use crate::package::PackageVersionConstraint;
+use crate::config::Configuration;
+use crate::package::condition::ConditionData;
+use crate::package::{Dag, Package, PackageName, PackageVersionConstraint};
 use crate::repository::Repository;
-use crate::source::*;
+use crate::source::{SourceCache, SourceEntry};
+use crate::util::docker::ImageNameLookup;
 use crate::util::progress::ProgressBars;
+use crate::util::EnvironmentVariableName;
 
 const NUMBER_OF_MAX_CONCURRENT_DOWNLOADS: usize = 100;
 const APP_USER_AGENT: &str = concat! {env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION")};
 
+#[derive(Clone, Debug)]
+enum DownloadResult {
+    Forced,
+    Skipped,
+    Succeeded,
+    MarkedManual,
+}
+
+impl fmt::Display for DownloadResult {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            DownloadResult::Forced => write!(f, "forced"),
+            DownloadResult::Skipped => write!(f, "skipped"),
+            DownloadResult::Succeeded => write!(f, "succeeded"),
+            DownloadResult::MarkedManual => write!(f, "marked manual"),
+        }
+    }
+}
+
 /// A wrapper around the indicatif::ProgressBar
 ///
 /// A wrapper around the indicatif::ProgressBar that is used to synchronize status information from
@@ -63,28 +85,28 @@ impl ProgressWrapper {
 
     async fn inc_download_count(&mut self) {
         self.download_count += 1;
-        self.set_message().await;
+        self.show_progress().await;
         let bar = self.bar.lock().await;
         bar.inc_length(1);
     }
 
     async fn inc_download_bytes(&mut self, bytes: u64) {
         self.sum_bytes += bytes;
-        self.set_message().await;
+        self.show_progress().await;
     }
 
     async fn finish_one_download(&mut self) {
         self.finished_downloads += 1;
         self.bar.lock().await.inc(1);
-        self.set_message().await;
+        self.show_progress().await;
     }
 
     async fn add_bytes(&mut self, len: usize) {
         self.current_bytes += len;
-        self.set_message().await;
+        self.show_progress().await;
     }
 
-    async fn set_message(&self) {
+    async fn show_progress(&self) {
         let bar = self.bar.lock().await;
         bar.set_message(format!("Downloading ({current_bytes}/{sum_bytes} bytes, {dlfinished}/{dlsum} downloads finished)",
             current_bytes = self.current_bytes,
@@ -92,22 +114,12 @@ impl ProgressWrapper {
             dlfinished = self.finished_downloads,
             dlsum = self.download_count));
     }
+}
 
-    async fn success(&self) {
-        let bar = self.bar.lock().await;
-        bar.finish_with_message(format!(
-            "Succeeded {}/{} downloads",
-            self.finished_downloads, self.download_count
-        ));
-    }
-
-    async fn error(&self) {
-        let bar = self.bar.lock().await;
-        bar.finish_with_message(format!(
-            "At least one download of {} failed",
-            self.download_count
-        ));
-    }
+struct DownloadJob {
+    package: Package,
+    source_entry: SourceEntry,
+    download_result: Result<DownloadResult>,
 }
 
 async fn perform_download(
@@ -197,6 +209,77 @@ async fn perform_download(
     file.flush().await.map_err(Error::from).map(|_| ())
 }
 
+async fn download_source_entry(
+    source: &SourceEntry,
+    download_sema: Arc<Semaphore>,
+    progressbar: Arc<Mutex<ProgressWrapper>>,
+    force: bool,
+    timeout: Option<u64>,
+) -> Result<DownloadResult> {
+    let source_path_exists = source.path().exists();
+    if !source_path_exists && source.download_manually() {
+        return Ok(DownloadResult::MarkedManual);
+    }
+
+    if source_path_exists && !force {
+        info!("Source already exists: {}", source.path().display());
+        return Ok(DownloadResult::Skipped);
+    }
+
+    {
+        if source_path_exists && force {
+            source.remove_file().await?;
+        }
+
+        // perform the download
+        progressbar.lock().await.inc_download_count().await;
+        {
+            let permit = download_sema.acquire_owned().await?;
+            perform_download(source, progressbar.clone(), timeout).await?;
+            drop(permit);
+        }
+        progressbar.lock().await.finish_one_download().await;
+
+        if source_path_exists && force {
+            Ok(DownloadResult::Forced)
+        } else {
+            Ok(DownloadResult::Succeeded)
+        }
+    }
+}
+
+fn find_packages(
+    repo: &Repository,
+    pname: Option<PackageName>,
+    pvers: Option<PackageVersionConstraint>,
+    matching_regexp: Option<Regex>,
+) -> Result<Vec<&Package>, anyhow::Error> {
+    let packages: Vec<&Package> = repo.packages()
+        .filter(|p| {
+            match (pname.as_ref(), pvers.as_ref(), matching_regexp.as_ref()) {
+                (None, None, None) => true,
+                (Some(pname), None, None) => p.name() == pname,
+                (Some(pname), Some(vers), None) => p.name() == pname && vers.matches(p.version()),
+                (None, None, Some(regex)) => regex.is_match(p.name()),
+                (_, _, _) => {
+                    panic!("This should not be possible, either we select packages by name and (optionally) version, or by regex.")
+                },
+            }
+        })
+        .collect();
+
+    if packages.is_empty() {
+        return match (pname, pvers, matching_regexp) {
+            (Some(pname), None, None) => Err(anyhow!("{} not found", pname)),
+            (Some(pname), Some(vers), None) => Err(anyhow!("{} {} not found", pname, vers)),
+            (None, None, Some(regex)) => Err(anyhow!("{} regex not found", regex)),
+            (_, _, _) => panic!("This should not be possible, either we select packages by name and (optionally) version, or by regex."),
+        };
+    }
+
+    Ok(packages)
+}
+
 // Implementation of the 'source download' subcommand
 pub async fn download(
     matches: &ArgMatches,
@@ -205,6 +288,7 @@ pub async fn download(
     progressbars: ProgressBars,
 ) -> Result<()> {
     let force = matches.get_flag("force");
+    let recursive = matches.get_flag("recursive");
     let timeout = matches.get_one::<u64>("timeout").copied();
     let cache = PathBuf::from(config.source_cache_root());
     let sc = SourceCache::new(cache);
@@ -229,88 +313,160 @@ pub async fn download(
         NUMBER_OF_MAX_CONCURRENT_DOWNLOADS,
     ));
 
-    let mut r = repo.packages()
-        .filter(|p| {
-            match (pname.as_ref(), pvers.as_ref(), matching_regexp.as_ref()) {
-                (None, None, None) => true,
-                (Some(pname), None, None) => p.name() == pname,
-                (Some(pname), Some(vers), None) => p.name() == pname && vers.matches(p.version()),
-                (None, None, Some(regex)) => regex.is_match(p.name()),
-
-                (_, _, _) => {
-                    panic!("This should not be possible, either we select packages by name and (optionally) version, or by regex.")
-                },
-            }
-        }).peekable();
-
-    // check if the iterator is empty
-    if r.peek().is_none() {
-        let pname = matches.get_one::<String>("package_name");
-        let pvers = matches.get_one::<String>("package_version");
-        let matching_regexp = matches.get_one::<String>("matching");
-
-        match (pname, pvers, matching_regexp) {
-            (Some(pname), None, None) => return Err(anyhow!("{} not found", pname)),
-            (Some(pname), Some(vers), None) => return Err(anyhow!("{} {} not found", pname, vers)),
-            (None, None, Some(regex)) => return Err(anyhow!("{} regex not found", regex)),
-
-            (_, _, _) => {
-                panic!("This should not be possible, either we select packages by name and (optionally) version, or by regex.")
-            }
+    let found_packages = find_packages(&repo, pname, pvers, matching_regexp)?;
+
+    let packages_to_download: HashSet<Package> = match recursive {
+        true => {
+            debug!("Finding package dependencies recursively");
+
+            let image_name_lookup = ImageNameLookup::create(config.docker().images())?;
+            let image_name = matches
+                .get_one::<String>("image")
+                .map(|s| image_name_lookup.expand(s))
+                .transpose()?;
+
+            let additional_env = matches
+                .get_many::<String>("env")
+                .unwrap_or_default()
+                .map(AsRef::as_ref)
+                .map(crate::util::env::parse_to_env)
+                .collect::<Result<Vec<(EnvironmentVariableName, String)>>>()?;
+
+            let condition_data = ConditionData {
+                image_name: image_name.as_ref(),
+                env: &additional_env,
+            };
+
+            let dependencies: Vec<Package> = found_packages
+                .iter()
+                .flat_map(|package| {
+                    Dag::for_root_package((*package).clone(), &repo, None, &condition_data)
+                        .map(|d| d.dag().graph().node_weights().cloned().collect::<Vec<Package>>())
+                        .unwrap()
+                })
+                .collect();
+
+            HashSet::from_iter(dependencies)
         }
-    }
+        false => HashSet::from_iter(found_packages.into_iter().cloned()),
+    };
 
-    let r = r
+    let download_results: Vec<(SourceEntry, Result<DownloadResult>)> = packages_to_download
+        .iter()
         .flat_map(|p| {
+            //download the sources and wait for all packages to finish
            sc.sources_for(p).into_iter().map(|source| {
                 let download_sema = download_sema.clone();
                 let progressbar = progressbar.clone();
                 async move {
-                    let source_path_exists = source.path().exists();
-                    if !source_path_exists && source.download_manually() {
-                        return Err(anyhow!(
-                            "Cannot download source that is marked for manual download"
-                        ))
-                        .context(anyhow!("Creating source: {}", source.path().display()))
-                        .context(anyhow!("Downloading source: {}", source.url()))
-                        .map_err(Error::from);
-                    }
-
-                    if source_path_exists && !force {
-                        Err(anyhow!("Source exists: {}", source.path().display()))
-                    } else {
-                        if source_path_exists
-                        /* && force is implied by 'if' above*/
-                        {
-                            source.remove_file().await?;
-                        }
-
-                        progressbar.lock().await.inc_download_count().await;
-                        {
-                            let permit = download_sema.acquire_owned().await?;
-                            perform_download(&source, progressbar.clone(), timeout).await?;
-                            drop(permit);
-                        }
-                        progressbar.lock().await.finish_one_download().await;
-                        Ok(())
-                    }
+                    let download_result =
+                        download_source_entry(&source, download_sema, progressbar, force, timeout)
+                            .await;
+                    (source, download_result)
                 }
             })
         })
-        .collect::<futures::stream::FuturesUnordered<_>>()
-        .collect::<Vec<Result<()>>>()
-        .await
+        .collect::<FuturesOrdered<_>>()
+        .collect()
+        .await;
+
+    let mut r: Vec<DownloadJob> = download_results
         .into_iter()
-        .collect::<Result<()>>();
+        .zip(packages_to_download)
+        .map(|r| {
+            let download_result = r.0;
+            let package = r.1;
+            DownloadJob {
+                package,
+                source_entry: download_result.0,
+                download_result: download_result.1,
+            }
+        })
+        .collect();
+
+    {
+        let mut ascii_table = AsciiTable::default();
+        ascii_table.set_max_width(
+            terminal_size::terminal_size()
+                .map(|tpl| tpl.0 .0 as usize)
+                .unwrap_or(80),
+        );
+        ascii_table.column(0).set_header("#").set_align(Align::Left);
+        ascii_table
+            .column(1)
+            .set_header("Package name")
+            .set_align(Align::Left);
+        ascii_table
+            .column(2)
+            .set_header("Version")
+            .set_align(Align::Left);
+        ascii_table
+            .column(3)
+            .set_header("Source name")
+            .set_align(Align::Left);
+        ascii_table
+            .column(4)
+            .set_header("Status")
+            .set_align(Align::Left);
+        ascii_table
+            .column(5)
+            .set_header("Path")
+            .set_align(Align::Left);
+
+        let numbers: Vec<usize> = (0..r.len()).map(|n| n + 1).collect();
+        r.sort_by(|a, b| {
+            a.source_entry
+                .package_name()
+                .partial_cmp(b.source_entry.package_name())
+                .unwrap()
+        });
+        let source_paths: Vec<String> = r.iter().map(|v| v.source_entry.path_as_string()).collect();
+
+        let data: Vec<Vec<&dyn fmt::Display>> = r
+            .iter()
+            .enumerate()
+            .map(|(i, v)| {
+                debug!("source_entry: {:#?}", v.source_entry);
+                let n = &numbers[i];
+                let mut row: Vec<&dyn fmt::Display> = vec![
+                    n,
+                    v.source_entry.package_name(),
+                    v.source_entry.package_version(),
+                    v.source_entry.package_source_name(),
+                ];
+                if v.download_result.is_ok() {
+                    let result = v.download_result.as_ref().unwrap() as &dyn fmt::Display;
+                    row.push(result);
+                } else {
+                    row.push(&"failed");
+                }
+                row.push(&source_paths[i]);
+                row
+            })
+            .collect();
 
-    if r.is_err() {
-        progressbar.lock().await.error().await;
-        return r;
-    } else {
-        progressbar.lock().await.success().await;
+        ascii_table.print(data);
+    }
+
+    for p in &r {
+        if p.download_result.is_err() {
+            error!("{}: {:?}", p.source_entry.package_name(), p.download_result);
+        }
     }
 
-    super::verify(matches, config, repo, progressbars).await?;
+    let packages_to_verify = r.iter().filter_map(|j| {
+        if let Ok(r) = &j.download_result {
+            match r {
+                DownloadResult::MarkedManual => None,
+                _ => Some(&j.package),
+            }
+        } else {
+            None
+        }
+    });
+
+    let sc = SourceCache::new(config.source_cache_root().clone());
+    super::verify_impl(packages_to_verify, &sc, &progressbars).await?;
 
     Ok(())
 }
diff --git a/src/source/mod.rs b/src/source/mod.rs
index 8e1fa830..8e9306ed 100644
--- a/src/source/mod.rs
+++ b/src/source/mod.rs
@@ -14,6 +14,7 @@ use anyhow::anyhow;
 use anyhow::Context;
 use anyhow::Error;
 use anyhow::Result;
+use getset::Getters;
 use tracing::trace;
 use url::Url;
 
@@ -37,11 +38,14 @@ impl SourceCache {
     }
 }
 
-#[derive(Debug)]
+#[derive(Debug, Getters)]
 pub struct SourceEntry {
     cache_root: PathBuf,
+    #[getset(get = "pub")]
     package_name: PackageName,
+    #[getset(get = "pub")]
     package_version: PackageVersion,
+    #[getset(get = "pub")]
     package_source_name: String,
     package_source: Source,
 }
@@ -73,6 +77,10 @@ impl SourceEntry {
         })
     }
 
+    pub fn path_as_string(&self) -> String {
+        self.path().to_string_lossy().to_string()
+    }
+
     pub fn url(&self) -> &Url {
         self.package_source.url()
     }
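
A note on the concurrency pattern used by `download_source_entry` above: the number of simultaneous downloads is capped by a `tokio::sync::Semaphore`, and each download holds an owned permit for its duration. The sketch below is a minimal, self-contained illustration of that pattern; `fake_download`, `MAX_CONCURRENT`, and the assumed `tokio` dependency (with the `full` feature set) are placeholders for illustration only, not butido code.

```rust
use std::sync::Arc;
use std::time::Duration;

use tokio::sync::Semaphore;

// Illustrative stand-in for the real download work; the name is hypothetical.
async fn fake_download(idx: usize) {
    tokio::time::sleep(Duration::from_millis(10)).await;
    println!("finished download {idx}");
}

#[tokio::main]
async fn main() {
    // At most MAX_CONCURRENT permits exist, so at most that many downloads
    // can run at the same time.
    const MAX_CONCURRENT: usize = 100;
    let sema = Arc::new(Semaphore::new(MAX_CONCURRENT));

    let mut handles = Vec::new();
    for idx in 0..1_000 {
        // acquire_owned() ties the permit to the Arc, so it can be moved
        // into the spawned task and released by dropping it there.
        let permit = sema.clone().acquire_owned().await.unwrap();
        handles.push(tokio::spawn(async move {
            fake_download(idx).await;
            drop(permit);
        }));
    }

    for handle in handles {
        handle.await.unwrap();
    }
}
```

Keeping the permit alive inside the task (rather than releasing it before spawning) is what makes the cap effective; this mirrors how the patch acquires the permit around `perform_download` and drops it immediately afterwards.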