From 6a247b05bc3e6a7f79a2d7ceec708cf80f3f575f Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Tue, 26 Nov 2024 11:23:31 +0100 Subject: [PATCH 01/10] feat: add max_solve_group to the global and cli configuration and let it trickle down to the update builder --- crates/pixi_config/src/lib.rs | 36 +++++++++++++++++++ .../pixi_config__tests__config_merge.snap | 2 ++ src/cli/install.rs | 9 +++-- src/cli/list.rs | 2 +- src/cli/project/channel/add.rs | 14 +++++--- src/cli/project/channel/list.rs | 11 ++++-- src/cli/project/channel/mod.rs | 31 +++++++--------- src/cli/project/channel/remove.rs | 13 ++++--- src/cli/project/export/conda_explicit_spec.rs | 2 +- src/cli/project/platform/add.rs | 9 +++-- src/cli/project/platform/remove.rs | 9 +++-- src/cli/remove.rs | 9 +++-- src/cli/run.rs | 1 + src/cli/shell.rs | 9 +++-- src/cli/shell_hook.rs | 9 +++-- src/cli/tree.rs | 2 +- src/cli/update.rs | 7 ++++ src/environment.rs | 8 ++--- src/lock_file/mod.rs | 2 +- src/lock_file/update.rs | 2 +- tests/integration_rust/common/builders.rs | 3 -- tests/integration_rust/common/mod.rs | 23 ++++++++++-- tests/integration_rust/install_tests.rs | 9 +++-- 23 files changed, 156 insertions(+), 66 deletions(-) diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index a2f881098..439750488 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -116,6 +116,10 @@ pub struct ConfigCli { /// Specifies if we want to use uv keyring provider #[arg(long)] pypi_keyring_provider: Option, + + /// Max concurrent solves, default is the number of CPUs + #[arg(long, short = 'j', visible_alias = "solve-jobs")] + pub max_concurrent_solves: Option, } #[derive(Parser, Debug, Clone, Default)] @@ -549,6 +553,10 @@ pub struct Config { #[serde(default)] #[serde(skip_serializing_if = "ExperimentalConfig::is_default")] pub experimental: ExperimentalConfig, + + /// Max concurrent solves, defaults + #[serde(skip_serializing_if = "Option::is_none")] + pub max_concurrent_solves: Option, } impl Default for Config { @@ -567,6 +575,7 @@ impl Default for Config { pinning_strategy: Default::default(), force_activate: None, experimental: Default::default(), + max_concurrent_solves: None, } } } @@ -581,6 +590,7 @@ impl From for Config { .map(|val| PyPIConfig::default().with_keyring(val)) .unwrap_or_default(), detached_environments: None, + max_concurrent_solves: cli.max_concurrent_solves, ..Default::default() } } @@ -798,6 +808,7 @@ impl Config { "mirrors", "detached-environments", "pinning-strategy", + "max-concurrent-solves", "repodata-config", "repodata-config.disable-jlap", "repodata-config.disable-bzip2", @@ -838,6 +849,7 @@ impl Config { pinning_strategy: other.pinning_strategy.or(self.pinning_strategy), force_activate: other.force_activate, experimental: other.experimental.merge(self.experimental), + max_concurrent_solves: other.max_concurrent_solves.or(self.max_concurrent_solves), } } @@ -903,6 +915,11 @@ impl Config { self.experimental.use_environment_activation_cache() } + /// Retrieve the value for the max_concurrent_solves field. 
+ pub fn max_concurrent_solves(&self) -> Option { + self.max_concurrent_solves + } + /// Modify this config with the given key and value /// /// # Note @@ -1049,6 +1066,10 @@ impl Config { _ => return Err(err), } } + "max-concurrent-solves" => { + self.max_concurrent_solves = + value.map(|v| v.parse()).transpose().into_diagnostic()?; + } _ => return Err(err), } @@ -1131,6 +1152,7 @@ mod tests { tls-no-verify = true detached-environments = "{}" pinning-strategy = "no-pin" +max-concurrent-solves = 5 UNUSED = "unused" "#, env!("CARGO_MANIFEST_DIR").replace('\\', "\\\\").as_str() @@ -1145,6 +1167,7 @@ UNUSED = "unused" config.detached_environments().path().unwrap(), Some(PathBuf::from(env!("CARGO_MANIFEST_DIR"))) ); + assert_eq!(config.max_concurrent_solves(), Some(5)); assert!(unused.contains("UNUSED")); let toml = r"detached-environments = true"; @@ -1177,6 +1200,7 @@ UNUSED = "unused" tls_no_verify: true, auth_file: None, pypi_keyring_provider: Some(KeyringProvider::Subprocess), + max_concurrent_solves: None, }; let config = Config::from(cli); assert_eq!(config.tls_no_verify, Some(true)); @@ -1189,6 +1213,7 @@ UNUSED = "unused" tls_no_verify: false, auth_file: Some(PathBuf::from("path.json")), pypi_keyring_provider: None, + max_concurrent_solves: None, }; let config = Config::from(cli); @@ -1228,6 +1253,7 @@ UNUSED = "unused" channel_config: ChannelConfig::default_with_root_dir(PathBuf::from("/root/dir")), tls_no_verify: Some(true), detached_environments: Some(DetachedEnvironments::Path(PathBuf::from("/path/to/envs"))), + max_concurrent_solves: Some(5), ..Default::default() }; config = config.merge_config(other); @@ -1261,6 +1287,7 @@ UNUSED = "unused" config.detached_environments().path().unwrap(), Some(PathBuf::from("/path/to/envs2")) ); + assert_eq!(config.max_concurrent_solves(), Some(5)); let d = Path::new(&env!("CARGO_MANIFEST_DIR")) .join("tests") @@ -1436,6 +1463,15 @@ UNUSED = "unused" config.set("change-ps1", None).unwrap(); assert_eq!(config.change_ps1, None); + config + .set("max-concurrent-solves", Some("10".to_string())) + .unwrap(); + assert_eq!(config.max_concurrent_solves(), Some(10)); + config + .set("max-concurrent-solves", Some("1".to_string())) + .unwrap(); + assert_eq!(config.max_concurrent_solves(), Some(1)); + config.set("unknown-key", None).unwrap_err(); } diff --git a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap index 91d5b26d6..b01964d36 100644 --- a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap +++ b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap @@ -1,6 +1,7 @@ --- source: crates/pixi_config/src/lib.rs expression: debug +snapshot_kind: text --- Config { default_channels: [ @@ -72,4 +73,5 @@ Config { experimental: ExperimentalConfig { use_environment_activation_cache: None, }, + max_concurrent_solves: None, } diff --git a/src/cli/install.rs b/src/cli/install.rs index b3299d424..a5a0176c0 100644 --- a/src/cli/install.rs +++ b/src/cli/install.rs @@ -1,7 +1,7 @@ use crate::cli::cli_config::ProjectConfig; use crate::environment::get_update_lock_file_and_prefix; use crate::lock_file::UpdateMode; -use crate::Project; +use crate::{Project, UpdateLockFileOptions}; use clap::Parser; use fancy_display::FancyDisplay; use itertools::Itertools; @@ -55,9 +55,12 @@ pub async fn execute(args: Args) -> miette::Result<()> { // Update the prefix by installing all packages get_update_lock_file_and_prefix( &environment, - 
args.lock_file_usage.into(), - false, UpdateMode::Revalidate, + UpdateLockFileOptions { + lock_file_usage: args.lock_file_usage.into(), + no_install: false, + max_concurrent_solves: project.config().max_concurrent_solves, + }, ) .await?; diff --git a/src/cli/list.rs b/src/cli/list.rs index 0d1d987fc..e7d2d11ff 100644 --- a/src/cli/list.rs +++ b/src/cli/list.rs @@ -149,7 +149,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { .update_lock_file(UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), no_install: args.prefix_update_config.no_install, - ..UpdateLockFileOptions::default() + max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, }) .await?; diff --git a/src/cli/project/channel/add.rs b/src/cli/project/channel/add.rs index e27cff66a..cfe475936 100644 --- a/src/cli/project/channel/add.rs +++ b/src/cli/project/channel/add.rs @@ -1,12 +1,15 @@ use crate::{ environment::{get_update_lock_file_and_prefix, LockFileUsage}, lock_file::UpdateMode, - Project, + Project, UpdateLockFileOptions, }; use super::AddRemoveArgs; -pub async fn execute(mut project: Project, args: AddRemoveArgs) -> miette::Result<()> { +pub async fn execute(args: AddRemoveArgs) -> miette::Result<()> { + let mut project = Project::load_or_else_discover(args.project_config.manifest_path.as_deref())? + .with_cli_config(args.clone().prefix_update_config.config); + // Add the channels to the manifest project.manifest.add_channels( args.prioritized_channels(), @@ -17,9 +20,12 @@ pub async fn execute(mut project: Project, args: AddRemoveArgs) -> miette::Resul // TODO: Update all environments touched by the features defined. get_update_lock_file_and_prefix( &project.default_environment(), - LockFileUsage::Update, - args.no_install, UpdateMode::Revalidate, + UpdateLockFileOptions { + lock_file_usage: LockFileUsage::Update, + no_install: args.prefix_update_config.no_install(), + max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, + }, ) .await?; project.save()?; diff --git a/src/cli/project/channel/list.rs b/src/cli/project/channel/list.rs index 94a1c764b..881ded5f9 100644 --- a/src/cli/project/channel/list.rs +++ b/src/cli/project/channel/list.rs @@ -1,18 +1,23 @@ use clap::Parser; use miette::IntoDiagnostic; -use crate::Project; +use crate::{cli::cli_config::ProjectConfig, Project}; use fancy_display::FancyDisplay; use pixi_manifest::FeaturesExt; -#[derive(Parser, Debug, Default)] +#[derive(Parser, Debug, Default, Clone)] pub struct Args { + #[clap(flatten)] + pub project_config: ProjectConfig, /// Whether to display the channel's names or urls #[clap(long)] pub urls: bool, } -pub(crate) fn execute(project: Project, args: Args) -> miette::Result<()> { +pub(crate) fn execute(args: Args) -> miette::Result<()> { + // Project without cli config as it shouldn't be needed here. 
+ let project = Project::load_or_else_discover(args.project_config.manifest_path.as_deref())?; + let channel_config = project.channel_config(); project .environments() diff --git a/src/cli/project/channel/mod.rs b/src/cli/project/channel/mod.rs index 8f9baecf5..d4cf54c82 100644 --- a/src/cli/project/channel/mod.rs +++ b/src/cli/project/channel/mod.rs @@ -2,27 +2,24 @@ pub mod add; pub mod list; pub mod remove; -use crate::Project; +use crate::cli::cli_config::{PrefixUpdateConfig, ProjectConfig}; use clap::Parser; use miette::IntoDiagnostic; use pixi_manifest::{FeatureName, PrioritizedChannel}; use rattler_conda_types::{ChannelConfig, NamedChannelOrUrl}; -use std::path::PathBuf; /// Commands to manage project channels. -#[derive(Parser, Debug)] +#[derive(Parser, Debug, Clone)] pub struct Args { - /// The path to `pixi.toml` or `pyproject.toml` - #[clap(long, global = true)] - pub manifest_path: Option, - /// The subcommand to execute #[clap(subcommand)] pub command: Command, } -#[derive(Parser, Debug, Default)] +#[derive(Parser, Debug, Default, Clone)] pub struct AddRemoveArgs { + #[clap(flatten)] + pub project_config: ProjectConfig, /// The channel name or URL #[clap(required = true, num_args=1..)] pub channel: Vec, @@ -31,14 +28,12 @@ pub struct AddRemoveArgs { #[clap(long, num_args = 1)] pub priority: Option, - /// Add the channel(s) to the beginning of the channels list, making them highest priority + /// Add the channel(s) to the beginning of the channels list, making them the highest priority #[clap(long)] pub prepend: bool, - /// Don't update the environment, only modify the manifest and the - /// lock-file. - #[clap(long)] - pub no_install: bool, + #[clap(flatten)] + pub prefix_update_config: PrefixUpdateConfig, /// The name of the feature to modify. #[clap(long, short)] @@ -91,7 +86,7 @@ impl AddRemoveArgs { } } -#[derive(Parser, Debug)] +#[derive(Parser, Debug, Clone)] pub enum Command { /// Adds a channel to the project file and updates the lockfile. #[clap(visible_alias = "a")] @@ -105,11 +100,9 @@ pub enum Command { } pub async fn execute(args: Args) -> miette::Result<()> { - let project = Project::load_or_else_discover(args.manifest_path.as_deref())?; - match args.command { - Command::Add(args) => add::execute(project, args).await, - Command::List(args) => list::execute(project, args), - Command::Remove(args) => remove::execute(project, args).await, + Command::Add(add_args) => add::execute(add_args).await, + Command::List(args) => list::execute(args), + Command::Remove(remove_args) => remove::execute(remove_args).await, } } diff --git a/src/cli/project/channel/remove.rs b/src/cli/project/channel/remove.rs index 901a98d90..424adad4a 100644 --- a/src/cli/project/channel/remove.rs +++ b/src/cli/project/channel/remove.rs @@ -1,12 +1,14 @@ use crate::lock_file::UpdateMode; use crate::{ environment::{get_update_lock_file_and_prefix, LockFileUsage}, - Project, + Project, UpdateLockFileOptions, }; use super::AddRemoveArgs; -pub async fn execute(mut project: Project, args: AddRemoveArgs) -> miette::Result<()> { +pub async fn execute(args: AddRemoveArgs) -> miette::Result<()> { + let mut project = Project::load_or_else_discover(args.project_config.manifest_path.as_deref())? 
+ .with_cli_config(args.clone().prefix_update_config.config); // Remove the channels from the manifest project .manifest @@ -15,9 +17,12 @@ pub async fn execute(mut project: Project, args: AddRemoveArgs) -> miette::Resul // Try to update the lock-file without the removed channels get_update_lock_file_and_prefix( &project.default_environment(), - LockFileUsage::Update, - args.no_install, UpdateMode::Revalidate, + UpdateLockFileOptions { + lock_file_usage: LockFileUsage::Update, + no_install: args.prefix_update_config.no_install(), + max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, + }, ) .await?; project.save()?; diff --git a/src/cli/project/export/conda_explicit_spec.rs b/src/cli/project/export/conda_explicit_spec.rs index 9176a827a..13af41bd4 100644 --- a/src/cli/project/export/conda_explicit_spec.rs +++ b/src/cli/project/export/conda_explicit_spec.rs @@ -146,7 +146,7 @@ pub async fn execute(project: Project, args: Args) -> miette::Result<()> { .update_lock_file(UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), no_install: args.prefix_update_config.no_install, - ..UpdateLockFileOptions::default() + max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, }) .await? .lock_file; diff --git a/src/cli/project/platform/add.rs b/src/cli/project/platform/add.rs index 71ffaa892..29ecfa381 100644 --- a/src/cli/project/platform/add.rs +++ b/src/cli/project/platform/add.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use crate::{ environment::{get_update_lock_file_and_prefix, LockFileUsage}, lock_file::UpdateMode, - Project, + Project, UpdateLockFileOptions, }; use clap::Parser; use miette::IntoDiagnostic; @@ -47,9 +47,12 @@ pub async fn execute(mut project: Project, args: Args) -> miette::Result<()> { // Try to update the lock-file with the new channels get_update_lock_file_and_prefix( &project.default_environment(), - LockFileUsage::Update, - args.no_install, UpdateMode::Revalidate, + UpdateLockFileOptions { + lock_file_usage: LockFileUsage::Update, + no_install: args.no_install, + max_concurrent_solves: project.config().max_concurrent_solves, + }, ) .await?; project.save()?; diff --git a/src/cli/project/platform/remove.rs b/src/cli/project/platform/remove.rs index a5d24c9ea..69e5f7633 100644 --- a/src/cli/project/platform/remove.rs +++ b/src/cli/project/platform/remove.rs @@ -3,7 +3,7 @@ use std::str::FromStr; use crate::lock_file::UpdateMode; use crate::{ environment::{get_update_lock_file_and_prefix, LockFileUsage}, - Project, + Project, UpdateLockFileOptions, }; use clap::Parser; use miette::IntoDiagnostic; @@ -46,9 +46,12 @@ pub async fn execute(mut project: Project, args: Args) -> miette::Result<()> { get_update_lock_file_and_prefix( &project.default_environment(), - LockFileUsage::Update, - args.no_install, UpdateMode::Revalidate, + UpdateLockFileOptions { + lock_file_usage: LockFileUsage::Update, + no_install: args.no_install, + max_concurrent_solves: project.config().max_concurrent_solves, + }, ) .await?; project.save()?; diff --git a/src/cli/remove.rs b/src/cli/remove.rs index 189057e3b..cbea3fdd7 100644 --- a/src/cli/remove.rs +++ b/src/cli/remove.rs @@ -2,8 +2,8 @@ use clap::Parser; use miette::Context; use crate::environment::get_update_lock_file_and_prefix; -use crate::DependencyType; use crate::Project; +use crate::{DependencyType, UpdateLockFileOptions}; use crate::cli::cli_config::{DependencyConfig, PrefixUpdateConfig, ProjectConfig}; use crate::lock_file::UpdateMode; @@ -81,9 +81,12 @@ pub async fn 
execute(args: Args) -> miette::Result<()> { if !prefix_update_config.no_lockfile_update { get_update_lock_file_and_prefix( &project.default_environment(), - prefix_update_config.lock_file_usage(), - prefix_update_config.no_install, UpdateMode::Revalidate, + UpdateLockFileOptions { + lock_file_usage: prefix_update_config.lock_file_usage(), + no_install: prefix_update_config.no_install, + max_concurrent_solves: prefix_update_config.config.max_concurrent_solves, + }, ) .await?; } diff --git a/src/cli/run.rs b/src/cli/run.rs index 5bfee8eb7..c31649958 100644 --- a/src/cli/run.rs +++ b/src/cli/run.rs @@ -93,6 +93,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { let mut lock_file = project .update_lock_file(UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), + max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, ..UpdateLockFileOptions::default() }) .await?; diff --git a/src/cli/shell.rs b/src/cli/shell.rs index 7f8f22496..37b31da79 100644 --- a/src/cli/shell.rs +++ b/src/cli/shell.rs @@ -13,7 +13,7 @@ use crate::lock_file::UpdateMode; use crate::{ activation::CurrentEnvVarBehavior, environment::get_update_lock_file_and_prefix, project::virtual_packages::verify_current_platform_has_required_virtual_packages, prompt, - Project, + Project, UpdateLockFileOptions, }; use pixi_config::{ConfigCliActivation, ConfigCliPrompt}; use pixi_manifest::EnvironmentName; @@ -246,9 +246,12 @@ pub async fn execute(args: Args) -> miette::Result<()> { // Make sure environment is up-to-date, default to install, users can avoid this with frozen or locked. let (lock_file_data, _prefix) = get_update_lock_file_and_prefix( &environment, - args.prefix_update_config.lock_file_usage(), - false, UpdateMode::QuickValidate, + UpdateLockFileOptions { + lock_file_usage: args.prefix_update_config.lock_file_usage(), + no_install: args.prefix_update_config.no_install(), + max_concurrent_solves: project.config().max_concurrent_solves, + }, ) .await?; diff --git a/src/cli/shell_hook.rs b/src/cli/shell_hook.rs index 0f663981c..0494b036e 100644 --- a/src/cli/shell_hook.rs +++ b/src/cli/shell_hook.rs @@ -17,7 +17,7 @@ use crate::{ activation::get_activator, cli::cli_config::{PrefixUpdateConfig, ProjectConfig}, project::{Environment, HasProjectRef}, - Project, + Project, UpdateLockFileOptions, }; /// Print the pixi environment activation script. 
@@ -127,9 +127,12 @@ pub async fn execute(args: Args) -> miette::Result<()> { let (lock_file_data, _prefix) = get_update_lock_file_and_prefix( &environment, - args.prefix_update_config.lock_file_usage(), - false, args.prefix_update_config.update_mode(), + UpdateLockFileOptions { + lock_file_usage: args.prefix_update_config.lock_file_usage(), + no_install: args.prefix_update_config.no_install(), + max_concurrent_solves: project.config().max_concurrent_solves, + }, ) .await?; diff --git a/src/cli/tree.rs b/src/cli/tree.rs index 141252215..6ee002167 100644 --- a/src/cli/tree.rs +++ b/src/cli/tree.rs @@ -81,7 +81,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { .update_lock_file(UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), no_install: args.prefix_update_config.no_install, - ..UpdateLockFileOptions::default() + max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, }) .await .wrap_err("Failed to update lock file")?; diff --git a/src/cli/update.rs b/src/cli/update.rs index e10ecaf3a..f5719f1cf 100644 --- a/src/cli/update.rs +++ b/src/cli/update.rs @@ -2,6 +2,7 @@ use std::{cmp::Ordering, collections::HashSet}; use fancy_display::FancyDisplay; +use crate::lock_file::default_max_concurrent_solves; use crate::{ cli::cli_config::ProjectConfig, diff::{LockFileDiff, LockFileJsonDiff}, @@ -155,10 +156,16 @@ pub async fn execute(args: Args) -> miette::Result<()> { // Unlock dependencies in the lock-file that we want to update. let relaxed_lock_file = unlock_packages(&project, &loaded_lock_file, &specs); + let max_concurrent_solves = project + .config() + .max_concurrent_solves() + .unwrap_or_else(default_max_concurrent_solves); + // Update the packages in the lock-file. let updated_lock_file = UpdateContext::builder(&project) .with_lock_file(relaxed_lock_file.clone()) .with_no_install(args.no_install) + .with_max_concurrent_solves(max_concurrent_solves) .finish()? .update() .await?; diff --git a/src/environment.rs b/src/environment.rs index 2fa369a28..b51ea1e74 100644 --- a/src/environment.rs +++ b/src/environment.rs @@ -391,14 +391,14 @@ impl LockFileUsage { /// `sparse_repo_data` is ignored. 
pub async fn get_update_lock_file_and_prefix<'env>( environment: &Environment<'env>, - lock_file_usage: LockFileUsage, - mut no_install: bool, update_mode: UpdateMode, + update_lock_file_options: UpdateLockFileOptions, ) -> miette::Result<(LockFileDerivedData<'env>, Prefix)> { let current_platform = environment.best_platform(); let project = environment.project(); // Do not install if the platform is not supported + let mut no_install = update_lock_file_options.no_install; if !no_install && !environment.platforms().contains(¤t_platform) { tracing::warn!("Not installing dependency on current platform: ({current_platform}) as it is not part of this project's supported platforms."); no_install = true; @@ -410,9 +410,9 @@ pub async fn get_update_lock_file_and_prefix<'env>( // Ensure that the lock-file is up-to-date let mut lock_file = project .update_lock_file(UpdateLockFileOptions { - lock_file_usage, + lock_file_usage: update_lock_file_options.lock_file_usage, no_install, - ..UpdateLockFileOptions::default() + max_concurrent_solves: update_lock_file_options.max_concurrent_solves, }) .await?; diff --git a/src/lock_file/mod.rs b/src/lock_file/mod.rs index c70aca2e0..39f786430 100644 --- a/src/lock_file/mod.rs +++ b/src/lock_file/mod.rs @@ -19,8 +19,8 @@ pub use satisfiability::{ verify_environment_satisfiability, verify_platform_satisfiability, EnvironmentUnsat, PlatformUnsat, }; +pub use update::{default_max_concurrent_solves, UpdateLockFileOptions, UpdateMode}; pub(crate) use update::{LockFileDerivedData, UpdateContext}; -pub use update::{UpdateLockFileOptions, UpdateMode}; pub(crate) use utils::filter_lock_file; use crate::Project; diff --git a/src/lock_file/update.rs b/src/lock_file/update.rs index dc3e28efa..9a9f9a31c 100644 --- a/src/lock_file/update.rs +++ b/src/lock_file/update.rs @@ -592,7 +592,7 @@ impl<'p> UpdateContext<'p> { } /// Returns the default number of concurrent solves. -fn default_max_concurrent_solves() -> usize { +pub fn default_max_concurrent_solves() -> usize { std::thread::available_parallelism().map_or(1, |n| n.get()) } diff --git a/tests/integration_rust/common/builders.rs b/tests/integration_rust/common/builders.rs index d2cff748a..da1d3e4bf 100644 --- a/tests/integration_rust/common/builders.rs +++ b/tests/integration_rust/common/builders.rs @@ -331,7 +331,6 @@ impl TaskAliasBuilder { } pub struct ProjectChannelAddBuilder { - pub manifest_path: Option, pub args: project::channel::AddRemoveArgs, } @@ -361,7 +360,6 @@ impl IntoFuture for ProjectChannelAddBuilder { fn into_future(self) -> Self::IntoFuture { project::channel::execute(project::channel::Args { - manifest_path: self.manifest_path, command: project::channel::Command::Add(self.args), }) .boxed_local() @@ -394,7 +392,6 @@ impl IntoFuture for ProjectChannelRemoveBuilder { fn into_future(self) -> Self::IntoFuture { project::channel::execute(project::channel::Args { - manifest_path: self.manifest_path, command: project::channel::Command::Remove(self.args), }) .boxed_local() diff --git a/tests/integration_rust/common/mod.rs b/tests/integration_rust/common/mod.rs index e8300a478..49926793b 100644 --- a/tests/integration_rust/common/mod.rs +++ b/tests/integration_rust/common/mod.rs @@ -375,10 +375,18 @@ impl PixiControl { /// Add a new channel to the project. 
pub fn project_channel_add(&self) -> ProjectChannelAddBuilder { ProjectChannelAddBuilder { - manifest_path: Some(self.manifest_path()), args: project::channel::AddRemoveArgs { + project_config: ProjectConfig { + manifest_path: Some(self.manifest_path()), + }, channel: vec![], - no_install: true, + prefix_update_config: PrefixUpdateConfig { + no_lockfile_update: false, + no_install: true, + lock_file_usage: LockFileUsageArgs::default(), + config: Default::default(), + revalidate: false, + }, feature: None, priority: None, prepend: false, @@ -391,8 +399,17 @@ impl PixiControl { ProjectChannelRemoveBuilder { manifest_path: Some(self.manifest_path()), args: project::channel::AddRemoveArgs { + project_config: ProjectConfig { + manifest_path: Some(self.manifest_path()), + }, channel: vec![], - no_install: true, + prefix_update_config: PrefixUpdateConfig { + no_lockfile_update: false, + no_install: true, + lock_file_usage: LockFileUsageArgs::default(), + config: Default::default(), + revalidate: false, + }, feature: None, priority: None, prepend: false, diff --git a/tests/integration_rust/install_tests.rs b/tests/integration_rust/install_tests.rs index 76b369a08..858a5049a 100644 --- a/tests/integration_rust/install_tests.rs +++ b/tests/integration_rust/install_tests.rs @@ -7,7 +7,7 @@ use pixi::cli::cli_config::{PrefixUpdateConfig, ProjectConfig}; use pixi::cli::{run, run::Args, LockFileUsageArgs}; use pixi::environment::LockFileUsage; use pixi::lock_file::UpdateMode; -use pixi::Project; +use pixi::{Project, UpdateLockFileOptions}; use pixi_config::{Config, DetachedEnvironments}; use pixi_consts::consts; use pixi_manifest::{FeatureName, FeaturesExt}; @@ -571,9 +571,12 @@ async fn test_old_lock_install() { .unwrap(); pixi::environment::get_update_lock_file_and_prefix( &project.default_environment(), - LockFileUsage::Update, - false, UpdateMode::Revalidate, + UpdateLockFileOptions { + lock_file_usage: LockFileUsage::Update, + no_install: false, + ..Default::default() + }, ) .await .unwrap(); From e08ca6bc6d9f1b32c9628f96022d949b610a1291 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Tue, 26 Nov 2024 16:48:59 +0100 Subject: [PATCH 02/10] refactor: move into concurrency struct --- crates/pixi_config/src/lib.rs | 75 +++++++++++++++---- .../pixi_config__tests__config_merge.snap | 4 +- src/cli/install.rs | 2 +- src/cli/project/platform/add.rs | 2 +- src/cli/project/platform/remove.rs | 2 +- src/cli/shell.rs | 2 +- src/cli/shell_hook.rs | 2 +- 7 files changed, 67 insertions(+), 22 deletions(-) diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index 439750488..05d52a894 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -1,12 +1,10 @@ use std::{ - cmp::PartialEq, collections::{BTreeSet as Set, HashMap}, fs, path::{Path, PathBuf}, process::{Command, Stdio}, str::FromStr, }; - use clap::{ArgAction, Parser}; use itertools::Itertools; use miette::{miette, Context, IntoDiagnostic}; @@ -337,6 +335,29 @@ impl ExperimentalConfig { } } +#[derive(Debug, Deserialize, Serialize, Default, Clone, PartialEq, Eq)] +#[serde(rename_all = "kebab-case")] +pub struct ConcurrencyConfig { + /// The maximum number of concurrent solves that can be run at once. + #[serde(skip_serializing_if = "Option::is_none")] + pub max_concurrent_solves: Option, +} + + +impl ConcurrencyConfig { + /// Merge the given ConcurrencyConfig into the current one. 
+ pub fn merge(self, other: Self) -> Self { + Self { + max_concurrent_solves: other.max_concurrent_solves.or(self.max_concurrent_solves), + } + } + + pub fn is_default(&self) -> bool { + ConcurrencyConfig::default() == *self + } +} + + impl PyPIConfig { /// Merge the given PyPIConfig into the current one. pub fn merge(self, other: Self) -> Self { @@ -554,9 +575,10 @@ pub struct Config { #[serde(skip_serializing_if = "ExperimentalConfig::is_default")] pub experimental: ExperimentalConfig, - /// Max concurrent solves, defaults - #[serde(skip_serializing_if = "Option::is_none")] - pub max_concurrent_solves: Option, + /// Concurrency configuration for pixi + #[serde(default)] + #[serde(skip_serializing_if = "ConcurrencyConfig::is_default")] + pub concurrency: ConcurrencyConfig, } impl Default for Config { @@ -575,7 +597,7 @@ impl Default for Config { pinning_strategy: Default::default(), force_activate: None, experimental: Default::default(), - max_concurrent_solves: None, + concurrency: Default::default(), } } } @@ -590,7 +612,9 @@ impl From for Config { .map(|val| PyPIConfig::default().with_keyring(val)) .unwrap_or_default(), detached_environments: None, - max_concurrent_solves: cli.max_concurrent_solves, + concurrency: ConcurrencyConfig { + max_concurrent_solves: cli.max_concurrent_solves, + }, ..Default::default() } } @@ -849,7 +873,7 @@ impl Config { pinning_strategy: other.pinning_strategy.or(self.pinning_strategy), force_activate: other.force_activate, experimental: other.experimental.merge(self.experimental), - max_concurrent_solves: other.max_concurrent_solves.or(self.max_concurrent_solves), + concurrency: other.concurrency.merge(self.concurrency), } } @@ -917,7 +941,7 @@ impl Config { /// Retrieve the value for the max_concurrent_solves field. 
pub fn max_concurrent_solves(&self) -> Option { - self.max_concurrent_solves + self.concurrency.max_concurrent_solves } /// Modify this config with the given key and value @@ -1066,9 +1090,26 @@ impl Config { _ => return Err(err), } } - "max-concurrent-solves" => { - self.max_concurrent_solves = - value.map(|v| v.parse()).transpose().into_diagnostic()?; + key if key.starts_with("concurrency") => { + if key == "concurrency" { + if let Some(value) = value { + self.pypi_config = serde_json::de::from_str(&value).into_diagnostic()?; + } else { + self.pypi_config = PyPIConfig::default(); + } + return Ok(()); + } else if !key.starts_with("concurrency.") { + return Err(err); + } + let subkey = key.strip_prefix("concurrency.").unwrap(); + match subkey { + "max-concurrent-solves" => { + self.concurrency.max_concurrent_solves = + value.map(|v| v.parse()).transpose().into_diagnostic()?; + } + _ => return Err(err), + } + } _ => return Err(err), } @@ -1152,7 +1193,7 @@ mod tests { tls-no-verify = true detached-environments = "{}" pinning-strategy = "no-pin" -max-concurrent-solves = 5 +concurrency.max-concurrent-solves = 5 UNUSED = "unused" "#, env!("CARGO_MANIFEST_DIR").replace('\\', "\\\\").as_str() @@ -1253,7 +1294,9 @@ UNUSED = "unused" channel_config: ChannelConfig::default_with_root_dir(PathBuf::from("/root/dir")), tls_no_verify: Some(true), detached_environments: Some(DetachedEnvironments::Path(PathBuf::from("/path/to/envs"))), - max_concurrent_solves: Some(5), + concurrency: ConcurrencyConfig{ + max_concurrent_solves: Some(5) + }, ..Default::default() }; config = config.merge_config(other); @@ -1464,11 +1507,11 @@ UNUSED = "unused" assert_eq!(config.change_ps1, None); config - .set("max-concurrent-solves", Some("10".to_string())) + .set("concurrency.max-concurrent-solves", Some("10".to_string())) .unwrap(); assert_eq!(config.max_concurrent_solves(), Some(10)); config - .set("max-concurrent-solves", Some("1".to_string())) + .set("concurrency.max-concurrent-solves", Some("1".to_string())) .unwrap(); assert_eq!(config.max_concurrent_solves(), Some(1)); diff --git a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap index b01964d36..94e4b678c 100644 --- a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap +++ b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap @@ -73,5 +73,7 @@ Config { experimental: ExperimentalConfig { use_environment_activation_cache: None, }, - max_concurrent_solves: None, + concurrency: ConcurrencyConfig { + max_concurrent_solves: None, + }, } diff --git a/src/cli/install.rs b/src/cli/install.rs index a5a0176c0..3fd2507a4 100644 --- a/src/cli/install.rs +++ b/src/cli/install.rs @@ -59,7 +59,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { UpdateLockFileOptions { lock_file_usage: args.lock_file_usage.into(), no_install: false, - max_concurrent_solves: project.config().max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), }, ) .await?; diff --git a/src/cli/project/platform/add.rs b/src/cli/project/platform/add.rs index 29ecfa381..8dda29dae 100644 --- a/src/cli/project/platform/add.rs +++ b/src/cli/project/platform/add.rs @@ -51,7 +51,7 @@ pub async fn execute(mut project: Project, args: Args) -> miette::Result<()> { UpdateLockFileOptions { lock_file_usage: LockFileUsage::Update, no_install: args.no_install, - max_concurrent_solves: project.config().max_concurrent_solves, + max_concurrent_solves: 
project.config().max_concurrent_solves(), }, ) .await?; diff --git a/src/cli/project/platform/remove.rs b/src/cli/project/platform/remove.rs index 69e5f7633..0dc6c95db 100644 --- a/src/cli/project/platform/remove.rs +++ b/src/cli/project/platform/remove.rs @@ -50,7 +50,7 @@ pub async fn execute(mut project: Project, args: Args) -> miette::Result<()> { UpdateLockFileOptions { lock_file_usage: LockFileUsage::Update, no_install: args.no_install, - max_concurrent_solves: project.config().max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), }, ) .await?; diff --git a/src/cli/shell.rs b/src/cli/shell.rs index 37b31da79..8b95908b1 100644 --- a/src/cli/shell.rs +++ b/src/cli/shell.rs @@ -250,7 +250,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), no_install: args.prefix_update_config.no_install(), - max_concurrent_solves: project.config().max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), }, ) .await?; diff --git a/src/cli/shell_hook.rs b/src/cli/shell_hook.rs index 0494b036e..d891c00a1 100644 --- a/src/cli/shell_hook.rs +++ b/src/cli/shell_hook.rs @@ -131,7 +131,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), no_install: args.prefix_update_config.no_install(), - max_concurrent_solves: project.config().max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), }, ) .await?; From 54fc1d85835254698b0194b6770259eac06ff253 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Tue, 26 Nov 2024 17:48:07 +0100 Subject: [PATCH 03/10] feat: add network concurrency --- crates/pixi_config/src/lib.rs | 68 +++++++++++++++---- .../pixi_config__tests__config_merge.snap | 1 + src/global/project/mod.rs | 1 + src/project/repodata.rs | 1 + src/repodata.rs | 7 ++ 5 files changed, 65 insertions(+), 13 deletions(-) diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index 05d52a894..4ffc2d6c9 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -1,10 +1,3 @@ -use std::{ - collections::{BTreeSet as Set, HashMap}, - fs, - path::{Path, PathBuf}, - process::{Command, Stdio}, - str::FromStr, -}; use clap::{ArgAction, Parser}; use itertools::Itertools; use miette::{miette, Context, IntoDiagnostic}; @@ -16,6 +9,13 @@ use rattler_conda_types::{ use rattler_repodata_gateway::{Gateway, SourceConfig}; use reqwest_middleware::ClientWithMiddleware; use serde::{de::IntoDeserializer, Deserialize, Serialize}; +use std::{ + collections::{BTreeSet as Set, HashMap}, + fs, + path::{Path, PathBuf}, + process::{Command, Stdio}, + str::FromStr, +}; use url::Url; const EXPERIMENTAL: &str = "experimental"; @@ -118,6 +118,10 @@ pub struct ConfigCli { /// Max concurrent solves, default is the number of CPUs #[arg(long, short = 'j', visible_alias = "solve-jobs")] pub max_concurrent_solves: Option, + + /// Max concurrent network requests, default is 50 + #[arg(long)] + pub network_concurrency: Option, } #[derive(Parser, Debug, Clone, Default)] @@ -335,20 +339,40 @@ impl ExperimentalConfig { } } -#[derive(Debug, Deserialize, Serialize, Default, Clone, PartialEq, Eq)] +#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq)] #[serde(rename_all = "kebab-case")] pub struct ConcurrencyConfig { /// The maximum number of concurrent solves that can be run at once. 
#[serde(skip_serializing_if = "Option::is_none")] pub max_concurrent_solves: Option, + + /// The maximum number of concurrent HTTP requests to make. + #[serde(default)] + pub network_requests: usize, } +impl Default for ConcurrencyConfig { + fn default() -> Self { + Self { + max_concurrent_solves: None, + network_requests: 50, + } + } +} impl ConcurrencyConfig { /// Merge the given ConcurrencyConfig into the current one. pub fn merge(self, other: Self) -> Self { Self { max_concurrent_solves: other.max_concurrent_solves.or(self.max_concurrent_solves), + // Use the non default value if it is set otherwise + network_requests: if other.network_requests + != ConcurrencyConfig::default().network_requests + { + other.network_requests + } else { + self.network_requests + }, } } @@ -357,7 +381,6 @@ impl ConcurrencyConfig { } } - impl PyPIConfig { /// Merge the given PyPIConfig into the current one. pub fn merge(self, other: Self) -> Self { @@ -614,6 +637,9 @@ impl From for Config { detached_environments: None, concurrency: ConcurrencyConfig { max_concurrent_solves: cli.max_concurrent_solves, + network_requests: cli + .network_concurrency + .unwrap_or(ConcurrencyConfig::default().network_requests), }, ..Default::default() } @@ -873,7 +899,8 @@ impl Config { pinning_strategy: other.pinning_strategy.or(self.pinning_strategy), force_activate: other.force_activate, experimental: other.experimental.merge(self.experimental), - concurrency: other.concurrency.merge(self.concurrency), + // Make other take precedence over self to allow for setting the value through the CLI + concurrency: self.concurrency.merge(other.concurrency), } } @@ -944,6 +971,11 @@ impl Config { self.concurrency.max_concurrent_solves } + /// Retrieve the value for the network_requests field. + pub fn network_requests(&self) -> usize { + self.concurrency.network_requests + } + /// Modify this config with the given key and value /// /// # Note @@ -1107,9 +1139,15 @@ impl Config { self.concurrency.max_concurrent_solves = value.map(|v| v.parse()).transpose().into_diagnostic()?; } + "network-requests" => { + if let Some(value) = value { + self.concurrency.network_requests = value.parse().into_diagnostic()?; + } else { + return Err(miette!("'network-requests' requires a number value")); + } + } _ => return Err(err), } - } _ => return Err(err), } @@ -1147,6 +1185,7 @@ impl Config { .with_client(client) .with_cache_dir(cache_dir.join(consts::CONDA_REPODATA_CACHE_DIR)) .with_channel_config(self.into()) + .with_max_concurrent_requests(self.network_requests()) .finish() } } @@ -1242,6 +1281,7 @@ UNUSED = "unused" auth_file: None, pypi_keyring_provider: Some(KeyringProvider::Subprocess), max_concurrent_solves: None, + network_concurrency: None, }; let config = Config::from(cli); assert_eq!(config.tls_no_verify, Some(true)); @@ -1255,6 +1295,7 @@ UNUSED = "unused" auth_file: Some(PathBuf::from("path.json")), pypi_keyring_provider: None, max_concurrent_solves: None, + network_concurrency: None, }; let config = Config::from(cli); @@ -1294,8 +1335,9 @@ UNUSED = "unused" channel_config: ChannelConfig::default_with_root_dir(PathBuf::from("/root/dir")), tls_no_verify: Some(true), detached_environments: Some(DetachedEnvironments::Path(PathBuf::from("/path/to/envs"))), - concurrency: ConcurrencyConfig{ - max_concurrent_solves: Some(5) + concurrency: ConcurrencyConfig { + max_concurrent_solves: Some(5), + ..ConcurrencyConfig::default() }, ..Default::default() }; diff --git a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap 
b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap index 94e4b678c..f66275fca 100644 --- a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap +++ b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap @@ -75,5 +75,6 @@ Config { }, concurrency: ConcurrencyConfig { max_concurrent_solves: None, + network_requests: 50, }, } diff --git a/src/global/project/mod.rs b/src/global/project/mod.rs index ff2a82ce3..fc2a31faa 100644 --- a/src/global/project/mod.rs +++ b/src/global/project/mod.rs @@ -1017,6 +1017,7 @@ impl Repodata for Project { Self::repodata_gateway_init( self.authenticated_client().clone(), self.config().clone().into(), + self.config().network_requests(), ) }) } diff --git a/src/project/repodata.rs b/src/project/repodata.rs index 472915642..48917cf3f 100644 --- a/src/project/repodata.rs +++ b/src/project/repodata.rs @@ -9,6 +9,7 @@ impl Repodata for Project { Self::repodata_gateway_init( self.authenticated_client().clone(), self.config().clone().into(), + self.config().network_requests(), ) }) } diff --git a/src/repodata.rs b/src/repodata.rs index 23942560e..c979db4ba 100644 --- a/src/repodata.rs +++ b/src/repodata.rs @@ -7,6 +7,7 @@ pub(crate) trait Repodata { fn repodata_gateway_init( authenticated_client: reqwest_middleware::ClientWithMiddleware, channel_config: ChannelConfig, + max_concurrent_requests: usize, ) -> Gateway { // Determine the cache directory and fall back to sane defaults otherwise. let cache_dir = pixi_config::get_cache_dir().unwrap_or_else(|e| { @@ -16,12 +17,18 @@ pub(crate) trait Repodata { let package_cache = PackageCache::new(cache_dir.join("pkgs")); + tracing::info!( + "repodata gateway: using max '{}' concurrent network requests", + max_concurrent_requests + ); + // Construct the gateway Gateway::builder() .with_client(authenticated_client) .with_cache_dir(cache_dir.join(pixi_consts::consts::CONDA_REPODATA_CACHE_DIR)) .with_package_cache(package_cache) .with_channel_config(channel_config) + .with_max_concurrent_requests(max_concurrent_requests) .finish() } From 2ef177825abd84ce7a6b555a392dff947274a268 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Wed, 27 Nov 2024 17:02:28 +0100 Subject: [PATCH 04/10] refactor: rename the values and improve the default settings --- crates/pixi_config/src/lib.rs | 132 ++++++++++++------ .../pixi_config__tests__config_merge.snap | 4 +- crates/pixi_config/tests/config/config_1.toml | 3 + src/cli/list.rs | 2 +- src/cli/project/channel/add.rs | 2 +- src/cli/project/channel/remove.rs | 2 +- src/cli/project/export/conda_environment.rs | 14 +- src/cli/project/export/conda_explicit_spec.rs | 16 ++- src/cli/project/export/mod.rs | 11 +- src/cli/remove.rs | 2 +- src/cli/run.rs | 2 +- src/cli/tree.rs | 2 +- src/cli/update.rs | 11 +- src/global/project/mod.rs | 2 +- src/lock_file/mod.rs | 2 +- src/lock_file/update.rs | 29 ++-- src/project/repodata.rs | 2 +- 17 files changed, 138 insertions(+), 100 deletions(-) diff --git a/crates/pixi_config/src/lib.rs b/crates/pixi_config/src/lib.rs index 155b4430d..cf912bdaf 100644 --- a/crates/pixi_config/src/lib.rs +++ b/crates/pixi_config/src/lib.rs @@ -116,12 +116,12 @@ pub struct ConfigCli { pypi_keyring_provider: Option, /// Max concurrent solves, default is the number of CPUs - #[arg(long, short = 'j', visible_alias = "solve-jobs")] - pub max_concurrent_solves: Option, + #[arg(long)] + pub concurrent_solves: Option, /// Max concurrent network requests, default is 50 #[arg(long)] - pub network_concurrency: Option, + pub 
concurrent_downloads: Option, } #[derive(Parser, Debug, Clone, Default)] @@ -339,23 +339,39 @@ impl ExperimentalConfig { } } +// Making the default values part of pixi_config to allow for printing the default settings in the future. +/// The default maximum number of concurrent solves that can be run at once. +/// Defaulting to the number of CPUs available. +fn default_max_concurrent_solves() -> usize { + std::thread::available_parallelism().map_or(1, |n| n.get()) +} + +/// The default maximum number of concurrent downloads that can be run at once. +/// 50 is a reasonable default for the number of concurrent downloads. +/// More verification is needed to determine the optimal number. +fn default_max_concurrent_downloads() -> usize { + 50 +} + #[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq)] #[serde(rename_all = "kebab-case")] pub struct ConcurrencyConfig { /// The maximum number of concurrent solves that can be run at once. - #[serde(skip_serializing_if = "Option::is_none")] - pub max_concurrent_solves: Option, + // Needing to set this default next to the default of the full struct to avoid serde defaulting to 0 of partial struct was omitted. + #[serde(default = "default_max_concurrent_solves")] + pub solves: usize, /// The maximum number of concurrent HTTP requests to make. - #[serde(default)] - pub network_requests: usize, + // Needing to set this default next to the default of the full struct to avoid serde defaulting to 0 of partial struct was omitted. + #[serde(default = "default_max_concurrent_downloads")] + pub downloads: usize, } impl Default for ConcurrencyConfig { fn default() -> Self { Self { - max_concurrent_solves: None, - network_requests: 50, + solves: default_max_concurrent_solves(), + downloads: default_max_concurrent_downloads(), } } } @@ -363,15 +379,17 @@ impl Default for ConcurrencyConfig { impl ConcurrencyConfig { /// Merge the given ConcurrencyConfig into the current one. pub fn merge(self, other: Self) -> Self { + // Merging means using the other value if they are none default. 
Self { - max_concurrent_solves: other.max_concurrent_solves.or(self.max_concurrent_solves), - // Use the non default value if it is set otherwise - network_requests: if other.network_requests - != ConcurrencyConfig::default().network_requests - { - other.network_requests + solves: if other.solves != ConcurrencyConfig::default().solves { + other.solves + } else { + self.solves + }, + downloads: if other.downloads != ConcurrencyConfig::default().downloads { + other.downloads } else { - self.network_requests + self.downloads }, } } @@ -617,10 +635,10 @@ impl Default for Config { repodata_config: RepodataConfig::default(), pypi_config: PyPIConfig::default(), detached_environments: Some(DetachedEnvironments::default()), - pinning_strategy: Default::default(), + pinning_strategy: None, force_activate: None, - experimental: Default::default(), - concurrency: Default::default(), + experimental: ExperimentalConfig::default(), + concurrency: ConcurrencyConfig::default(), } } } @@ -636,10 +654,12 @@ impl From for Config { .unwrap_or_default(), detached_environments: None, concurrency: ConcurrencyConfig { - max_concurrent_solves: cli.max_concurrent_solves, - network_requests: cli - .network_concurrency - .unwrap_or(ConcurrencyConfig::default().network_requests), + solves: cli + .concurrent_solves + .unwrap_or(ConcurrencyConfig::default().solves), + downloads: cli + .concurrent_downloads + .unwrap_or(ConcurrencyConfig::default().downloads), }, ..Default::default() } @@ -967,13 +987,13 @@ impl Config { } /// Retrieve the value for the max_concurrent_solves field. - pub fn max_concurrent_solves(&self) -> Option { - self.concurrency.max_concurrent_solves + pub fn max_concurrent_solves(&self) -> usize { + self.concurrency.solves } /// Retrieve the value for the network_requests field. 
- pub fn network_requests(&self) -> usize { - self.concurrency.network_requests + pub fn max_concurrent_downloads(&self) -> usize { + self.concurrency.downloads } /// Modify this config with the given key and value @@ -1135,15 +1155,18 @@ impl Config { } let subkey = key.strip_prefix("concurrency.").unwrap(); match subkey { - "max-concurrent-solves" => { - self.concurrency.max_concurrent_solves = - value.map(|v| v.parse()).transpose().into_diagnostic()?; + "solves" => { + if let Some(value) = value { + self.concurrency.solves = value.parse().into_diagnostic()?; + } else { + return Err(miette!("'solves' requires a number value")); + } } - "network-requests" => { + "downloads" => { if let Some(value) = value { - self.concurrency.network_requests = value.parse().into_diagnostic()?; + self.concurrency.downloads = value.parse().into_diagnostic()?; } else { - return Err(miette!("'network-requests' requires a number value")); + return Err(miette!("'downloads' requires a number value")); } } _ => return Err(err), @@ -1185,7 +1208,7 @@ impl Config { .with_client(client) .with_cache_dir(cache_dir.join(consts::CONDA_REPODATA_CACHE_DIR)) .with_channel_config(self.into()) - .with_max_concurrent_requests(self.network_requests()) + .with_max_concurrent_requests(self.max_concurrent_downloads()) .finish() } } @@ -1226,7 +1249,7 @@ mod tests { tls-no-verify = true detached-environments = "{}" pinning-strategy = "no-pin" -concurrency.max-concurrent-solves = 5 +concurrency.solves = 5 UNUSED = "unused" "#, env!("CARGO_MANIFEST_DIR").replace('\\', "\\\\").as_str() @@ -1241,7 +1264,7 @@ UNUSED = "unused" config.detached_environments().path().unwrap(), Some(PathBuf::from(env!("CARGO_MANIFEST_DIR"))) ); - assert_eq!(config.max_concurrent_solves(), Some(5)); + assert_eq!(config.max_concurrent_solves(), 5); assert!(unused.contains("UNUSED")); let toml = r"detached-environments = true"; @@ -1274,8 +1297,8 @@ UNUSED = "unused" tls_no_verify: true, auth_file: None, pypi_keyring_provider: Some(KeyringProvider::Subprocess), - max_concurrent_solves: None, - network_concurrency: None, + concurrent_solves: None, + concurrent_downloads: None, }; let config = Config::from(cli); assert_eq!(config.tls_no_verify, Some(true)); @@ -1288,8 +1311,8 @@ UNUSED = "unused" tls_no_verify: false, auth_file: Some(PathBuf::from("path.json")), pypi_keyring_provider: None, - max_concurrent_solves: None, - network_concurrency: None, + concurrent_solves: None, + concurrent_downloads: None, }; let config = Config::from(cli); @@ -1321,6 +1344,14 @@ UNUSED = "unused" ); } + #[test] + fn test_default_config() { + let config = Config::default(); + // This depends on the system so it's hard to test. 
+ assert!(config.concurrency.solves > 0); + assert_eq!(config.concurrency.downloads, 50); + } + #[test] fn test_config_merge() { let mut config = Config::default(); @@ -1330,7 +1361,7 @@ UNUSED = "unused" tls_no_verify: Some(true), detached_environments: Some(DetachedEnvironments::Path(PathBuf::from("/path/to/envs"))), concurrency: ConcurrencyConfig { - max_concurrent_solves: Some(5), + solves: 5, ..ConcurrencyConfig::default() }, ..Default::default() @@ -1366,7 +1397,7 @@ UNUSED = "unused" config.detached_environments().path().unwrap(), Some(PathBuf::from("/path/to/envs2")) ); - assert_eq!(config.max_concurrent_solves(), Some(5)); + assert_eq!(config.max_concurrent_solves(), 5); let d = Path::new(&env!("CARGO_MANIFEST_DIR")) .join("tests") @@ -1543,13 +1574,22 @@ UNUSED = "unused" assert_eq!(config.change_ps1, None); config - .set("concurrency.max-concurrent-solves", Some("10".to_string())) + .set("concurrency.solves", Some("10".to_string())) .unwrap(); - assert_eq!(config.max_concurrent_solves(), Some(10)); + assert_eq!(config.max_concurrent_solves(), 10); config - .set("concurrency.max-concurrent-solves", Some("1".to_string())) + .set("concurrency.solves", Some("1".to_string())) .unwrap(); - assert_eq!(config.max_concurrent_solves(), Some(1)); + + config + .set("concurrency.downloads", Some("10".to_string())) + .unwrap(); + assert_eq!(config.max_concurrent_downloads(), 10); + config + .set("concurrency.downloads", Some("1".to_string())) + .unwrap(); + + assert_eq!(config.max_concurrent_downloads(), 1); config.set("unknown-key", None).unwrap_err(); } diff --git a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap index f66275fca..002e07945 100644 --- a/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap +++ b/crates/pixi_config/src/snapshots/pixi_config__tests__config_merge.snap @@ -74,7 +74,7 @@ Config { use_environment_activation_cache: None, }, concurrency: ConcurrencyConfig { - max_concurrent_solves: None, - network_requests: 50, + solves: 1, + downloads: 50, }, } diff --git a/crates/pixi_config/tests/config/config_1.toml b/crates/pixi_config/tests/config/config_1.toml index 7ec6a40d0..262b77f60 100644 --- a/crates/pixi_config/tests/config/config_1.toml +++ b/crates/pixi_config/tests/config/config_1.toml @@ -1,2 +1,5 @@ default_channels = ["conda-forge", "bioconda", "defaults"] tls_no_verify = true + +# Hardcode as we don't want this to depend on the system in the tests +concurrency.solves = 1 diff --git a/src/cli/list.rs b/src/cli/list.rs index 896b69567..fc9817975 100644 --- a/src/cli/list.rs +++ b/src/cli/list.rs @@ -150,7 +150,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { .update_lock_file(UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), no_install: args.prefix_update_config.no_install, - max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), }) .await?; diff --git a/src/cli/project/channel/add.rs b/src/cli/project/channel/add.rs index cfe475936..fe6344bd9 100644 --- a/src/cli/project/channel/add.rs +++ b/src/cli/project/channel/add.rs @@ -24,7 +24,7 @@ pub async fn execute(args: AddRemoveArgs) -> miette::Result<()> { UpdateLockFileOptions { lock_file_usage: LockFileUsage::Update, no_install: args.prefix_update_config.no_install(), - max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, + 
max_concurrent_solves: project.config().max_concurrent_solves(), }, ) .await?; diff --git a/src/cli/project/channel/remove.rs b/src/cli/project/channel/remove.rs index 424adad4a..20a604eb2 100644 --- a/src/cli/project/channel/remove.rs +++ b/src/cli/project/channel/remove.rs @@ -21,7 +21,7 @@ pub async fn execute(args: AddRemoveArgs) -> miette::Result<()> { UpdateLockFileOptions { lock_file_usage: LockFileUsage::Update, no_install: args.prefix_update_config.no_install(), - max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), }, ) .await?; diff --git a/src/cli/project/export/conda_environment.rs b/src/cli/project/export/conda_environment.rs index df73a4310..d0843097d 100644 --- a/src/cli/project/export/conda_environment.rs +++ b/src/cli/project/export/conda_environment.rs @@ -1,5 +1,6 @@ use std::path::PathBuf; +use crate::cli::cli_config::ProjectConfig; use clap::Parser; use itertools::Itertools; use miette::{Context, IntoDiagnostic}; @@ -18,6 +19,9 @@ use crate::{project::Environment, Project}; #[derive(Debug, Parser)] pub struct Args { + #[clap(flatten)] + pub project_config: ProjectConfig, + /// Explicit path to export the environment to pub output_path: Option, @@ -221,7 +225,8 @@ fn channels_with_nodefaults(channels: Vec) -> Vec miette::Result<()> { +pub async fn execute(args: Args) -> miette::Result<()> { + let project = Project::load_or_else_discover(args.project_config.manifest_path.as_deref())?; let environment = project.environment_from_name_or_env_var(args.environment)?; let platform = args.platform.unwrap_or_else(|| environment.best_platform()); let config = project.config(); @@ -255,6 +260,7 @@ mod tests { output_path: None, platform: Some(Platform::Osx64), environment: Some("default".to_string()), + project_config: ProjectConfig::default(), }; let environment = project .environment_from_name_or_env_var(args.environment) @@ -280,6 +286,7 @@ mod tests { output_path: None, platform: None, environment: Some("default".to_string()), + project_config: ProjectConfig::default(), }; let environment = project .environment_from_name_or_env_var(args.environment) @@ -306,6 +313,7 @@ mod tests { output_path: None, platform: None, environment: Some("default".to_string()), + project_config: ProjectConfig::default(), }; let environment = project .environment_from_name_or_env_var(args.environment) @@ -332,6 +340,7 @@ mod tests { output_path: None, platform: None, environment: Some("alternative".to_string()), + project_config: ProjectConfig::default(), }; let environment = project .environment_from_name_or_env_var(args.environment) @@ -357,6 +366,7 @@ mod tests { output_path: None, platform: None, environment: Some("default".to_string()), + project_config: ProjectConfig::default(), }; let environment = project .environment_from_name_or_env_var(args.environment) @@ -382,6 +392,7 @@ mod tests { output_path: None, platform: Some(Platform::OsxArm64), environment: Some("default".to_string()), + project_config: ProjectConfig::default(), }; let environment = project .environment_from_name_or_env_var(args.environment) @@ -415,6 +426,7 @@ mod tests { output_path: None, platform: Some(Platform::Osx64), environment: None, + project_config: ProjectConfig::default(), }; let environment = project .environment_from_name_or_env_var(args.environment) diff --git a/src/cli/project/export/conda_explicit_spec.rs b/src/cli/project/export/conda_explicit_spec.rs index f51cd3bb3..fdbea28fc 100644 --- 
a/src/cli/project/export/conda_explicit_spec.rs +++ b/src/cli/project/export/conda_explicit_spec.rs @@ -11,11 +11,18 @@ use rattler_conda_types::{ }; use rattler_lock::{CondaPackageData, Environment, LockedPackageRef}; -use crate::{cli::cli_config::PrefixUpdateConfig, lock_file::UpdateLockFileOptions, Project}; +use crate::{ + cli::cli_config::{PrefixUpdateConfig, ProjectConfig}, + lock_file::UpdateLockFileOptions, + Project, +}; #[derive(Debug, Parser)] #[clap(arg_required_else_help = false)] pub struct Args { + #[clap(flatten)] + pub project_config: ProjectConfig, + /// Output directory for rendered explicit environment spec files pub output_dir: PathBuf, @@ -152,12 +159,15 @@ fn render_env_platform( Ok(()) } -pub async fn execute(project: Project, args: Args) -> miette::Result<()> { +pub async fn execute(args: Args) -> miette::Result<()> { + let project = Project::load_or_else_discover(args.project_config.manifest_path.as_deref())? + .with_cli_config(args.prefix_update_config.config.clone()); + let lockfile = project .update_lock_file(UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), no_install: args.prefix_update_config.no_install, - max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), }) .await? .lock_file; diff --git a/src/cli/project/export/mod.rs b/src/cli/project/export/mod.rs index 9de830221..e8f773390 100644 --- a/src/cli/project/export/mod.rs +++ b/src/cli/project/export/mod.rs @@ -1,17 +1,11 @@ -use std::path::PathBuf; pub mod conda_environment; pub mod conda_explicit_spec; -use crate::Project; use clap::Parser; /// Commands to export projects to other formats #[derive(Parser, Debug)] pub struct Args { - /// The path to `pixi.toml` or `pyproject.toml` - #[clap(long, global = true)] - pub manifest_path: Option, - #[clap(subcommand)] pub command: Command, } @@ -26,10 +20,9 @@ pub enum Command { } pub async fn execute(args: Args) -> miette::Result<()> { - let project = Project::load_or_else_discover(args.manifest_path.as_deref())?; match args.command { - Command::CondaExplicitSpec(args) => conda_explicit_spec::execute(project, args).await?, - Command::CondaEnvironment(args) => conda_environment::execute(project, args).await?, + Command::CondaExplicitSpec(args) => conda_explicit_spec::execute(args).await?, + Command::CondaEnvironment(args) => conda_environment::execute(args).await?, }; Ok(()) } diff --git a/src/cli/remove.rs b/src/cli/remove.rs index cbea3fdd7..43b0d5316 100644 --- a/src/cli/remove.rs +++ b/src/cli/remove.rs @@ -85,7 +85,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { UpdateLockFileOptions { lock_file_usage: prefix_update_config.lock_file_usage(), no_install: prefix_update_config.no_install, - max_concurrent_solves: prefix_update_config.config.max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), }, ) .await?; diff --git a/src/cli/run.rs b/src/cli/run.rs index 0d978b80a..d196f65a8 100644 --- a/src/cli/run.rs +++ b/src/cli/run.rs @@ -103,7 +103,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { let mut lock_file = project .update_lock_file(UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), - max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), ..UpdateLockFileOptions::default() }) .await?; diff --git a/src/cli/tree.rs b/src/cli/tree.rs index 
6b3f089ee..f6d6c85b7 100644 --- a/src/cli/tree.rs +++ b/src/cli/tree.rs @@ -84,7 +84,7 @@ pub async fn execute(args: Args) -> miette::Result<()> { .update_lock_file(UpdateLockFileOptions { lock_file_usage: args.prefix_update_config.lock_file_usage(), no_install: args.prefix_update_config.no_install, - max_concurrent_solves: args.prefix_update_config.config.max_concurrent_solves, + max_concurrent_solves: project.config().max_concurrent_solves(), }) .await .wrap_err("Failed to update lock file")?; diff --git a/src/cli/update.rs b/src/cli/update.rs index b7de7459f..88597bb58 100644 --- a/src/cli/update.rs +++ b/src/cli/update.rs @@ -1,8 +1,5 @@ use std::{cmp::Ordering, collections::HashSet}; -use fancy_display::FancyDisplay; - -use crate::lock_file::default_max_concurrent_solves; use crate::{ cli::cli_config::ProjectConfig, diff::{LockFileDiff, LockFileJsonDiff}, @@ -13,6 +10,7 @@ use crate::{ Project, }; use clap::Parser; +use fancy_display::FancyDisplay; use itertools::Itertools; use miette::{Context, IntoDiagnostic, MietteDiagnostic}; use pixi_config::ConfigCli; @@ -156,16 +154,11 @@ pub async fn execute(args: Args) -> miette::Result<()> { // Unlock dependencies in the lock-file that we want to update. let relaxed_lock_file = unlock_packages(&project, &loaded_lock_file, &specs); - let max_concurrent_solves = project - .config() - .max_concurrent_solves() - .unwrap_or_else(default_max_concurrent_solves); - // Update the packages in the lock-file. let updated_lock_file = UpdateContext::builder(&project) .with_lock_file(relaxed_lock_file.clone()) .with_no_install(args.no_install) - .with_max_concurrent_solves(max_concurrent_solves) + .with_max_concurrent_solves(project.config().max_concurrent_solves()) .finish() .await? .update() diff --git a/src/global/project/mod.rs b/src/global/project/mod.rs index 22f525c0e..59690b4ec 100644 --- a/src/global/project/mod.rs +++ b/src/global/project/mod.rs @@ -1043,7 +1043,7 @@ impl Repodata for Project { Self::repodata_gateway_init( self.authenticated_client().clone(), self.config().clone().into(), - self.config().network_requests(), + self.config().max_concurrent_downloads(), ) }) } diff --git a/src/lock_file/mod.rs b/src/lock_file/mod.rs index 8cfcddfd0..d38ce8b1e 100644 --- a/src/lock_file/mod.rs +++ b/src/lock_file/mod.rs @@ -20,8 +20,8 @@ pub use satisfiability::{ verify_environment_satisfiability, verify_platform_satisfiability, EnvironmentUnsat, PlatformUnsat, }; -pub use update::{default_max_concurrent_solves, UpdateLockFileOptions, UpdateMode}; pub(crate) use update::{LockFileDerivedData, UpdateContext}; +pub use update::{UpdateLockFileOptions, UpdateMode}; pub(crate) use utils::filter_lock_file; /// A list of conda packages that are locked for a specific platform. diff --git a/src/lock_file/update.rs b/src/lock_file/update.rs index b665d62a3..4284ee1a6 100644 --- a/src/lock_file/update.rs +++ b/src/lock_file/update.rs @@ -109,7 +109,7 @@ pub struct UpdateLockFileOptions { /// The maximum number of concurrent solves that are allowed to run. If this /// value is None a heuristic is used based on the number of cores /// available from the system. - pub max_concurrent_solves: Option, + pub max_concurrent_solves: usize, } /// A struct that holds the lock-file and any potential derived data that was @@ -636,11 +636,6 @@ impl<'p> UpdateContext<'p> { } } -/// Returns the default number of concurrent solves. 
-pub fn default_max_concurrent_solves() -> usize { - std::thread::available_parallelism().map_or(1, |n| n.get()) -} - /// If the project has any source dependencies, like `git` or `path` /// dependencies. for pypi dependencies, we need to limit the solve to 1, /// because of uv internals @@ -656,8 +651,8 @@ fn determine_pypi_solve_permits(project: &Project) -> usize { } } } - - default_max_concurrent_solves() + // If no source dependencies are found, we can use the default concurrency + project.config().max_concurrent_solves() } /// Ensures that the lock-file is up-to-date with the project. @@ -737,13 +732,9 @@ pub async fn update_lock_file( miette::bail!("lock-file not up-to-date with the project"); } - let max_concurrent_solves = options - .max_concurrent_solves - .unwrap_or_else(default_max_concurrent_solves); - // Construct an update context and perform the actual update. let lock_file_derived_data = UpdateContext::builder(project) - .with_max_concurrent_solves(max_concurrent_solves) + .with_max_concurrent_solves(options.max_concurrent_solves) .with_package_cache(package_cache) .with_no_install(options.no_install) .with_outdated_environments(outdated) @@ -783,7 +774,7 @@ pub struct UpdateContextBuilder<'p> { /// The maximum number of concurrent solves that are allowed to run. If this /// value is `None` a heuristic is used based on the number of cores /// available from the system. - max_concurrent_solves: Option, + max_concurrent_solves: usize, /// The io concurrency semaphore to use when updating environments io_concurrency_limit: Option, @@ -837,7 +828,7 @@ impl<'p> UpdateContextBuilder<'p> { /// Sets the maximum number of solves that are allowed to run concurrently. pub(crate) fn with_max_concurrent_solves(self, max_concurrent_solves: usize) -> Self { Self { - max_concurrent_solves: Some(max_concurrent_solves), + max_concurrent_solves, ..self } } @@ -1025,10 +1016,6 @@ impl<'p> UpdateContextBuilder<'p> { }) .collect(); - let max_concurrent_solves = self - .max_concurrent_solves - .unwrap_or_else(default_max_concurrent_solves); - let gateway = project.repodata_gateway().clone(); let client = project.authenticated_client().clone(); @@ -1061,7 +1048,7 @@ impl<'p> UpdateContextBuilder<'p> { grouped_solved_pypi_records: HashMap::new(), package_cache, - conda_solve_semaphore: Arc::new(Semaphore::new(max_concurrent_solves)), + conda_solve_semaphore: Arc::new(Semaphore::new(self.max_concurrent_solves)), pypi_solve_semaphore: Arc::new(Semaphore::new(determine_pypi_solve_permits(project))), io_concurrency_limit: self.io_concurrency_limit.unwrap_or_default(), build_context, @@ -1081,7 +1068,7 @@ impl<'p> UpdateContext<'p> { outdated_environments: None, no_install: true, package_cache: None, - max_concurrent_solves: None, + max_concurrent_solves: project.config().max_concurrent_solves(), io_concurrency_limit: None, glob_hash_cache: None, } diff --git a/src/project/repodata.rs b/src/project/repodata.rs index 48917cf3f..4ad4a2d41 100644 --- a/src/project/repodata.rs +++ b/src/project/repodata.rs @@ -9,7 +9,7 @@ impl Repodata for Project { Self::repodata_gateway_init( self.authenticated_client().clone(), self.config().clone().into(), - self.config().network_requests(), + self.config().max_concurrent_downloads(), ) }) } From 13aafdc0305c74a095ad2e2d19a352242de93739 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Thu, 28 Nov 2024 09:59:15 +0100 Subject: [PATCH 05/10] docs: add docs for concurrency and test its parsing --- docs/reference/pixi_configuration.md | 77 ++++++----------- 
.../detached_environments_path_config.toml | 3 + .../pixi_config_tomls/main_config.toml | 83 +++++++++++++++++++ .../mirror_prefix_config.toml | 6 ++ .../pixi_config_tomls/oci_config.toml | 6 ++ tests/integration_rust/project_tests.rs | 32 ++++++- 6 files changed, 154 insertions(+), 53 deletions(-) create mode 100644 docs/source_files/pixi_config_tomls/detached_environments_path_config.toml create mode 100644 docs/source_files/pixi_config_tomls/main_config.toml create mode 100644 docs/source_files/pixi_config_tomls/mirror_prefix_config.toml create mode 100644 docs/source_files/pixi_config_tomls/oci_config.toml diff --git a/docs/reference/pixi_configuration.md b/docs/reference/pixi_configuration.md index 57652ae2a..f258c8dea 100644 --- a/docs/reference/pixi_configuration.md +++ b/docs/reference/pixi_configuration.md @@ -68,7 +68,7 @@ The following reference describes all available configuration options. The default channels to select when running `pixi init` or `pixi global install`. This defaults to only conda-forge. ```toml title="config.toml" -default-channels = ["conda-forge"] +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:default-channels" ``` !!! note The `default-channels` are only used when initializing a new project. Once initialized the `channels` are used from the project manifest. @@ -80,7 +80,7 @@ This applies to the `pixi shell` subcommand. You can override this from the CLI with `--change-ps1`. ```toml title="config.toml" -change-ps1 = true +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:change-ps1" ``` ### `tls-no-verify` @@ -93,7 +93,7 @@ When set to true, the TLS certificates are not verified. You can override this from the CLI with `--tls-no-verify`. ```toml title="config.toml" -tls-no-verify = false +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:tls-no-verify" ``` ### `authentication-override-file` @@ -102,7 +102,7 @@ Usually, we try to use the keyring to load authentication data from, and only us file as a fallback. This option allows you to force the use of a JSON file. Read more in the authentication section. ```toml title="config.toml" -authentication-override-file = "/path/to/your/override.json" +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:authentication-override-file" ``` ### `detached-environments` @@ -125,20 +125,17 @@ This field can consist of two types of input. - A string value, which will be the absolute path to the directory where the environments will be stored. ```toml title="config.toml" -detached-environments = true +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:detached-environments" ``` or: ```toml title="config.toml" -detached-environments = "/opt/pixi/envs" +--8<-- "docs/source_files/pixi_config_tomls/detached_environments_path_config.toml:detached-environments-path" ``` The environments will be stored in the [cache directory](../features/environment.md#caching) when this option is `true`. When you specify a custom path the environments will be stored in that directory. The resulting directory structure will look like this: -```toml title="config.toml" -detached-environments = "/opt/pixi/envs" -``` ```shell /opt/pixi/envs ├── pixi-6837172896226367631 @@ -161,46 +158,25 @@ The default is `semver` but you can set the following: - `latest-up`: Pinning to the latest version, `1.2.3` -> `>=1.2.3`. 
```toml title="config.toml" -pinning-strategy = "no-pin" +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:pinning-strategy" ``` ### `mirrors` Configuration for conda channel-mirrors, more info [below](#mirror-configuration). ```toml title="config.toml" -[mirrors] -# redirect all requests for conda-forge to the prefix.dev mirror -"https://conda.anaconda.org/conda-forge" = [ - "https://prefix.dev/conda-forge" -] - -# redirect all requests for bioconda to one of the three listed mirrors -# Note: for repodata we try the first mirror first. -"https://conda.anaconda.org/bioconda" = [ - "https://conda.anaconda.org/bioconda", - # OCI registries are also supported - "oci://ghcr.io/channel-mirrors/bioconda", - "https://prefix.dev/bioconda", -] +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:mirrors" ``` ### `repodata-config` Configuration for repodata fetching. ```toml title="config.toml" -[repodata-config] -# disable fetching of jlap, bz2 or zstd repodata files. -# This should only be used for specific old versions of artifactory and other non-compliant -# servers. -disable-jlap = true # don't try to download repodata.jlap -disable-bzip2 = true # don't try to download repodata.json.bz2 -disable-zstd = true # don't try to download repodata.json.zst -disable-sharded = true # don't try to download sharded repodata +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:repodata-config" ``` The above settings can be overridden on a per-channel basis by specifying a channel prefix in the configuration. ```toml title="config.toml" -[repodata-config."https://prefix.dev"] -disable-sharded = false +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:prefix-repodata-config" ``` ### `pypi-config` @@ -211,19 +187,24 @@ To setup a certain number of defaults for the usage of PyPI registries. You can - `keyring-provider`: Allows the use of the [keyring](https://pypi.org/project/keyring/) python package to store and retrieve credentials. ```toml title="config.toml" -[pypi-config] -# Main index url -index-url = "https://pypi.org/simple" -# list of additional urls -extra-index-urls = ["https://pypi.org/simple2"] -# can be "subprocess" or "disabled" -keyring-provider = "subprocess" +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:pypi-config" ``` !!! Note "`index-url` and `extra-index-urls` are *not* globals" Unlike pip, these settings, with the exception of `keyring-provider` will only modify the `pixi.toml`/`pyproject.toml` file and are not globally interpreted when not present in the manifest. This is because we want to keep the manifest file as complete and reproducible as possible. +### `concurrency` +Configure multiple settings to limit or extend the concurrency of pixi. +```toml title="config.toml" +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:concurrency" +``` +Set them through the CLI with: +```shell +pixi config set concurrency.solves 1 +pixi config set concurrency.downloads 12 +``` + ## Experimental This allows the user to set specific experimental features that are not yet stable. @@ -263,9 +244,7 @@ pixi run/shell/shell-hook --force-activate Set the configuration with: ```toml title="config.toml" -[experimental] -# Enable the use of the environment activation cache -use-environment-activation-cache = true +--8<-- "docs/source_files/pixi_config_tomls/main_config.toml:experimental" ``` !!! note "Why is this experimental?" @@ -290,10 +269,7 @@ important to get this file from a trusted source. 
You can also specify mirrors for an entire "host", e.g. ```toml title="config.toml" -[mirrors] -"https://conda.anaconda.org" = [ - "https://prefix.dev/" -] +--8<-- "docs/source_files/pixi_config_tomls/mirror_prefix_config.toml:mirrors" ``` This will forward all request to channels on anaconda.org to prefix.dev. @@ -306,10 +282,7 @@ the Github container registry (ghcr.io) that is maintained by the conda-forge team. You can use it like this: ```toml title="config.toml" -[mirrors] -"https://conda.anaconda.org/conda-forge" = [ - "oci://ghcr.io/channel-mirrors/conda-forge" -] +--8<-- "docs/source_files/pixi_config_tomls/oci_config.toml:oci-mirrors" ``` The GHCR mirror also contains `bioconda` packages. You can search the [available diff --git a/docs/source_files/pixi_config_tomls/detached_environments_path_config.toml b/docs/source_files/pixi_config_tomls/detached_environments_path_config.toml new file mode 100644 index 000000000..be18943e7 --- /dev/null +++ b/docs/source_files/pixi_config_tomls/detached_environments_path_config.toml @@ -0,0 +1,3 @@ +# --8<-- [start:detached-environments-path] +detached-environments = "/opt/pixi/envs" +# --8<-- [end:detached-environments-path] \ No newline at end of file diff --git a/docs/source_files/pixi_config_tomls/main_config.toml b/docs/source_files/pixi_config_tomls/main_config.toml new file mode 100644 index 000000000..e745304f2 --- /dev/null +++ b/docs/source_files/pixi_config_tomls/main_config.toml @@ -0,0 +1,83 @@ + +# --8<-- [start:default-channels] +default-channels = ["conda-forge"] +# --8<-- [end:default-channels] + +# --8<-- [start:change-ps1] +change-ps1 = true +# --8<-- [end:change-ps1] + +# --8<-- [start:tls-no-verify] +tls-no-verify = false +# --8<-- [end:tls-no-verify] + +# --8<-- [start:authentication-override-file] +authentication-override-file = "/path/to/your/override.json" +# --8<-- [end:authentication-override-file] + +# --8<-- [start:detached-environments] +detached-environments = true +# --8<-- [end:detached-environments] + +# --8<-- [start:pinning-strategy] +pinning-strategy = "no-pin" +# --8<-- [end:pinning-strategy] + +# --8<-- [start:repodata-config] +[repodata-config] +# disable fetching of jlap, bz2 or zstd repodata files. +# This should only be used for specific old versions of artifactory and other non-compliant +# servers. 
+disable-jlap = true # don't try to download repodata.jlap +disable-bzip2 = true # don't try to download repodata.json.bz2 +disable-zstd = true # don't try to download repodata.json.zst +disable-sharded = true # don't try to download sharded repodata +# --8<-- [end:repodata-config] +# --8<-- [start:prefix-repodata-config] +[repodata-config."https://prefix.dev"] +disable-sharded = false +# --8<-- [end:prefix-repodata-config] + +# --8<-- [start:pypi-config] +[pypi-config] +# Main index url +index-url = "https://pypi.org/simple" +# list of additional urls +extra-index-urls = ["https://pypi.org/simple2"] +# can be "subprocess" or "disabled" +keyring-provider = "subprocess" +# --8<-- [end:pypi-config] + +# --8<-- [start:concurrency] +[concurrency] +# The maximum number of concurrent downloads +# Defaults to 50 as that was found to be a good balance between speed and stability +downloads = 5 + +# The maximum number of concurrent dependecy resolves +# Defaults to a heuristic based on the number of cores on the system +solves = 2 +# --8<-- [end:concurrency] + +# --8<-- [start:experimental] +[experimental] +# Enable the use of the environment activation cache +use-environment-activation-cache = true +# --8<-- [end:experimental] + +# --8<-- [start:mirrors] +[mirrors] +# redirect all requests for conda-forge to the prefix.dev mirror +"https://conda.anaconda.org/conda-forge" = [ + "https://prefix.dev/conda-forge" +] + +# redirect all requests for bioconda to one of the three listed mirrors +# Note: for repodata we try the first mirror first. +"https://conda.anaconda.org/bioconda" = [ + "https://conda.anaconda.org/bioconda", + # OCI registries are also supported + "oci://ghcr.io/channel-mirrors/bioconda", + "https://prefix.dev/bioconda", +] +# --8<-- [end:mirrors] \ No newline at end of file diff --git a/docs/source_files/pixi_config_tomls/mirror_prefix_config.toml b/docs/source_files/pixi_config_tomls/mirror_prefix_config.toml new file mode 100644 index 000000000..ea69aa5d4 --- /dev/null +++ b/docs/source_files/pixi_config_tomls/mirror_prefix_config.toml @@ -0,0 +1,6 @@ +# --8<-- [start:mirrors] +[mirrors] +"https://conda.anaconda.org" = [ + "https://prefix.dev/" +] +# --8<-- [end:mirrors] \ No newline at end of file diff --git a/docs/source_files/pixi_config_tomls/oci_config.toml b/docs/source_files/pixi_config_tomls/oci_config.toml new file mode 100644 index 000000000..94a8709c8 --- /dev/null +++ b/docs/source_files/pixi_config_tomls/oci_config.toml @@ -0,0 +1,6 @@ +# --8<-- [start:oci-mirrors] +[mirrors] +"https://conda.anaconda.org/conda-forge" = [ + "oci://ghcr.io/channel-mirrors/conda-forge" +] +# --8<-- [end:oci-mirrors] \ No newline at end of file diff --git a/tests/integration_rust/project_tests.rs b/tests/integration_rust/project_tests.rs index afe1b1057..07af6a418 100644 --- a/tests/integration_rust/project_tests.rs +++ b/tests/integration_rust/project_tests.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeSet; use std::path::PathBuf; use insta::assert_debug_snapshot; @@ -6,7 +7,7 @@ use pixi_manifest::FeaturesExt; use rattler_conda_types::{NamedChannelOrUrl, Platform}; use tempfile::TempDir; use url::Url; - +use pixi_config::Config; use crate::common::{package_database::PackageDatabase, PixiControl}; #[tokio::test] @@ -112,3 +113,32 @@ async fn parse_valid_schema_projects() { } } } + +#[test] +fn parse_valid_docs_manifests() { + // Test all files in the docs/source_files/pixi_tomls directory + let schema_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("docs/source_files/pixi_tomls"); + 
for entry in std::fs::read_dir(schema_dir).unwrap() { + let entry = entry.unwrap(); + let path = entry.path(); + if path.extension().map(|ext| ext == "toml").unwrap_or(false) { + let pixi_toml = std::fs::read_to_string(&path).unwrap(); + let _project = Project::from_str(&PathBuf::from("pixi.toml"), &pixi_toml).unwrap(); + } + } +} + +#[test] +fn parse_valid_docs_configs() { + // Test all files in the docs/source_files/pixi_config_tomls directory + let schema_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("docs/source_files/pixi_config_tomls"); + for entry in std::fs::read_dir(schema_dir).unwrap() { + let entry = entry.unwrap(); + let path = entry.path(); + if path.extension().map(|ext| ext == "toml").unwrap_or(false) { + let toml = std::fs::read_to_string(&path).unwrap(); + let (_config, unused_keys) = Config::from_toml(&toml).unwrap(); + assert_eq!(unused_keys, BTreeSet::::new(),"{}", format!("Unused keys in {:?}", path)); + } + } +} From d78acc1679130bdfe0b8917fab9da3f78a51b169 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Thu, 28 Nov 2024 10:14:20 +0100 Subject: [PATCH 06/10] fix: remove unneeded max_concurrent definitions --- src/cli/update.rs | 1 - src/lock_file/update.rs | 1 - 2 files changed, 2 deletions(-) diff --git a/src/cli/update.rs b/src/cli/update.rs index 88597bb58..8e714a461 100644 --- a/src/cli/update.rs +++ b/src/cli/update.rs @@ -158,7 +158,6 @@ pub async fn execute(args: Args) -> miette::Result<()> { let updated_lock_file = UpdateContext::builder(&project) .with_lock_file(relaxed_lock_file.clone()) .with_no_install(args.no_install) - .with_max_concurrent_solves(project.config().max_concurrent_solves()) .finish() .await? .update() diff --git a/src/lock_file/update.rs b/src/lock_file/update.rs index 4284ee1a6..f3a89b425 100644 --- a/src/lock_file/update.rs +++ b/src/lock_file/update.rs @@ -734,7 +734,6 @@ pub async fn update_lock_file( // Construct an update context and perform the actual update. 
let lock_file_derived_data = UpdateContext::builder(project) - .with_max_concurrent_solves(options.max_concurrent_solves) .with_package_cache(package_cache) .with_no_install(options.no_install) .with_outdated_environments(outdated) From 14e45c18ca58c5187c04dffc6e84f57816ca77bd Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Thu, 28 Nov 2024 10:15:32 +0100 Subject: [PATCH 07/10] clippy & fmt --- .../detached_environments_path_config.toml | 2 +- .../pixi_config_tomls/main_config.toml | 24 +++++++++---------- .../mirror_prefix_config.toml | 6 ++--- .../pixi_config_tomls/oci_config.toml | 4 ++-- src/lock_file/update.rs | 8 ------- tests/integration_rust/project_tests.rs | 14 +++++++---- 6 files changed, 26 insertions(+), 32 deletions(-) diff --git a/docs/source_files/pixi_config_tomls/detached_environments_path_config.toml b/docs/source_files/pixi_config_tomls/detached_environments_path_config.toml index be18943e7..d7e347630 100644 --- a/docs/source_files/pixi_config_tomls/detached_environments_path_config.toml +++ b/docs/source_files/pixi_config_tomls/detached_environments_path_config.toml @@ -1,3 +1,3 @@ # --8<-- [start:detached-environments-path] detached-environments = "/opt/pixi/envs" -# --8<-- [end:detached-environments-path] \ No newline at end of file +# --8<-- [end:detached-environments-path] diff --git a/docs/source_files/pixi_config_tomls/main_config.toml b/docs/source_files/pixi_config_tomls/main_config.toml index e745304f2..ed30e677c 100644 --- a/docs/source_files/pixi_config_tomls/main_config.toml +++ b/docs/source_files/pixi_config_tomls/main_config.toml @@ -28,10 +28,10 @@ pinning-strategy = "no-pin" # disable fetching of jlap, bz2 or zstd repodata files. # This should only be used for specific old versions of artifactory and other non-compliant # servers. -disable-jlap = true # don't try to download repodata.jlap -disable-bzip2 = true # don't try to download repodata.json.bz2 -disable-zstd = true # don't try to download repodata.json.zst -disable-sharded = true # don't try to download sharded repodata +disable-bzip2 = true # don't try to download repodata.json.bz2 +disable-jlap = true # don't try to download repodata.jlap +disable-sharded = true # don't try to download sharded repodata +disable-zstd = true # don't try to download repodata.json.zst # --8<-- [end:repodata-config] # --8<-- [start:prefix-repodata-config] [repodata-config."https://prefix.dev"] @@ -54,7 +54,7 @@ keyring-provider = "subprocess" # Defaults to 50 as that was found to be a good balance between speed and stability downloads = 5 -# The maximum number of concurrent dependecy resolves +# The maximum number of concurrent dependency resolves # Defaults to a heuristic based on the number of cores on the system solves = 2 # --8<-- [end:concurrency] @@ -68,16 +68,14 @@ use-environment-activation-cache = true # --8<-- [start:mirrors] [mirrors] # redirect all requests for conda-forge to the prefix.dev mirror -"https://conda.anaconda.org/conda-forge" = [ - "https://prefix.dev/conda-forge" -] +"https://conda.anaconda.org/conda-forge" = ["https://prefix.dev/conda-forge"] # redirect all requests for bioconda to one of the three listed mirrors # Note: for repodata we try the first mirror first. 
"https://conda.anaconda.org/bioconda" = [ - "https://conda.anaconda.org/bioconda", - # OCI registries are also supported - "oci://ghcr.io/channel-mirrors/bioconda", - "https://prefix.dev/bioconda", + "https://conda.anaconda.org/bioconda", + # OCI registries are also supported + "oci://ghcr.io/channel-mirrors/bioconda", + "https://prefix.dev/bioconda", ] -# --8<-- [end:mirrors] \ No newline at end of file +# --8<-- [end:mirrors] diff --git a/docs/source_files/pixi_config_tomls/mirror_prefix_config.toml b/docs/source_files/pixi_config_tomls/mirror_prefix_config.toml index ea69aa5d4..26a19bb6f 100644 --- a/docs/source_files/pixi_config_tomls/mirror_prefix_config.toml +++ b/docs/source_files/pixi_config_tomls/mirror_prefix_config.toml @@ -1,6 +1,4 @@ # --8<-- [start:mirrors] [mirrors] -"https://conda.anaconda.org" = [ - "https://prefix.dev/" -] -# --8<-- [end:mirrors] \ No newline at end of file +"https://conda.anaconda.org" = ["https://prefix.dev/"] +# --8<-- [end:mirrors] diff --git a/docs/source_files/pixi_config_tomls/oci_config.toml b/docs/source_files/pixi_config_tomls/oci_config.toml index 94a8709c8..943d28e0b 100644 --- a/docs/source_files/pixi_config_tomls/oci_config.toml +++ b/docs/source_files/pixi_config_tomls/oci_config.toml @@ -1,6 +1,6 @@ # --8<-- [start:oci-mirrors] [mirrors] "https://conda.anaconda.org/conda-forge" = [ - "oci://ghcr.io/channel-mirrors/conda-forge" + "oci://ghcr.io/channel-mirrors/conda-forge", ] -# --8<-- [end:oci-mirrors] \ No newline at end of file +# --8<-- [end:oci-mirrors] diff --git a/src/lock_file/update.rs b/src/lock_file/update.rs index f3a89b425..729664602 100644 --- a/src/lock_file/update.rs +++ b/src/lock_file/update.rs @@ -824,14 +824,6 @@ impl<'p> UpdateContextBuilder<'p> { } } - /// Sets the maximum number of solves that are allowed to run concurrently. - pub(crate) fn with_max_concurrent_solves(self, max_concurrent_solves: usize) -> Self { - Self { - max_concurrent_solves, - ..self - } - } - /// Sets the io concurrency semaphore to use when updating environments. 
#[allow(unused)] pub fn with_io_concurrency_semaphore(self, io_concurrency_limit: IoConcurrencyLimit) -> Self { diff --git a/tests/integration_rust/project_tests.rs b/tests/integration_rust/project_tests.rs index 07af6a418..8a69def90 100644 --- a/tests/integration_rust/project_tests.rs +++ b/tests/integration_rust/project_tests.rs @@ -1,14 +1,14 @@ use std::collections::BTreeSet; use std::path::PathBuf; +use crate::common::{package_database::PackageDatabase, PixiControl}; use insta::assert_debug_snapshot; use pixi::Project; +use pixi_config::Config; use pixi_manifest::FeaturesExt; use rattler_conda_types::{NamedChannelOrUrl, Platform}; use tempfile::TempDir; use url::Url; -use pixi_config::Config; -use crate::common::{package_database::PackageDatabase, PixiControl}; #[tokio::test] async fn add_remove_channel() { @@ -131,14 +131,20 @@ fn parse_valid_docs_manifests() { #[test] fn parse_valid_docs_configs() { // Test all files in the docs/source_files/pixi_config_tomls directory - let schema_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("docs/source_files/pixi_config_tomls"); + let schema_dir = + PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("docs/source_files/pixi_config_tomls"); for entry in std::fs::read_dir(schema_dir).unwrap() { let entry = entry.unwrap(); let path = entry.path(); if path.extension().map(|ext| ext == "toml").unwrap_or(false) { let toml = std::fs::read_to_string(&path).unwrap(); let (_config, unused_keys) = Config::from_toml(&toml).unwrap(); - assert_eq!(unused_keys, BTreeSet::::new(),"{}", format!("Unused keys in {:?}", path)); + assert_eq!( + unused_keys, + BTreeSet::::new(), + "{}", + format!("Unused keys in {:?}", path) + ); } } } From 45757e2741f26ffd8c5c88abe7f256bdbe2087f4 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Thu, 28 Nov 2024 10:32:08 +0100 Subject: [PATCH 08/10] misc: add cli docs and test --- docs/reference/cli.md | 22 +++++++++++++++++++--- tests/integration_python/test_main_cli.py | 20 ++++++++++++++++++++ tests/integration_rust/project_tests.rs | 2 +- 3 files changed, 40 insertions(+), 4 deletions(-) diff --git a/docs/reference/cli.md b/docs/reference/cli.md index 1004e7983..4f40aa3c7 100644 --- a/docs/reference/cli.md +++ b/docs/reference/cli.md @@ -84,6 +84,8 @@ These dependencies will be read by pixi as if they had been added to the pixi `p - `--platform (-p)`: The platform for which the dependency should be added. (Allowed to be used more than once) - `--feature (-f)`: The feature for which the dependency should be added. - `--editable`: Specifies an editable dependency; only used in combination with `--pypi`. +- `--concurrent-downloads`: The number of concurrent downloads to use when installing packages. Defaults to 50. +- `--concurrent-solves`: The number of concurrent solves to use when installing packages. Defaults to the number of cpu threads. ```shell pixi add numpy # (1)! @@ -154,6 +156,8 @@ E.g. `pixi run`, `pixi shell`, `pixi shell-hook`, `pixi add`, `pixi remove` to n - `--frozen`: install the environment as defined in the lock file, doesn't update `pixi.lock` if it isn't up-to-date with [manifest file](project_configuration.md). It can also be controlled by the `PIXI_FROZEN` environment variable (example: `PIXI_FROZEN=true`). - `--locked`: only install if the `pixi.lock` is up-to-date with the [manifest file](project_configuration.md)[^1]. It can also be controlled by the `PIXI_LOCKED` environment variable (example: `PIXI_LOCKED=true`). Conflicts with `--frozen`. 
- `--environment (-e)`: The environment to install, if none are provided the default environment will be used. +- `--concurrent-downloads`: The number of concurrent downloads to use when installing packages. Defaults to 50. +- `--concurrent-solves`: The number of concurrent solves to use when installing packages. Defaults to the number of cpu threads. ```shell pixi install @@ -180,6 +184,8 @@ It will only update the lock file if the dependencies in the [manifest file](pro - `--dry-run (-n)`: Only show the changes that would be made, without actually updating the lock file or environment. - `--no-install`: Don't install the (solve) environment needed for solving pypi-dependencies. - `--json`: Output the changes in json format. +- `--concurrent-downloads`: The number of concurrent downloads to use when installing packages. Defaults to 50. +- `--concurrent-solves`: The number of concurrent solves to use when installing packages. Defaults to the number of cpu threads. ```shell pixi update numpy @@ -209,6 +215,8 @@ The `upgrade` command checks if there are newer versions of the dependencies and - `--no-install`: Don't install the (solve) environment needed for solving pypi-dependencies. - `--json`: Output the changes in json format. - `--dry-run (-n)`: Only show the changes that would be made, without actually updating the manifest, lock file, or environment. +- `--concurrent-downloads`: The number of concurrent downloads to use when installing packages. Defaults to 50. +- `--concurrent-solves`: The number of concurrent solves to use when installing packages. Defaults to the number of cpu threads. ```shell pixi upgrade @@ -252,6 +260,8 @@ You cannot run `pixi run source setup.bash` as `source` is not available in the - `--clean-env`: Run the task in a clean environment, this will remove all environment variables of the shell environment except for the ones pixi sets. THIS DOESN't WORK ON `Windows`. - `--force-activate`: (default, except in _experimental_ mode) Force the activation of the environment, even if the environment is already activated. - `--revalidate`: Revalidate the full environment, instead of checking the lock file hash. [more info](../features/environment.md#environment-installation-metadata) +- `--concurrent-downloads`: The number of concurrent downloads to use when installing packages. Defaults to 50. +- `--concurrent-solves`: The number of concurrent solves to use when installing packages. Defaults to the number of cpu threads. ```shell pixi run python @@ -318,9 +328,11 @@ Temporary environments are cached. If the same command is run again, the same en 1. ``: The command to run. #### Options: -* `--spec (-s)`: Matchspecs of packages to install. If this is not provided, the package is guessed from the command. -* `--channel (-c)`: The channel to install the packages from. If not specified the default channel is used. -* `--force-reinstall` If specified a new environment is always created even if one already exists. +- `--spec (-s)`: Matchspecs of packages to install. If this is not provided, the package is guessed from the command. +- `--channel (-c)`: The channel to install the packages from. If not specified the default channel is used. +- `--force-reinstall` If specified a new environment is always created even if one already exists. +- `--concurrent-downloads`: The number of concurrent downloads to use when installing packages. Defaults to 50. +- `--concurrent-solves`: The number of concurrent solves to use when installing packages. Defaults to the number of cpu threads. 
```shell pixi exec python @@ -691,6 +703,8 @@ To exit the pixi shell, simply run `exit`. - `--no-progress`: Hide all progress bars, always turned on if stderr is not a terminal [env: PIXI_NO_PROGRESS=] - `--force-activate`: (default, except in _experimental_ mode) Force the activation of the environment, even if the environment is already activated. - `--revalidate`: Revalidate the full environment, instead of checking lock file hash. [more info](../features/environment.md#environment-installation-metadata) +- `--concurrent-downloads`: The number of concurrent downloads to use when installing packages. Defaults to 50. +- `--concurrent-solves`: The number of concurrent solves to use when installing packages. Defaults to the number of cpu threads. ```shell pixi shell @@ -721,6 +735,8 @@ This command prints the activation script of an environment. this option, `--shell` is ignored. - `--force-activate`: (default, except in _experimental_ mode) Force the activation of the environment, even if the environment is already activated. - `--revalidate`: Revalidate the full environment, instead of checking lock file hash. [more info](../features/environment.md#environment-installation-metadata) +- `--concurrent-downloads`: The number of concurrent downloads to use when installing packages. Defaults to 50. +- `--concurrent-solves`: The number of concurrent solves to use when installing packages. Defaults to the number of cpu threads. ```shell pixi shell-hook diff --git a/tests/integration_python/test_main_cli.py b/tests/integration_python/test_main_cli.py index 5cf2be3f9..1cb8de80e 100644 --- a/tests/integration_python/test_main_cli.py +++ b/tests/integration_python/test_main_cli.py @@ -607,3 +607,23 @@ def test_upgrade_remove_info(pixi: Path, tmp_path: Path, multiple_versions_chann assert multiple_versions_channel_1 in parsed_manifest["dependencies"]["package3"]["channel"] # Remove build assert "build" not in parsed_manifest["dependencies"]["package3"] + + +def test_concurrency_flags(pixi: Path, tmp_path: Path, multiple_versions_channel_1: str) -> None: + manifest_path = tmp_path / "pixi.toml" + + # Create a new project + verify_cli_command([pixi, "init", "--channel", multiple_versions_channel_1, tmp_path]) + + # Add package pinned to version 0.1.0 + verify_cli_command( + [ + pixi, + "add", + "--concurrent-solves=12", + "--concurrent-downloads=2", + "--manifest-path", + manifest_path, + "package3", + ] + ) diff --git a/tests/integration_rust/project_tests.rs b/tests/integration_rust/project_tests.rs index 8a69def90..6da6e6173 100644 --- a/tests/integration_rust/project_tests.rs +++ b/tests/integration_rust/project_tests.rs @@ -143,7 +143,7 @@ fn parse_valid_docs_configs() { unused_keys, BTreeSet::::new(), "{}", - format!("Unused keys in {:?}", path) + format_args!("Unused keys in {:?}", path) ); } } From a6b2de4ac8a0ef214ff39662f13d6c9eca7d0660 Mon Sep 17 00:00:00 2001 From: Ruben Arts Date: Thu, 28 Nov 2024 16:25:23 +0100 Subject: [PATCH 09/10] fix: manifest-path location --- src/cli/cli_config.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/cli/cli_config.rs b/src/cli/cli_config.rs index 3eddf04f0..48e638906 100644 --- a/src/cli/cli_config.rs +++ b/src/cli/cli_config.rs @@ -20,7 +20,7 @@ use std::path::PathBuf; #[derive(Parser, Debug, Default, Clone)] pub struct ProjectConfig { /// The path to `pixi.toml` or `pyproject.toml` - #[arg(long)] + #[arg(long, global=true)] pub manifest_path: Option, } From 5c9f8d45c2e0f9b51543386da764f00d7c460335 Mon Sep 17 00:00:00 2001 From: 
Ruben Arts
Date: Thu, 28 Nov 2024 16:49:39 +0100
Subject: [PATCH 10/10] fmt

---
 src/cli/cli_config.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/cli/cli_config.rs b/src/cli/cli_config.rs
index 48e638906..a6fbd84bd 100644
--- a/src/cli/cli_config.rs
+++ b/src/cli/cli_config.rs
@@ -20,7 +20,7 @@ use std::path::PathBuf;
 
 #[derive(Parser, Debug, Default, Clone)]
 pub struct ProjectConfig {
     /// The path to `pixi.toml` or `pyproject.toml`
-    #[arg(long, global=true)]
+    #[arg(long, global = true)]
     pub manifest_path: Option<PathBuf>,
 }
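
A quick way to sanity-check the new `concurrency` settings outside of pixi itself is to parse a small config with the public `pixi_config` API. The sketch below is illustrative only and is not part of the patches above; it assumes nothing beyond the `Config::from_toml`, `max_concurrent_solves()` and `max_concurrent_downloads()` calls already exercised by the tests in this series.

```rust
use std::collections::BTreeSet;

use pixi_config::Config;

fn main() {
    // The same `concurrency` keys documented in pixi_configuration.md above.
    let toml = r#"
[concurrency]
# limit dependency resolution to 4 parallel solves
solves = 4
# allow up to 12 parallel package downloads
downloads = 12
"#;

    // `Config::from_toml` returns the parsed config plus any keys it did not
    // recognize, as exercised by `parse_valid_docs_configs` above.
    let (config, unused_keys) = Config::from_toml(toml).expect("config should parse");
    assert_eq!(unused_keys, BTreeSet::<String>::new());

    // After this series the getters return plain `usize` values; the
    // `--concurrent-solves` / `--concurrent-downloads` CLI flags documented
    // above expose the same knobs on the command line.
    assert_eq!(config.max_concurrent_solves(), 4);
    assert_eq!(config.max_concurrent_downloads(), 12);
}
```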