diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3b6a663d4311a..3af157915e16f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -121,7 +121,7 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Create Dev Drive using ReFS + - name: Setup Dev Drive run: ${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1 # actions/checkout does not let us clone into anywhere outside ${{ github.workspace }}, so we have to copy the clone... @@ -260,7 +260,7 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Create Dev Drive using ReFS + - name: Setup Dev Drive run: ${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1 # actions/checkout does not let us clone into anywhere outside ${{ github.workspace }}, so we have to copy the clone... @@ -333,7 +333,7 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Create Dev Drive using ReFS + - name: Setup Dev Drive run: ${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1 # actions/checkout does not let us clone into anywhere outside ${{ github.workspace }}, so we have to copy the clone... @@ -522,7 +522,7 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Create Dev Drive using ReFS + - name: Setup Dev Drive run: ${{ github.workspace }}/.github/workflows/setup-dev-drive.ps1 # actions/checkout does not let us clone into anywhere outside ${{ github.workspace }}, so we have to copy the clone... diff --git a/.github/workflows/setup-dev-drive.ps1 b/.github/workflows/setup-dev-drive.ps1 index 11cb14af6cd85..d1c4882897419 100644 --- a/.github/workflows/setup-dev-drive.ps1 +++ b/.github/workflows/setup-dev-drive.ps1 @@ -8,23 +8,44 @@ # Note we use `Get-PSDrive` is not sufficient because the drive letter is assigned. if (Test-Path "D:\") { - Write-Output "Using `D:` drive" + Write-Output "Using existing drive at `D:`" $Drive = "D:" } else { - $Volume = New-VHD -Path C:/uv_dev_drive.vhdx -SizeBytes 20GB | - Mount-VHD -Passthru | - Initialize-Disk -Passthru | - New-Partition -AssignDriveLetter -UseMaximumSize | - Format-Volume -FileSystem ReFS -Confirm:$false -Force - - Write-Output "Using ReFS drive at $Volume" - $Drive = "$($Volume.DriveLetter):" + # The size (20 GB) is chosen empirically to be large enough for our + # workflows; larger drives can take longer to set up. 
+ $Volume = New-VHD -Path C:/uv_dev_drive.vhdx -SizeBytes 20GB | + Mount-VHD -Passthru | + Initialize-Disk -Passthru | + New-Partition -AssignDriveLetter -UseMaximumSize | + Format-Volume -DevDrive -Confirm:$false -Force + + $Drive = "$($Volume.DriveLetter):" + + # Set the drive as trusted + # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-designate-a-dev-drive-as-trusted + fsutil devdrv trust $Drive + + # Disable antivirus filtering on dev drives + # See https://learn.microsoft.com/en-us/windows/dev-drive/#how-do-i-configure-additional-filters-on-dev-drive + fsutil devdrv enable /disallowAv + + # Remount so the changes take effect + Dismount-VHD -Path C:/uv_dev_drive.vhdx + Mount-VHD -Path C:/uv_dev_drive.vhdx + + # Show some debug information + Write-Output $Volume + fsutil devdrv query $Drive + + Write-Output "Using Dev Drive at `$Volume`" } $Tmp = "$($Drive)\uv-tmp" +$Workspace = "$($Drive)\uv" -# Create the directory ahead of time in an attempt to avoid race-conditions +# Create directories ahead of time in an attempt to avoid race-conditions New-Item $Tmp -ItemType Directory +New-Item $Workspace -ItemType Directory Write-Output ` "DEV_DRIVE=$($Drive)" ` @@ -33,6 +54,6 @@ Write-Output ` "UV_INTERNAL__TEST_DIR=$($Tmp)" ` "RUSTUP_HOME=$($Drive)/.rustup" ` "CARGO_HOME=$($Drive)/.cargo" ` - "UV_WORKSPACE=$($Drive)/uv" ` + "UV_WORKSPACE=$($Workspace)" ` >> $env:GITHUB_ENV diff --git a/.gitignore b/.gitignore index 0087d31065b7f..708e225fe784f 100644 --- a/.gitignore +++ b/.gitignore @@ -35,3 +35,7 @@ profile.json # macOS **/.DS_Store + +# IDE +.idea +.vscode diff --git a/CHANGELOG.md b/CHANGELOG.md index f1ca6fca8fb6f..9b9bdc30a906a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,68 @@ # Changelog +## 0.5.20 + +### Bug fixes + +- Avoid failing when deserializing unknown tags ([#10655](https://github.com/astral-sh/uv/pull/10655)) + +## 0.5.19 + +### Enhancements + +- Filter wheels from lockfile based on architecture ([#10584](https://github.com/astral-sh/uv/pull/10584)) +- Omit dynamic versions from the lockfile ([#10622](https://github.com/astral-sh/uv/pull/10622)) +- Add support for `pip freeze --path` ([#10488](https://github.com/astral-sh/uv/pull/10488)) +- Reduce verbosity of inline-metadata message when using `uv run ` ([#10588](https://github.com/astral-sh/uv/pull/10588)) +- Add opt-in Git LFS support ([#10335](https://github.com/astral-sh/uv/pull/10335)) +- Recommend `--native-tls` on SSL errors ([#10605](https://github.com/astral-sh/uv/pull/10605)) +- Show expected and available ABI tags in resolver errors ([#10527](https://github.com/astral-sh/uv/pull/10527)) +- Show target Python version in error messages ([#10582](https://github.com/astral-sh/uv/pull/10582)) +- Add `--output-format=json` support to `uv python list` ([#10596](https://github.com/astral-sh/uv/pull/10596)) + +### Python + +The managed Python distributions have been updated, including: + +- Python 3.14 support on Windows +- Python 3.14.0a4 support +- 64-bit RISC-V Linux support +- Bundled `libedit` updated from 20210910-3.1 -> 20240808-3.1 +- Bundled `tcl/tk` updated from 8.6.12 -> 8.6.14 (for all Python versions on Unix, only for Python 3.14 on Windows) + +See the [`python-build-standalone` release notes](https://github.com/astral-sh/python-build-standalone/releases/tag/20250115) for more details. 
+ +### Performance + +- Avoid allocating when stripping source distribution extension ([#10625](https://github.com/astral-sh/uv/pull/10625)) +- Reduce `WheelFilename` to 48 bytes ([#10583](https://github.com/astral-sh/uv/pull/10583)) +- Reduce distribution size to 200 bytes ([#10601](https://github.com/astral-sh/uv/pull/10601)) +- Remove `import re` from entrypoint wrapper scripts ([#10627](https://github.com/astral-sh/uv/pull/10627)) +- Shrink size of platform tag enum ([#10546](https://github.com/astral-sh/uv/pull/10546)) +- Use `ArcStr` in verbatim URL ([#10600](https://github.com/astral-sh/uv/pull/10600)) +- Use `memchr` for wheel parsing ([#10620](https://github.com/astral-sh/uv/pull/10620)) + +### Bug fixes + +- Avoid reading symlinks during `uv python install` on Windows ([#10639](https://github.com/astral-sh/uv/pull/10639)) +- Correct Pyston tag format ([#10580](https://github.com/astral-sh/uv/pull/10580)) +- Provide `pyproject.toml` path for parse errors in `uv venv` ([#10553](https://github.com/astral-sh/uv/pull/10553)) +- Don't treat `setuptools` and `wheel` as seed packages in uv sync on Python 3.12 ([#10572](https://github.com/astral-sh/uv/pull/10572)) +- Fix git-tag cache-key reader in case of slashes (#10467) ([#10500](https://github.com/astral-sh/uv/pull/10500)) +- Include build tag in rendered wheel filenames ([#10599](https://github.com/astral-sh/uv/pull/10599)) +- Patch embedded install path for Python dylib on macOS during `python install` ([#10629](https://github.com/astral-sh/uv/pull/10629)) +- Read cached registry distributions when `--config-settings` are present ([#10578](https://github.com/astral-sh/uv/pull/10578)) +- Show resolver hints for packages with markers ([#10607](https://github.com/astral-sh/uv/pull/10607)) + +### Documentation + +- Add meta titles to documents in guides, excluding integration documents ([#10539](https://github.com/astral-sh/uv/pull/10539)) +- Remove `build-system` from example workspace rot ([#10636](https://github.com/astral-sh/uv/pull/10636)) + +### Preview features + +- Make build backend type annotations more generic ([#10549](https://github.com/astral-sh/uv/pull/10549)) + ## 0.5.18 ### Bug fixes diff --git a/Cargo.lock b/Cargo.lock index 9ef407a99c0da..b370ecd81e5b7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4490,7 +4490,7 @@ checksum = "f8c5f0a0af699448548ad1a2fbf920fb4bee257eae39953ba95cb84891a0446a" [[package]] name = "uv" -version = "0.5.18" +version = "0.5.20" dependencies = [ "anstream", "anyhow", @@ -5538,6 +5538,7 @@ dependencies = [ "same-file", "schemars", "serde", + "smallvec", "textwrap", "thiserror 2.0.11", "tokio", @@ -5721,7 +5722,7 @@ dependencies = [ [[package]] name = "uv-version" -version = "0.5.18" +version = "0.5.20" [[package]] name = "uv-virtualenv" diff --git a/crates/uv-cli/src/lib.rs b/crates/uv-cli/src/lib.rs index 2b9ff90f5d145..6ff408ea19b08 100644 --- a/crates/uv-cli/src/lib.rs +++ b/crates/uv-cli/src/lib.rs @@ -1184,6 +1184,9 @@ pub struct PipCompileArgs { /// Represented as a "target triple", a string that describes the target platform in terms of /// its CPU, vendor, and operating system name, like `x86_64-unknown-linux-gnu` or /// `aarch64-apple-darwin`. + /// + /// When targeting macOS (Darwin), the default minimum version is `12.0`. Use + /// `MACOSX_DEPLOYMENT_TARGET` to specify a different minimum version, e.g., `13.0`. 
#[arg(long)] pub python_platform: Option, @@ -1472,6 +1475,9 @@ pub struct PipSyncArgs { /// its CPU, vendor, and operating system name, like `x86_64-unknown-linux-gnu` or /// `aarch64-apple-darwin`. /// + /// When targeting macOS (Darwin), the default minimum version is `12.0`. Use + /// `MACOSX_DEPLOYMENT_TARGET` to specify a different minimum version, e.g., `13.0`. + /// /// WARNING: When specified, uv will select wheels that are compatible with the _target_ /// platform; as a result, the installed distributions may not be compatible with the _current_ /// platform. Conversely, any distributions that are built from source may be incompatible with @@ -1751,6 +1757,9 @@ pub struct PipInstallArgs { /// its CPU, vendor, and operating system name, like `x86_64-unknown-linux-gnu` or /// `aarch64-apple-darwin`. /// + /// When targeting macOS (Darwin), the default minimum version is `12.0`. Use + /// `MACOSX_DEPLOYMENT_TARGET` to specify a different minimum version, e.g., `13.0`. + /// /// WARNING: When specified, uv will select wheels that are compatible with the _target_ /// platform; as a result, the installed distributions may not be compatible with the _current_ /// platform. Conversely, any distributions that are built from source may be incompatible with @@ -2365,8 +2374,8 @@ pub struct VenvArgs { /// Install seed packages (one or more of: `pip`, `setuptools`, and `wheel`) into the virtual environment. /// - /// Note `setuptools` and `wheel` are not included in Python 3.12+ environments. - #[arg(long)] + /// Note that `setuptools` and `wheel` are not included in Python 3.12+ environments. + #[arg(long, value_parser = clap::builder::BoolishValueParser::new(), env = EnvVars::UV_VENV_SEED)] pub seed: bool, /// Preserve any existing files or directories at the target path. @@ -2395,8 +2404,8 @@ pub struct VenvArgs { /// the directory name. If not provided (`uv venv`), the prompt is set to /// the current directory's name. /// - /// If "." is provided, the the current directory name will be used - /// regardless of whether a path was provided to `uv venv`. + /// If "." is provided, the current directory name will be used regardless + /// of whether a path was provided to `uv venv`. 
#[arg(long, verbatim_doc_comment)] pub prompt: Option, diff --git a/crates/uv-distribution-filename/src/lib.rs b/crates/uv-distribution-filename/src/lib.rs index 540947e5d570a..95d0a98aee2c5 100644 --- a/crates/uv-distribution-filename/src/lib.rs +++ b/crates/uv-distribution-filename/src/lib.rs @@ -7,7 +7,7 @@ pub use build_tag::{BuildTag, BuildTagError}; pub use egg::{EggInfoFilename, EggInfoFilenameError}; pub use extension::{DistExtension, ExtensionError, SourceDistExtension}; pub use source_dist::{SourceDistFilename, SourceDistFilenameError}; -pub use wheel::{TagSet, WheelFilename, WheelFilenameError}; +pub use wheel::{WheelFilename, WheelFilenameError}; mod build_tag; mod egg; @@ -15,6 +15,7 @@ mod extension; mod source_dist; mod splitter; mod wheel; +mod wheel_tag; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub enum DistFilename { diff --git a/crates/uv-distribution-filename/src/snapshots/uv_distribution_filename__wheel__tests__ok_build_tag.snap b/crates/uv-distribution-filename/src/snapshots/uv_distribution_filename__wheel__tests__ok_build_tag.snap index cfd6846e39801..ce7dcc62a6e23 100644 --- a/crates/uv-distribution-filename/src/snapshots/uv_distribution_filename__wheel__tests__ok_build_tag.snap +++ b/crates/uv-distribution-filename/src/snapshots/uv_distribution_filename__wheel__tests__ok_build_tag.snap @@ -28,6 +28,7 @@ Ok( platform_tag: [ Any, ], + repr: "202206090410-py3-none-any", }, }, }, diff --git a/crates/uv-distribution-filename/src/snapshots/uv_distribution_filename__wheel__tests__ok_multiple_tags.snap b/crates/uv-distribution-filename/src/snapshots/uv_distribution_filename__wheel__tests__ok_multiple_tags.snap index 2da1f40052d42..a474bd98eaa83 100644 --- a/crates/uv-distribution-filename/src/snapshots/uv_distribution_filename__wheel__tests__ok_multiple_tags.snap +++ b/crates/uv-distribution-filename/src/snapshots/uv_distribution_filename__wheel__tests__ok_multiple_tags.snap @@ -38,6 +38,7 @@ Ok( arch: X86_64, }, ], + repr: "cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64", }, }, }, diff --git a/crates/uv-distribution-filename/src/wheel.rs b/crates/uv-distribution-filename/src/wheel.rs index e5e735d6e2d65..e450741ad5556 100644 --- a/crates/uv-distribution-filename/src/wheel.rs +++ b/crates/uv-distribution-filename/src/wheel.rs @@ -14,6 +14,7 @@ use uv_platform_tags::{ }; use crate::splitter::MemchrSplitter; +use crate::wheel_tag::{WheelTag, WheelTagLarge, WheelTagSmall}; use crate::{BuildTag, BuildTagError}; #[derive( @@ -177,7 +178,7 @@ impl WheelFilename { )); }; - let (name, version, build_tag, python_tag, abi_tag, platform_tag, is_large) = + let (name, version, build_tag, python_tag, abi_tag, platform_tag, is_small) = if let Some(platform_tag) = splitter.next() { if splitter.next().is_some() { return Err(WheelFilenameError::InvalidWheelFileName( @@ -193,7 +194,7 @@ impl WheelFilename { &stem[abi_tag_or_platform_tag + 1..platform_tag], &stem[platform_tag + 1..], // Always take the slow path if a build tag is present. - true, + false, ) } else { ( @@ -206,7 +207,7 @@ impl WheelFilename { // Determine whether any of the tag types contain a period, which would indicate // that at least one of the tag types includes multiple tags (which in turn // necessitates taking the slow path). 
- memchr(b'.', stem[build_tag_or_python_tag..].as_bytes()).is_some(), + memchr(b'.', stem[build_tag_or_python_tag..].as_bytes()).is_none(), ) }; @@ -221,44 +222,38 @@ impl WheelFilename { }) .transpose()?; - let tags = if is_large { + let tags = if let Some(small) = is_small + .then(|| { + Some(WheelTagSmall { + python_tag: LanguageTag::from_str(python_tag).ok()?, + abi_tag: AbiTag::from_str(abi_tag).ok()?, + platform_tag: PlatformTag::from_str(platform_tag).ok()?, + }) + }) + .flatten() + { + WheelTag::Small { small } + } else { + // Store the plaintext representation of the tags. + let repr = &stem[build_tag_or_python_tag + 1..]; WheelTag::Large { large: Box::new(WheelTagLarge { build_tag, python_tag: MemchrSplitter::split(python_tag, b'.') .map(LanguageTag::from_str) - .collect::>() - .map_err(|err| { - WheelFilenameError::InvalidLanguageTag(filename.to_string(), err) - })?, + .filter_map(Result::ok) + .collect(), abi_tag: MemchrSplitter::split(abi_tag, b'.') .map(AbiTag::from_str) - .collect::>() - .map_err(|err| { - WheelFilenameError::InvalidAbiTag(filename.to_string(), err) - })?, + .filter_map(Result::ok) + .collect(), platform_tag: MemchrSplitter::split(platform_tag, b'.') .map(PlatformTag::from_str) - .collect::>() - .map_err(|err| { - WheelFilenameError::InvalidPlatformTag(filename.to_string(), err) - })?, + .filter_map(Result::ok) + .collect(), + repr: repr.into(), }), } - } else { - WheelTag::Small { - small: WheelTagSmall { - python_tag: LanguageTag::from_str(python_tag).map_err(|err| { - WheelFilenameError::InvalidLanguageTag(filename.to_string(), err) - })?, - abi_tag: AbiTag::from_str(abi_tag).map_err(|err| { - WheelFilenameError::InvalidAbiTag(filename.to_string(), err) - })?, - platform_tag: PlatformTag::from_str(platform_tag).map_err(|err| { - WheelFilenameError::InvalidPlatformTag(filename.to_string(), err) - })?, - }, - } }; Ok(Self { @@ -311,124 +306,6 @@ impl Serialize for WheelFilename { } } -/// A [`SmallVec`] type for storing tags. -/// -/// Wheels tend to include a single language, ABI, and platform tag, so we use a [`SmallVec`] with a -/// capacity of 1 to optimize for this common case. -pub type TagSet = smallvec::SmallVec<[T; 3]>; - -/// The portion of the wheel filename following the name and version: the optional build tag, along -/// with the Python tag(s), ABI tag(s), and platform tag(s). -/// -/// Most wheels consist of a single Python, ABI, and platform tag (and no build tag). We represent -/// such wheels with [`WheelTagSmall`], a variant with a smaller memory footprint and (generally) -/// zero allocations. The [`WheelTagLarge`] variant is used for wheels with multiple tags and/or a -/// build tag. 
-#[derive( - Debug, - Clone, - Eq, - PartialEq, - Ord, - PartialOrd, - Hash, - rkyv::Archive, - rkyv::Deserialize, - rkyv::Serialize, -)] -#[rkyv(derive(Debug))] -enum WheelTag { - Small { small: WheelTagSmall }, - Large { large: Box }, -} - -impl Display for WheelTag { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - match self { - Self::Small { small } => write!(f, "{small}"), - Self::Large { large } => write!(f, "{large}"), - } - } -} - -#[derive( - Debug, - Clone, - Eq, - PartialEq, - Ord, - PartialOrd, - Hash, - rkyv::Archive, - rkyv::Deserialize, - rkyv::Serialize, -)] -#[rkyv(derive(Debug))] -#[allow(clippy::struct_field_names)] -struct WheelTagSmall { - python_tag: LanguageTag, - abi_tag: AbiTag, - platform_tag: PlatformTag, -} - -impl Display for WheelTagSmall { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}-{}-{}", - self.python_tag, self.abi_tag, self.platform_tag - ) - } -} - -#[derive( - Debug, - Clone, - Eq, - PartialEq, - Ord, - PartialOrd, - Hash, - rkyv::Archive, - rkyv::Deserialize, - rkyv::Serialize, -)] -#[rkyv(derive(Debug))] -#[allow(clippy::struct_field_names)] -pub struct WheelTagLarge { - build_tag: Option, - python_tag: TagSet, - abi_tag: TagSet, - platform_tag: TagSet, -} - -impl Display for WheelTagLarge { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - if let Some(build_tag) = &self.build_tag { - write!(f, "{build_tag}-")?; - } - write!( - f, - "{}-{}-{}", - self.python_tag - .iter() - .map(ToString::to_string) - .collect::>() - .join("."), - self.abi_tag - .iter() - .map(ToString::to_string) - .collect::>() - .join("."), - self.platform_tag - .iter() - .map(ToString::to_string) - .collect::>() - .join("."), - ) - } -} - #[derive(Error, Debug)] pub enum WheelFilenameError { #[error("The wheel filename \"{0}\" is invalid: {1}")] diff --git a/crates/uv-distribution-filename/src/wheel_tag.rs b/crates/uv-distribution-filename/src/wheel_tag.rs new file mode 100644 index 0000000000000..0315e8372ec2b --- /dev/null +++ b/crates/uv-distribution-filename/src/wheel_tag.rs @@ -0,0 +1,115 @@ +use std::fmt::{Display, Formatter}; + +use crate::BuildTag; +use uv_platform_tags::{AbiTag, LanguageTag, PlatformTag}; +use uv_small_str::SmallString; + +/// A [`SmallVec`] type for storing tags. +/// +/// Wheels tend to include a single language, ABI, and platform tag, so we use a [`SmallVec`] with a +/// capacity of 1 to optimize for this common case. +pub(crate) type TagSet = smallvec::SmallVec<[T; 3]>; + +/// The portion of the wheel filename following the name and version: the optional build tag, along +/// with the Python tag(s), ABI tag(s), and platform tag(s). +/// +/// Most wheels consist of a single Python, ABI, and platform tag (and no build tag). We represent +/// such wheels with [`WheelTagSmall`], a variant with a smaller memory footprint and (generally) +/// zero allocations. The [`WheelTagLarge`] variant is used for wheels with multiple tags, a build +/// tag, or an unsupported tag (i.e., a tag that can't be represented by [`LanguageTag`], +/// [`AbiTag`], or [`PlatformTag`]). (Unsupported tags are filtered out, but retained in the display +/// representation of [`WheelTagLarge`].) 
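To make the layout trade-off described in the comment above concrete, here is a minimal, self-contained sketch (not uv's actual types — `SmallTags`, `LargeTags`, and the `u16`/`Vec` fields are illustrative stand-ins) of why boxing the rare multi-tag case keeps the enum close to the size of the common single-tag case:

```rust
#![allow(dead_code)]

/// Compact form for the common case: exactly one tag of each kind, stored inline
/// as small copyable IDs (stand-ins for `LanguageTag`, `AbiTag`, `PlatformTag`).
#[derive(Debug, Clone, Copy)]
struct SmallTags {
    python: u16,
    abi: u16,
    platform: u16,
}

/// Heap-allocated form for the rare case: multiple tags and/or a build tag, plus
/// the original string so unsupported tags can still be displayed verbatim.
#[derive(Debug, Clone)]
struct LargeTags {
    build: Option<u32>,
    python: Vec<u16>,
    abi: Vec<u16>,
    platform: Vec<u16>,
    repr: String,
}

#[derive(Debug, Clone)]
enum Tags {
    Small(SmallTags),
    Large(Box<LargeTags>),
}

fn main() {
    // Because `Large` holds a `Box`, its payload is pointer-sized; the enum is
    // therefore a few machine words rather than the ~100 bytes of `LargeTags`.
    println!("SmallTags: {} bytes", std::mem::size_of::<SmallTags>());
    println!("LargeTags: {} bytes", std::mem::size_of::<LargeTags>());
    println!("Tags enum:  {} bytes", std::mem::size_of::<Tags>());
    assert!(std::mem::size_of::<Tags>() < std::mem::size_of::<LargeTags>());
}
```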
+#[derive( + Debug, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + rkyv::Archive, + rkyv::Deserialize, + rkyv::Serialize, +)] +#[rkyv(derive(Debug))] +pub(crate) enum WheelTag { + Small { small: WheelTagSmall }, + Large { large: Box }, +} + +impl Display for WheelTag { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Self::Small { small } => write!(f, "{small}"), + Self::Large { large } => write!(f, "{large}"), + } + } +} + +#[derive( + Debug, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + rkyv::Archive, + rkyv::Deserialize, + rkyv::Serialize, +)] +#[rkyv(derive(Debug))] +#[allow(clippy::struct_field_names)] +pub(crate) struct WheelTagSmall { + /// The Python tag, e.g., `py3` in `1.2.3-py3-none-any`. + pub(crate) python_tag: LanguageTag, + /// The ABI tag, e.g., `none` in `1.2.3-py3-none-any`. + pub(crate) abi_tag: AbiTag, + /// The platform tag, e.g., `none` in `1.2.3-py3-none-any`. + pub(crate) platform_tag: PlatformTag, +} + +impl Display for WheelTagSmall { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}-{}-{}", + self.python_tag, self.abi_tag, self.platform_tag + ) + } +} + +#[derive( + Debug, + Clone, + Eq, + PartialEq, + Ord, + PartialOrd, + Hash, + rkyv::Archive, + rkyv::Deserialize, + rkyv::Serialize, +)] +#[rkyv(derive(Debug))] +#[allow(clippy::struct_field_names)] +pub(crate) struct WheelTagLarge { + /// The optional build tag, e.g., `73` in `1.2.3-73-py3-none-any`. + pub(crate) build_tag: Option, + /// The Python tag(s), e.g., `py3` in `1.2.3-73-py3-none-any`. + pub(crate) python_tag: TagSet, + /// The ABI tag(s), e.g., `none` in `1.2.3-73-py3-none-any`. + pub(crate) abi_tag: TagSet, + /// The platform tag(s), e.g., `none` in `1.2.3-73-py3-none-any`. + pub(crate) platform_tag: TagSet, + /// The string representation of the tag. + /// + /// Preserves any unsupported tags that were filtered out when parsing the wheel filename. + pub(crate) repr: SmallString, +} + +impl Display for WheelTagLarge { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.repr) + } +} diff --git a/crates/uv-distribution/src/distribution_database.rs b/crates/uv-distribution/src/distribution_database.rs index 79dc851870902..3dd22d13410bb 100644 --- a/crates/uv-distribution/src/distribution_database.rs +++ b/crates/uv-distribution/src/distribution_database.rs @@ -496,8 +496,13 @@ impl<'a, Context: BuildContext> DistributionDatabase<'a, Context> { } /// Return the [`RequiresDist`] from a `pyproject.toml`, if it can be statically extracted. - pub async fn requires_dist(&self, project_root: &Path) -> Result, Error> { - self.builder.source_tree_requires_dist(project_root).await + pub async fn requires_dist( + &self, + source_tree: impl AsRef, + ) -> Result, Error> { + self.builder + .source_tree_requires_dist(source_tree.as_ref()) + .await } /// Stream a wheel from a URL, unzipping it into the cache as it's downloaded. diff --git a/crates/uv-distribution/src/source/mod.rs b/crates/uv-distribution/src/source/mod.rs index abdd246f20fac..f68b9a9bf5204 100644 --- a/crates/uv-distribution/src/source/mod.rs +++ b/crates/uv-distribution/src/source/mod.rs @@ -1344,18 +1344,18 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { /// Return the [`RequiresDist`] from a `pyproject.toml`, if it can be statically extracted. 
pub(crate) async fn source_tree_requires_dist( &self, - project_root: &Path, + source_tree: &Path, ) -> Result, Error> { // Attempt to read static metadata from the `pyproject.toml`. - match read_requires_dist(project_root).await { + match read_requires_dist(source_tree).await { Ok(requires_dist) => { debug!( "Found static `requires-dist` for: {}", - project_root.display() + source_tree.display() ); let requires_dist = RequiresDist::from_project_maybe_workspace( requires_dist, - project_root, + source_tree, None, self.build_context.locations(), self.build_context.sources(), @@ -1375,7 +1375,7 @@ impl<'a, T: BuildContext> SourceDistributionBuilder<'a, T> { ) => { debug!( "No static `requires-dist` available for: {} ({err:?})", - project_root.display() + source_tree.display() ); Ok(None) } diff --git a/crates/uv-installer/src/compile.rs b/crates/uv-installer/src/compile.rs index 2fc2d87b2923d..b61c063e2b3f9 100644 --- a/crates/uv-installer/src/compile.rs +++ b/crates/uv-installer/src/compile.rs @@ -48,8 +48,11 @@ pub enum CompileError { #[source] err: Box, }, - #[error("Bytecode timed out ({}s)", _0.as_secs_f32())] - CompileTimeout(Duration), + #[error("Bytecode timed out ({}s) compiling file: `{}`", elapsed.as_secs_f32(), source_file)] + CompileTimeout { + elapsed: Duration, + source_file: String, + }, #[error("Python startup timed out ({}s)", _0.as_secs_f32())] StartupTimeout(Duration), } @@ -358,7 +361,10 @@ async fn worker_main_loop( // should ever take. tokio::time::timeout(COMPILE_TIMEOUT, python_handle) .await - .map_err(|_| CompileError::CompileTimeout(COMPILE_TIMEOUT))??; + .map_err(|_| CompileError::CompileTimeout { + elapsed: COMPILE_TIMEOUT, + source_file: source_file.clone(), + })??; // This is a sanity check, if we don't get the path back something has gone wrong, e.g. // we're not actually running a python interpreter. diff --git a/crates/uv-pep508/src/marker/algebra.rs b/crates/uv-pep508/src/marker/algebra.rs index 303d2696f4948..959827924aa72 100644 --- a/crates/uv-pep508/src/marker/algebra.rs +++ b/crates/uv-pep508/src/marker/algebra.rs @@ -1556,7 +1556,7 @@ fn python_version_to_full_version(specifier: VersionSpecifier) -> Result, implementation_version: (u8, u8), }, /// Ex) `graalpy310_graalpy240_310_native` @@ -43,6 +43,34 @@ pub enum AbiTag { Pyston { implementation_version: (u8, u8) }, } +impl AbiTag { + /// Return a pretty string representation of the ABI tag. + pub fn pretty(self) -> Option { + match self { + AbiTag::None => None, + AbiTag::Abi3 => None, + AbiTag::CPython { python_version, .. } => { + Some(format!("CPython {}.{}", python_version.0, python_version.1)) + } + AbiTag::PyPy { + implementation_version, + .. + } => Some(format!( + "PyPy {}.{}", + implementation_version.0, implementation_version.1 + )), + AbiTag::GraalPy { + implementation_version, + .. + } => Some(format!( + "GraalPy {}.{}", + implementation_version.0, implementation_version.1 + )), + AbiTag::Pyston { .. } => Some("Pyston".to_string()), + } + } +} + impl std::fmt::Display for AbiTag { /// Format an [`AbiTag`] as a string. 
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { @@ -64,11 +92,17 @@ impl std::fmt::Display for AbiTag { } } Self::PyPy { - python_version: (py_major, py_minor), + python_version: Some((py_major, py_minor)), implementation_version: (impl_major, impl_minor), } => { write!(f, "pypy{py_major}{py_minor}_pp{impl_major}{impl_minor}") } + Self::PyPy { + python_version: None, + implementation_version: (impl_major, impl_minor), + } => { + write!(f, "pypy_{impl_major}{impl_minor}") + } Self::GraalPy { python_version: (py_major, py_minor), implementation_version: (impl_major, impl_minor), @@ -174,25 +208,34 @@ impl FromStr for AbiTag { python_version: (major, minor), }) } else if let Some(rest) = s.strip_prefix("pypy") { - // Ex) `pypy39_pp73` - let (version_str, rest) = - rest.split_once('_') - .ok_or_else(|| ParseAbiTagError::InvalidFormat { - implementation: "PyPy", - tag: s.to_string(), - })?; - let (major, minor) = parse_python_version(version_str, "PyPy", s)?; - let rest = rest - .strip_prefix("pp") - .ok_or_else(|| ParseAbiTagError::InvalidFormat { - implementation: "PyPy", - tag: s.to_string(), - })?; - let (impl_major, impl_minor) = parse_impl_version(rest, "PyPy", s)?; - Ok(Self::PyPy { - python_version: (major, minor), - implementation_version: (impl_major, impl_minor), - }) + if let Some(rest) = rest.strip_prefix('_') { + // Ex) `pypy_73` + let (impl_major, impl_minor) = parse_impl_version(rest, "PyPy", s)?; + Ok(Self::PyPy { + python_version: None, + implementation_version: (impl_major, impl_minor), + }) + } else { + // Ex) `pypy39_pp73` + let (version_str, rest) = + rest.split_once('_') + .ok_or_else(|| ParseAbiTagError::InvalidFormat { + implementation: "PyPy", + tag: s.to_string(), + })?; + let (major, minor) = parse_python_version(version_str, "PyPy", s)?; + let rest = + rest.strip_prefix("pp") + .ok_or_else(|| ParseAbiTagError::InvalidFormat { + implementation: "PyPy", + tag: s.to_string(), + })?; + let (impl_major, impl_minor) = parse_impl_version(rest, "PyPy", s)?; + Ok(Self::PyPy { + python_version: Some((major, minor)), + implementation_version: (impl_major, impl_minor), + }) + } } else if let Some(rest) = s.strip_prefix("graalpy") { // Ex) `graalpy310_graalpy240_310_native` let version_end = rest @@ -349,12 +392,19 @@ mod tests { #[test] fn pypy_abi() { let tag = AbiTag::PyPy { - python_version: (3, 9), + python_version: Some((3, 9)), implementation_version: (7, 3), }; assert_eq!(AbiTag::from_str("pypy39_pp73"), Ok(tag)); assert_eq!(tag.to_string(), "pypy39_pp73"); + let tag = AbiTag::PyPy { + python_version: None, + implementation_version: (7, 3), + }; + assert_eq!(AbiTag::from_str("pypy_73").as_ref(), Ok(&tag)); + assert_eq!(tag.to_string(), "pypy_73"); + assert_eq!( AbiTag::from_str("pypy39"), Err(ParseAbiTagError::InvalidFormat { diff --git a/crates/uv-platform-tags/src/language_tag.rs b/crates/uv-platform-tags/src/language_tag.rs index 3abfa4e38548b..8641664de2301 100644 --- a/crates/uv-platform-tags/src/language_tag.rs +++ b/crates/uv-platform-tags/src/language_tag.rs @@ -34,6 +34,34 @@ pub enum LanguageTag { Pyston { python_version: (u8, u8) }, } +impl LanguageTag { + /// Return a pretty string representation of the language tag. 
+ pub fn pretty(self) -> Option { + match self { + Self::None => None, + Self::Python { major, minor } => { + if let Some(minor) = minor { + Some(format!("Python {major}.{minor}")) + } else { + Some(format!("Python {major}")) + } + } + Self::CPython { + python_version: (major, minor), + } => Some(format!("CPython {major}.{minor}")), + Self::PyPy { + python_version: (major, minor), + } => Some(format!("PyPy {major}.{minor}")), + Self::GraalPy { + python_version: (major, minor), + } => Some(format!("GraalPy {major}.{minor}")), + Self::Pyston { + python_version: (major, minor), + } => Some(format!("Pyston {major}.{minor}")), + } + } +} + impl std::fmt::Display for LanguageTag { /// Format a [`LanguageTag`] as a string. fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { diff --git a/crates/uv-platform-tags/src/platform_tag.rs b/crates/uv-platform-tags/src/platform_tag.rs index b74bec83d3879..0ba46200d828d 100644 --- a/crates/uv-platform-tags/src/platform_tag.rs +++ b/crates/uv-platform-tags/src/platform_tag.rs @@ -53,6 +53,8 @@ pub enum PlatformTag { WinAmd64, /// Ex) `win_arm64` WinArm64, + /// Ex) `win_ia64` + WinIa64, /// Ex) `android_21_x86_64` Android { api_level: u16, arch: Arch }, /// Ex) `freebsd_12_x86_64` @@ -71,6 +73,34 @@ pub enum PlatformTag { Solaris { release_arch: SmallString }, } +impl PlatformTag { + /// Return a pretty string representation of the language tag. + pub fn pretty(&self) -> Option<&'static str> { + match self { + PlatformTag::Any => None, + PlatformTag::Manylinux { .. } => Some("Linux"), + PlatformTag::Manylinux1 { .. } => Some("Linux"), + PlatformTag::Manylinux2010 { .. } => Some("Linux"), + PlatformTag::Manylinux2014 { .. } => Some("Linux"), + PlatformTag::Linux { .. } => Some("Linux"), + PlatformTag::Musllinux { .. } => Some("Linux"), + PlatformTag::Macos { .. } => Some("macOS"), + PlatformTag::Win32 => Some("Windows"), + PlatformTag::WinAmd64 => Some("Windows"), + PlatformTag::WinArm64 => Some("Windows"), + PlatformTag::WinIa64 => Some("Windows"), + PlatformTag::Android { .. } => Some("Android"), + PlatformTag::FreeBsd { .. } => Some("FreeBSD"), + PlatformTag::NetBsd { .. } => Some("NetBSD"), + PlatformTag::OpenBsd { .. } => Some("OpenBSD"), + PlatformTag::Dragonfly { .. } => Some("DragonFly"), + PlatformTag::Haiku { .. } => Some("Haiku"), + PlatformTag::Illumos { .. } => Some("Illumos"), + PlatformTag::Solaris { .. } => Some("Solaris"), + } + } +} + impl PlatformTag { /// Returns `true` if the platform is manylinux-only. pub fn is_manylinux(&self) -> bool { @@ -103,7 +133,10 @@ impl PlatformTag { /// Returns `true` if the platform is Windows-only. pub fn is_windows(&self) -> bool { - matches!(self, Self::Win32 | Self::WinAmd64 | Self::WinArm64) + matches!( + self, + Self::Win32 | Self::WinAmd64 | Self::WinArm64 | Self::WinIa64 + ) } /// Returns `true` if the tag is only applicable on ARM platforms. 
@@ -220,6 +253,7 @@ impl std::fmt::Display for PlatformTag { Self::Win32 => write!(f, "win32"), Self::WinAmd64 => write!(f, "win_amd64"), Self::WinArm64 => write!(f, "win_arm64"), + Self::WinIa64 => write!(f, "win_ia64"), Self::Android { api_level, arch } => write!(f, "android_{api_level}_{arch}"), Self::FreeBsd { release_arch } => write!(f, "freebsd_{release_arch}"), Self::NetBsd { release_arch } => write!(f, "netbsd_{release_arch}"), @@ -243,6 +277,7 @@ impl FromStr for PlatformTag { "win32" => return Ok(Self::Win32), "win_amd64" => return Ok(Self::WinAmd64), "win_arm64" => return Ok(Self::WinArm64), + "win_ia64" => return Ok(Self::WinIa64), _ => {} } diff --git a/crates/uv-platform-tags/src/tags.rs b/crates/uv-platform-tags/src/tags.rs index 73a70a0f446bd..2a85a311426e1 100644 --- a/crates/uv-platform-tags/src/tags.rs +++ b/crates/uv-platform-tags/src/tags.rs @@ -392,7 +392,7 @@ impl Implementation { }, // Ex) `pypy39_pp73` Self::PyPy => AbiTag::PyPy { - python_version, + python_version: Some(python_version), implementation_version, }, // Ex) `graalpy310_graalpy240_310_native diff --git a/crates/uv-python/src/discovery.rs b/crates/uv-python/src/discovery.rs index 77fb6acf64bed..d35e3b0df5514 100644 --- a/crates/uv-python/src/discovery.rs +++ b/crates/uv-python/src/discovery.rs @@ -414,7 +414,7 @@ fn python_executables<'a>( }) .flatten(); - // Check if the the base conda environment is active + // Check if the base conda environment is active let from_base_conda_environment = iter::once_with(|| { conda_environment_from_env(CondaEnvironmentKind::Base) .into_iter() diff --git a/crates/uv-python/src/installation.rs b/crates/uv-python/src/installation.rs index e7fc3994056c1..12a32b0c6c092 100644 --- a/crates/uv-python/src/installation.rs +++ b/crates/uv-python/src/installation.rs @@ -165,6 +165,9 @@ impl PythonInstallation { installed.ensure_externally_managed()?; installed.ensure_sysconfig_patched()?; installed.ensure_canonical_executables()?; + if let Err(e) = installed.ensure_dylib_patched() { + e.warn_user(&installed); + } Ok(Self { source: PythonSource::Managed, diff --git a/crates/uv-python/src/lib.rs b/crates/uv-python/src/lib.rs index ea5b3e57275af..0eb3cb36f7f62 100644 --- a/crates/uv-python/src/lib.rs +++ b/crates/uv-python/src/lib.rs @@ -30,6 +30,7 @@ mod implementation; mod installation; mod interpreter; mod libc; +pub mod macos_dylib; pub mod managed; #[cfg(windows)] mod microsoft_store; diff --git a/crates/uv-python/src/macos_dylib.rs b/crates/uv-python/src/macos_dylib.rs new file mode 100644 index 0000000000000..7294497b3c9cf --- /dev/null +++ b/crates/uv-python/src/macos_dylib.rs @@ -0,0 +1,63 @@ +use std::{io::ErrorKind, path::PathBuf}; + +use uv_fs::Simplified as _; +use uv_warnings::warn_user; + +use crate::managed::ManagedPythonInstallation; + +pub fn patch_dylib_install_name(dylib: PathBuf) -> Result<(), Error> { + let output = match std::process::Command::new("install_name_tool") + .arg("-id") + .arg(&dylib) + .arg(&dylib) + .output() + { + Ok(output) => output, + Err(e) => { + let e = if e.kind() == ErrorKind::NotFound { + Error::MissingInstallNameTool + } else { + e.into() + }; + return Err(e); + } + }; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr).into_owned(); + return Err(Error::RenameError { dylib, stderr }); + } + + Ok(()) +} + +#[derive(thiserror::Error, Debug)] +pub enum Error { + #[error(transparent)] + Io(#[from] std::io::Error), + #[error("`install_name_tool` is not available on this system. 
+This utility is part of macOS Developer Tools. Please ensure that the Xcode Command Line Tools are installed by running: + + xcode-select --install + +For more information, see: https://developer.apple.com/xcode/")] + MissingInstallNameTool, + #[error("Failed to update the install name of the Python dynamic library located at `{}`", dylib.user_display())] + RenameError { dylib: PathBuf, stderr: String }, +} + +impl Error { + /// Emit a user-friendly warning about the patching failure. + pub fn warn_user(&self, installation: &ManagedPythonInstallation) { + let error = if tracing::enabled!(tracing::Level::DEBUG) { + format!("\nUnderlying error: {self}") + } else { + String::new() + }; + warn_user!( + "Failed to patch the install name of the dynamic library for {}. This may cause issues when building Python native extensions.{}", + installation.executable().simplified_display(), + error + ); + } +} diff --git a/crates/uv-python/src/managed.rs b/crates/uv-python/src/managed.rs index 1b35e8c2cad68..90f2e65de8be1 100644 --- a/crates/uv-python/src/managed.rs +++ b/crates/uv-python/src/managed.rs @@ -25,7 +25,8 @@ use crate::libc::LibcDetectionError; use crate::platform::Error as PlatformError; use crate::platform::{Arch, Libc, Os}; use crate::python_version::PythonVersion; -use crate::{sysconfig, PythonRequest, PythonVariant}; +use crate::{macos_dylib, sysconfig, PythonRequest, PythonVariant}; + #[derive(Error, Debug)] pub enum Error { #[error(transparent)] @@ -88,6 +89,8 @@ pub enum Error { NameParseError(#[from] installation::PythonInstallationKeyError), #[error(transparent)] LibcDetection(#[from] LibcDetectionError), + #[error(transparent)] + MacOsDylib(#[from] macos_dylib::Error), } /// A collection of uv-managed Python installations installed on the current system. #[derive(Debug, Clone, Eq, PartialEq)] @@ -508,6 +511,28 @@ impl ManagedPythonInstallation { Ok(()) } + /// On macOS, ensure that the `install_name` for the Python dylib is set + /// correctly, rather than pointing at `/install/lib/libpython{version}.dylib`. + /// This is necessary to ensure that native extensions written in Rust + /// link to the correct location for the Python library. + /// + /// See for more information. + pub fn ensure_dylib_patched(&self) -> Result<(), macos_dylib::Error> { + if cfg!(target_os = "macos") { + if *self.implementation() == ImplementationName::CPython { + let dylib_path = self.python_dir().join("lib").join(format!( + "{}python{}{}{}", + std::env::consts::DLL_PREFIX, + self.key.version().python_version(), + self.key.variant().suffix(), + std::env::consts::DLL_SUFFIX + )); + macos_dylib::patch_dylib_install_name(dylib_path)?; + } + } + Ok(()) + } + /// Create a link to the managed Python executable. /// /// If the file already exists at the target path, an error will be returned. @@ -603,7 +628,7 @@ impl ManagedPythonInstallation { } /// Generate a platform portion of a key from the environment. 
-fn platform_key_from_env() -> Result { +pub fn platform_key_from_env() -> Result { let os = Os::from_env(); let arch = Arch::from_env(); let libc = Libc::from_env()?; diff --git a/crates/uv-resolver/Cargo.toml b/crates/uv-resolver/Cargo.toml index 4f0a4687a7dd4..1bf906679c3fa 100644 --- a/crates/uv-resolver/Cargo.toml +++ b/crates/uv-resolver/Cargo.toml @@ -55,6 +55,7 @@ rustc-hash = { workspace = true } same-file = { workspace = true } schemars = { workspace = true, optional = true } serde = { workspace = true } +smallvec = { workspace = true } textwrap = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } diff --git a/crates/uv-resolver/src/candidate_selector.rs b/crates/uv-resolver/src/candidate_selector.rs index 4f443becbd6cb..53c5cb5b1aded 100644 --- a/crates/uv-resolver/src/candidate_selector.rs +++ b/crates/uv-resolver/src/candidate_selector.rs @@ -1,7 +1,9 @@ use std::fmt::{Display, Formatter}; +use either::Either; use itertools::Itertools; use pubgrub::Range; +use smallvec::SmallVec; use tracing::{debug, trace}; use uv_configuration::IndexStrategy; @@ -11,7 +13,7 @@ use uv_normalize::PackageName; use uv_pep440::Version; use uv_types::InstalledPackagesProvider; -use crate::preferences::Preferences; +use crate::preferences::{Entry, Preferences}; use crate::prerelease::{AllowPrerelease, PrereleaseStrategy}; use crate::resolution_mode::ResolutionStrategy; use crate::universal_marker::UniversalMarker; @@ -178,23 +180,53 @@ impl CandidateSelector { index: Option<&'a IndexUrl>, env: &ResolverEnvironment, ) -> Option> { - // In the branches, we "sort" the preferences by marker-matching through an iterator that - // first has the matching half and then the mismatching half. - let preferences_match = preferences - .get(package_name) - .filter(|(marker, _index, _version)| env.included_by_marker(marker.pep508())); - let preferences_mismatch = preferences - .get(package_name) - .filter(|(marker, _index, _version)| !env.included_by_marker(marker.pep508())); - let preferences = preferences_match.chain(preferences_mismatch).filter_map( - |(marker, source, version)| { - // If the package is mapped to an explicit index, only consider preferences that - // match the index. - index - .map_or(true, |index| source == Some(index)) - .then_some((marker, version)) - }, - ); + let preferences = preferences.get(package_name); + + // If there are multiple preferences for the same package, we need to sort them by priority. + let preferences = match preferences { + [] => return None, + [entry] => { + // Filter out preferences that map to a conflicting index. + if index.is_some_and(|index| { + entry + .index() + .is_some_and(|entry_index| entry_index != index) + }) { + return None; + }; + Either::Left(std::iter::once((entry.marker(), entry.pin().version()))) + } + [..] => { + type Entries<'a> = SmallVec<[&'a Entry; 3]>; + + let mut preferences = preferences.iter().collect::(); + preferences.retain(|entry| { + // Filter out preferences that map to a conflicting index. + !index.is_some_and(|index| { + entry + .index() + .is_some_and(|entry_index| entry_index != index) + }) + }); + preferences.sort_by_key(|entry| { + let marker = entry.marker(); + + // Prefer preferences that match the current environment. + let matches_env = env.included_by_marker(marker.pep508()); + + // Prefer preferences that match the current index. 
+ let matches_index = index == entry.index(); + + std::cmp::Reverse((matches_env, matches_index)) + }); + Either::Right( + preferences + .into_iter() + .map(|entry| (entry.marker(), entry.pin().version())), + ) + } + }; + self.get_preferred_from_iter( preferences, package_name, diff --git a/crates/uv-resolver/src/error.rs b/crates/uv-resolver/src/error.rs index 188a02696c17d..ef1a1689afae3 100644 --- a/crates/uv-resolver/src/error.rs +++ b/crates/uv-resolver/src/error.rs @@ -447,6 +447,7 @@ impl std::fmt::Display for NoSolutionError { &self.fork_urls, &self.fork_indexes, &self.env, + self.tags.as_ref(), &self.workspace_members, &self.options, &mut additional_hints, diff --git a/crates/uv-resolver/src/lock/mod.rs b/crates/uv-resolver/src/lock/mod.rs index f518b73e20140..51045b2923e98 100644 --- a/crates/uv-resolver/src/lock/mod.rs +++ b/crates/uv-resolver/src/lock/mod.rs @@ -17,7 +17,7 @@ use url::Url; use uv_cache_key::RepositoryUrl; use uv_configuration::BuildOptions; -use uv_distribution::DistributionDatabase; +use uv_distribution::{DistributionDatabase, RequiresDist}; use uv_distribution_filename::{ BuildTag, DistExtension, ExtensionError, SourceDistExtension, WheelFilename, }; @@ -1213,58 +1213,80 @@ impl Lock { continue; } - // Get the metadata for the distribution. - let dist = package.to_dist( - root, - // When validating, it's okay to use wheels that don't match the current platform. - TagPolicy::Preferred(tags), - // When validating, it's okay to use (e.g.) a source distribution with `--no-build`. - // We're just trying to determine whether the lockfile is up-to-date. If we end - // up needing to build a source distribution in order to do so, below, we'll error - // there. - &BuildOptions::default(), - )?; - // Fetch the metadata for the distribution. // - // TODO(charlie): We don't need the version here, so we could avoid running a PEP 517 - // build if only the version is dynamic. - let metadata = { - let id = dist.version_id(); - if let Some(archive) = - index - .distributions() - .get(&id) - .as_deref() - .and_then(|response| { - if let MetadataResponse::Found(archive, ..) = response { - Some(archive) - } else { - None - } - }) - { - // If the metadata is already in the index, return it. - archive.metadata.clone() - } else { - // Run the PEP 517 build process to extract metadata from the source distribution. - let archive = database - .get_or_build_wheel_metadata(&dist, hasher.get(&dist)) - .await - .map_err(|err| LockErrorKind::Resolution { - id: package.id.clone(), - err, - })?; - - let metadata = archive.metadata.clone(); + // If the distribution is a source tree, attempt to extract the requirements from the + // `pyproject.toml` directly. The distribution database will do this too, but we can be + // even more aggressive here since we _only_ need the requirements. So, for example, + // even if the version is dynamic, we can still extract the requirements without + // performing a build, unlike in the database where we typically construct a "complete" + // metadata object. + let metadata = if let Some(source_tree) = package.id.source.as_source_tree() { + database + .requires_dist(root.join(source_tree)) + .await + .map_err(|err| LockErrorKind::Resolution { + id: package.id.clone(), + err, + })? + } else { + None + }; - // Insert the metadata into the index. - index - .distributions() - .done(id, Arc::new(MetadataResponse::Found(archive))); + let metadata = if let Some(metadata) = metadata { + metadata + } else { + // Get the metadata for the distribution. 
+ let dist = package.to_dist( + root, + // When validating, it's okay to use wheels that don't match the current platform. + TagPolicy::Preferred(tags), + // When validating, it's okay to use (e.g.) a source distribution with `--no-build`. + // We're just trying to determine whether the lockfile is up-to-date. If we end + // up needing to build a source distribution in order to do so, below, we'll error + // there. + &BuildOptions::default(), + )?; + + let metadata = { + let id = dist.version_id(); + if let Some(archive) = + index + .distributions() + .get(&id) + .as_deref() + .and_then(|response| { + if let MetadataResponse::Found(archive, ..) = response { + Some(archive) + } else { + None + } + }) + { + // If the metadata is already in the index, return it. + archive.metadata.clone() + } else { + // Run the PEP 517 build process to extract metadata from the source distribution. + let archive = database + .get_or_build_wheel_metadata(&dist, hasher.get(&dist)) + .await + .map_err(|err| LockErrorKind::Resolution { + id: package.id.clone(), + err, + })?; + + let metadata = archive.metadata.clone(); + + // Insert the metadata into the index. + index + .distributions() + .done(id, Arc::new(MetadataResponse::Found(archive))); + + metadata + } + }; - metadata - } + RequiresDist::from(metadata) }; // Validate the `requires-dist` metadata. @@ -2770,13 +2792,21 @@ impl Source { } /// Returns `true` if the source is that of a source tree. - pub(crate) fn is_source_tree(&self) -> bool { + fn is_source_tree(&self) -> bool { match self { Source::Directory(..) | Source::Editable(..) | Source::Virtual(..) => true, Source::Path(..) | Source::Git(..) | Source::Registry(..) | Source::Direct(..) => false, } } + /// Returns the path to the source tree, if the source is a source tree. + fn as_source_tree(&self) -> Option<&Path> { + match self { + Source::Directory(path) | Source::Editable(path) | Source::Virtual(path) => Some(path), + Source::Path(..) | Source::Git(..) | Source::Registry(..) | Source::Direct(..) => None, + } + } + fn to_toml(&self, table: &mut Table) { let mut source_table = InlineTable::new(); match *self { diff --git a/crates/uv-resolver/src/marker.rs b/crates/uv-resolver/src/marker.rs index 4beb29d773ea2..089e1ad2db710 100644 --- a/crates/uv-resolver/src/marker.rs +++ b/crates/uv-resolver/src/marker.rs @@ -1,4 +1,7 @@ -use pubgrub::Range; +use pubgrub::Ranges; +use smallvec::SmallVec; +use std::ops::Bound; + use uv_pep440::Version; use uv_pep508::{CanonicalMarkerValueVersion, MarkerTree, MarkerTreeKind}; @@ -6,58 +9,77 @@ use crate::requires_python::{LowerBound, RequiresPythonRange, UpperBound}; /// Returns the bounding Python versions that can satisfy the [`MarkerTree`], if it's constrained. pub(crate) fn requires_python(tree: MarkerTree) -> Option { - fn collect_python_markers(tree: MarkerTree, markers: &mut Vec>) { + /// A small vector of Python version markers. + type Markers = SmallVec<[Ranges; 3]>; + + /// Collect the Python version markers from the tree. + /// + /// Specifically, performs a DFS to collect all Python requirements on the path to every + /// `MarkerTreeKind::True` node. 
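The function below performs the DFS and then unions the collected per-path ranges before collapsing them into a single bounding range. A minimal sketch of that union-and-bound step, assuming the `pubgrub::Ranges` API referenced in this file (`from_range_bounds`, `union`, `bounding_range`) and using plain `u32` minor versions in place of `uv_pep440::Version`:

```rust
use std::ops::Bound;

use pubgrub::Ranges;

fn main() {
    // Two hypothetical per-fork Python requirements: `>= 3.8, < 3.9` and
    // `>= 3.10, < 3.11`, modeled as ranges over the minor version only.
    let a: Ranges<u32> = Ranges::from_range_bounds((Bound::Included(8u32), Bound::Excluded(9u32)));
    let b: Ranges<u32> = Ranges::from_range_bounds((Bound::Included(10u32), Bound::Excluded(11u32)));

    // Union the per-path ranges, then take the overall bounding range: the result
    // spans from the lowest lower bound to the highest upper bound.
    let combined = a.union(&b);
    let (lower, upper) = combined.bounding_range().expect("non-empty range");
    assert_eq!(lower, Bound::Included(&8u32));
    assert_eq!(upper, Bound::Excluded(&11u32));
}
```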
+ fn collect_python_markers(tree: MarkerTree, markers: &mut Markers, range: &Ranges) { match tree.kind() { - MarkerTreeKind::True | MarkerTreeKind::False => {} + MarkerTreeKind::True => { + markers.push(range.clone()); + } + MarkerTreeKind::False => {} MarkerTreeKind::Version(marker) => match marker.key() { CanonicalMarkerValueVersion::PythonFullVersion => { for (range, tree) in marker.edges() { - if !tree.is_false() { - markers.push(range.clone()); - } + collect_python_markers(tree, markers, range); } } CanonicalMarkerValueVersion::ImplementationVersion => { for (_, tree) in marker.edges() { - collect_python_markers(tree, markers); + collect_python_markers(tree, markers, range); } } }, MarkerTreeKind::String(marker) => { for (_, tree) in marker.children() { - collect_python_markers(tree, markers); + collect_python_markers(tree, markers, range); } } MarkerTreeKind::In(marker) => { for (_, tree) in marker.children() { - collect_python_markers(tree, markers); + collect_python_markers(tree, markers, range); } } MarkerTreeKind::Contains(marker) => { for (_, tree) in marker.children() { - collect_python_markers(tree, markers); + collect_python_markers(tree, markers, range); } } MarkerTreeKind::Extra(marker) => { for (_, tree) in marker.children() { - collect_python_markers(tree, markers); + collect_python_markers(tree, markers, range); } } } } - let mut markers = Vec::new(); - collect_python_markers(tree, &mut markers); + if tree.is_true() || tree.is_false() { + return None; + } + + let mut markers = Markers::new(); + collect_python_markers(tree, &mut markers, &Ranges::full()); - // Take the union of all Python version markers. + // If there are no Python version markers, return `None`. + if markers.iter().all(|range| { + let Some((lower, upper)) = range.bounding_range() else { + return true; + }; + matches!((lower, upper), (Bound::Unbounded, Bound::Unbounded)) + }) { + return None; + } + + // Take the union of the intersections of the Python version markers. let range = markers .into_iter() - .fold(None, |acc: Option>, range| { - Some(match acc { - Some(acc) => acc.union(&range), - None => range.clone(), - }) - })?; + .fold(Ranges::empty(), |acc: Ranges, range| { + acc.union(&range) + }); let (lower, upper) = range.bounding_range()?; @@ -66,3 +88,97 @@ pub(crate) fn requires_python(tree: MarkerTree) -> Option { UpperBound::new(upper.cloned()), )) } + +#[cfg(test)] +mod tests { + use std::ops::Bound; + use std::str::FromStr; + + use super::*; + + #[test] + fn test_requires_python() { + // An exact version match. + let tree = MarkerTree::from_str("python_full_version == '3.8.*'").unwrap(); + let range = requires_python(tree).unwrap(); + assert_eq!( + *range.lower(), + LowerBound::new(Bound::Included(Version::from_str("3.8").unwrap())) + ); + assert_eq!( + *range.upper(), + UpperBound::new(Bound::Excluded(Version::from_str("3.9").unwrap())) + ); + + // A version range with exclusive bounds. + let tree = + MarkerTree::from_str("python_full_version > '3.8' and python_full_version < '3.9'") + .unwrap(); + let range = requires_python(tree).unwrap(); + assert_eq!( + *range.lower(), + LowerBound::new(Bound::Excluded(Version::from_str("3.8").unwrap())) + ); + assert_eq!( + *range.upper(), + UpperBound::new(Bound::Excluded(Version::from_str("3.9").unwrap())) + ); + + // A version range with inclusive bounds. 
+ let tree = + MarkerTree::from_str("python_full_version >= '3.8' and python_full_version <= '3.9'") + .unwrap(); + let range = requires_python(tree).unwrap(); + assert_eq!( + *range.lower(), + LowerBound::new(Bound::Included(Version::from_str("3.8").unwrap())) + ); + assert_eq!( + *range.upper(), + UpperBound::new(Bound::Included(Version::from_str("3.9").unwrap())) + ); + + // A version with a lower bound. + let tree = MarkerTree::from_str("python_full_version >= '3.8'").unwrap(); + let range = requires_python(tree).unwrap(); + assert_eq!( + *range.lower(), + LowerBound::new(Bound::Included(Version::from_str("3.8").unwrap())) + ); + assert_eq!(*range.upper(), UpperBound::new(Bound::Unbounded)); + + // A version with an upper bound. + let tree = MarkerTree::from_str("python_full_version < '3.9'").unwrap(); + let range = requires_python(tree).unwrap(); + assert_eq!(*range.lower(), LowerBound::new(Bound::Unbounded)); + assert_eq!( + *range.upper(), + UpperBound::new(Bound::Excluded(Version::from_str("3.9").unwrap())) + ); + + // A disjunction with a non-Python marker (i.e., an unbounded range). + let tree = + MarkerTree::from_str("python_full_version > '3.8' or sys_platform == 'win32'").unwrap(); + let range = requires_python(tree).unwrap(); + assert_eq!(*range.lower(), LowerBound::new(Bound::Unbounded)); + assert_eq!(*range.upper(), UpperBound::new(Bound::Unbounded)); + + // A complex mix of conjunctions and disjunctions. + let tree = MarkerTree::from_str("(python_full_version >= '3.8' and python_full_version < '3.9') or (python_full_version >= '3.10' and python_full_version < '3.11')").unwrap(); + let range = requires_python(tree).unwrap(); + assert_eq!( + *range.lower(), + LowerBound::new(Bound::Included(Version::from_str("3.8").unwrap())) + ); + assert_eq!( + *range.upper(), + UpperBound::new(Bound::Excluded(Version::from_str("3.11").unwrap())) + ); + + // An unbounded range across two specifiers. + let tree = + MarkerTree::from_str("python_full_version > '3.8' or python_full_version <= '3.8'") + .unwrap(); + assert_eq!(requires_python(tree), None); + } +} diff --git a/crates/uv-resolver/src/preferences.rs b/crates/uv-resolver/src/preferences.rs index feb14ce21d98e..7f418fd3a944d 100644 --- a/crates/uv-resolver/src/preferences.rs +++ b/crates/uv-resolver/src/preferences.rs @@ -121,12 +121,29 @@ impl Preference { } #[derive(Debug, Clone)] -struct Entry { +pub(crate) struct Entry { marker: UniversalMarker, index: Option, pin: Pin, } +impl Entry { + /// Return the [`UniversalMarker`] associated with the entry. + pub(crate) fn marker(&self) -> &UniversalMarker { + &self.marker + } + + /// Return the [`IndexUrl`] associated with the entry, if any. + pub(crate) fn index(&self) -> Option<&IndexUrl> { + self.index.as_ref() + } + + /// Return the pinned data associated with the entry. + pub(crate) fn pin(&self) -> &Pin { + &self.pin + } +} + /// A set of pinned packages that should be preserved during resolution, if possible. /// /// The marker is the marker of the fork that resolved to the pin, if any. @@ -232,15 +249,11 @@ impl Preferences { } /// Return the pinned version for a package, if any. 
- pub(crate) fn get( - &self, - package_name: &PackageName, - ) -> impl Iterator, &Version)> { + pub(crate) fn get(&self, package_name: &PackageName) -> &[Entry] { self.0 .get(package_name) - .into_iter() - .flatten() - .map(|entry| (&entry.marker, entry.index.as_ref(), entry.pin.version())) + .map(Vec::as_slice) + .unwrap_or_default() } /// Return the hashes for a package, if the version matches that of the pin. @@ -273,12 +286,12 @@ pub(crate) struct Pin { impl Pin { /// Return the version of the pinned package. - fn version(&self) -> &Version { + pub(crate) fn version(&self) -> &Version { &self.version } /// Return the hashes of the pinned package. - fn hashes(&self) -> &[HashDigest] { + pub(crate) fn hashes(&self) -> &[HashDigest] { &self.hashes } } diff --git a/crates/uv-resolver/src/pubgrub/report.rs b/crates/uv-resolver/src/pubgrub/report.rs index 0219a2db7463a..5a04e17a7d050 100644 --- a/crates/uv-resolver/src/pubgrub/report.rs +++ b/crates/uv-resolver/src/pubgrub/report.rs @@ -536,6 +536,7 @@ impl PubGrubReportFormatter<'_> { fork_urls: &ForkUrls, fork_indexes: &ForkIndexes, env: &ResolverEnvironment, + tags: Option<&Tags>, workspace_members: &BTreeSet, options: &Options, output_hints: &mut IndexSet, @@ -591,6 +592,7 @@ impl PubGrubReportFormatter<'_> { selector, fork_indexes, env, + tags, ) { output_hints.insert(hint); } @@ -686,6 +688,7 @@ impl PubGrubReportFormatter<'_> { fork_urls, fork_indexes, env, + tags, workspace_members, options, output_hints, @@ -702,6 +705,7 @@ impl PubGrubReportFormatter<'_> { fork_urls, fork_indexes, env, + tags, workspace_members, options, output_hints, @@ -721,6 +725,7 @@ impl PubGrubReportFormatter<'_> { selector: &CandidateSelector, fork_indexes: &ForkIndexes, env: &ResolverEnvironment, + tags: Option<&Tags>, ) -> Option { let response = if let Some(url) = fork_indexes.get(name) { index.explicit().get(&(name.clone(), url.clone())) @@ -739,7 +744,7 @@ impl PubGrubReportFormatter<'_> { match tag { IncompatibleTag::Invalid => None, IncompatibleTag::Python => { - // Return all available language tags. + let best = tags.and_then(Tags::python_tag); let tags = prioritized.python_tags(); if tags.is_empty() { None @@ -748,10 +753,12 @@ impl PubGrubReportFormatter<'_> { package: name.clone(), version: candidate.version().clone(), tags, + best, }) } } IncompatibleTag::Abi | IncompatibleTag::AbiPythonVersion => { + let best = tags.and_then(Tags::abi_tag); let tags = prioritized .abi_tags() .into_iter() @@ -772,6 +779,7 @@ impl PubGrubReportFormatter<'_> { package: name.clone(), version: candidate.version().clone(), tags, + best, }) } } @@ -1105,6 +1113,8 @@ pub(crate) enum PubGrubHint { version: Version, // excluded from `PartialEq` and `Hash` tags: BTreeSet, + // excluded from `PartialEq` and `Hash` + best: Option, }, /// No wheels are available for a package, and using source distributions was disabled. AbiTags { @@ -1113,6 +1123,8 @@ pub(crate) enum PubGrubHint { version: Version, // excluded from `PartialEq` and `Hash` tags: BTreeSet, + // excluded from `PartialEq` and `Hash` + best: Option, }, /// No wheels are available for a package, and using source distributions was disabled. 
PlatformTags { @@ -1562,37 +1574,81 @@ impl std::fmt::Display for PubGrubHint { package, version, tags, + best, } => { - let s = if tags.len() == 1 { "" } else { "s" }; - write!( - f, - "{}{} Wheels are available for `{}` ({}) with the following Python tag{s}: {}", - "hint".bold().cyan(), - ":".bold(), - package.cyan(), - format!("v{version}").cyan(), - tags.iter() - .map(|tag| format!("`{}`", tag.cyan())) - .join(", "), - ) + if let Some(best) = best { + let s = if tags.len() == 1 { "" } else { "s" }; + let best = if let Some(pretty) = best.pretty() { + format!("{} (`{}`)", pretty.cyan(), best.cyan()) + } else { + format!("{}", best.cyan()) + }; + write!( + f, + "{}{} You require {}, but we only found wheels for `{}` ({}) with the following Python implementation tag{s}: {}", + "hint".bold().cyan(), + ":".bold(), + best, + package.cyan(), + format!("v{version}").cyan(), + tags.iter() + .map(|tag| format!("`{}`", tag.cyan())) + .join(", "), + ) + } else { + let s = if tags.len() == 1 { "" } else { "s" }; + write!( + f, + "{}{} Wheels are available for `{}` ({}) with the following Python implementation tag{s}: {}", + "hint".bold().cyan(), + ":".bold(), + package.cyan(), + format!("v{version}").cyan(), + tags.iter() + .map(|tag| format!("`{}`", tag.cyan())) + .join(", "), + ) + } } Self::AbiTags { package, version, tags, + best, } => { - let s = if tags.len() == 1 { "" } else { "s" }; - write!( - f, - "{}{} Wheels are available for `{}` ({}) with the following ABI tag{s}: {}", - "hint".bold().cyan(), - ":".bold(), - package.cyan(), - format!("v{version}").cyan(), - tags.iter() - .map(|tag| format!("`{}`", tag.cyan())) - .join(", "), - ) + if let Some(best) = best { + let s = if tags.len() == 1 { "" } else { "s" }; + let best = if let Some(pretty) = best.pretty() { + format!("{} (`{}`)", pretty.cyan(), best.cyan()) + } else { + format!("{}", best.cyan()) + }; + write!( + f, + "{}{} You require {}, but we only found wheels for `{}` ({}) with the following Python ABI tag{s}: {}", + "hint".bold().cyan(), + ":".bold(), + best, + package.cyan(), + format!("v{version}").cyan(), + tags.iter() + .map(|tag| format!("`{}`", tag.cyan())) + .join(", "), + ) + } else { + let s = if tags.len() == 1 { "" } else { "s" }; + write!( + f, + "{}{} Wheels are available for `{}` ({}) with the following Python ABI tag{s}: {}", + "hint".bold().cyan(), + ":".bold(), + package.cyan(), + format!("v{version}").cyan(), + tags.iter() + .map(|tag| format!("`{}`", tag.cyan())) + .join(", "), + ) + } } Self::PlatformTags { package, @@ -1786,7 +1842,7 @@ fn update_availability_range( let segment_range = Range::from_range_bounds((lower.clone(), upper.clone())); // Drop the segment if it's disjoint with the available range, e.g., if the segment is - // `foo>999`, and the the available versions are all `<10` it's useless to show. + // `foo>999`, and the available versions are all `<10` it's useless to show. if segment_range.is_disjoint(&available_range) { continue; } diff --git a/crates/uv-resolver/src/python_requirement.rs b/crates/uv-resolver/src/python_requirement.rs index 018525320db8b..178b77866018b 100644 --- a/crates/uv-resolver/src/python_requirement.rs +++ b/crates/uv-resolver/src/python_requirement.rs @@ -100,6 +100,8 @@ impl PythonRequirement { /// Narrow the [`PythonRequirement`] to the given version, if it's stricter (i.e., greater) /// than the current `Requires-Python` minimum. + /// + /// Returns `None` if the given range is not narrower than the current range. 
pub fn narrow(&self, target: &RequiresPythonRange) -> Option<Self> {
Some(Self {
exact: self.exact.clone(),
diff --git a/crates/uv-resolver/src/requires_python.rs b/crates/uv-resolver/src/requires_python.rs
index cef410c92e27a..668d512e406b6 100644
--- a/crates/uv-resolver/src/requires_python.rs
+++ b/crates/uv-resolver/src/requires_python.rs
@@ -115,7 +115,12 @@ impl RequiresPython {
}

/// Narrow the [`RequiresPython`] by computing the intersection with the given range.
+ ///
+ /// Returns `None` if the given range is not narrower than the current range.
pub fn narrow(&self, range: &RequiresPythonRange) -> Option<Self> {
+ if *range == self.range {
+ return None;
+ }
let lower = if range.0 >= self.range.0 {
Some(&range.0)
} else {
@@ -464,7 +469,7 @@ impl RequiresPython {
python_version: (2, ..),
..
}
| AbiTag::PyPy {
- python_version: (2, ..),
+ python_version: None | Some((2, ..)),
..
}
| AbiTag::GraalPy {
python_version: (2, ..),
@@ -478,7 +483,7 @@ impl RequiresPython {
..
}
| AbiTag::PyPy {
- python_version: (3, minor),
+ python_version: Some((3, minor)),
..
}
| AbiTag::GraalPy {
diff --git a/crates/uv-resolver/src/resolver/environment.rs b/crates/uv-resolver/src/resolver/environment.rs
index 5d2914985f106..64da1fdcae7e6 100644
--- a/crates/uv-resolver/src/resolver/environment.rs
+++ b/crates/uv-resolver/src/resolver/environment.rs
@@ -690,17 +690,14 @@ mod tests {
}

/// Inside a fork whose marker's Python requirement is equal
- /// to our Requires-Python means that narrowing produces a
- /// result, but is unchanged from what we started with.
+ /// to our Requires-Python means that narrowing does not produce
+ /// a result.
#[test]
fn narrow_python_requirement_forking_no_op() {
let pyreq = python_requirement("3.10");
let resolver_env = ResolverEnvironment::universal(vec![])
.narrow_environment(marker("python_version >= '3.10'"));
- assert_eq!(
- resolver_env.narrow_python_requirement(&pyreq),
- Some(python_requirement("3.10")),
- );
+ assert_eq!(resolver_env.narrow_python_requirement(&pyreq), None);
}

/// In this test, we narrow a more relaxed requirement compared to the
diff --git a/crates/uv-settings/src/lib.rs b/crates/uv-settings/src/lib.rs
index 7d655bda3cf79..31de316c7419a 100644
--- a/crates/uv-settings/src/lib.rs
+++ b/crates/uv-settings/src/lib.rs
@@ -49,8 +49,16 @@ impl FilesystemOptions {
validate_uv_toml(&file, &options)?;
Ok(Some(Self(options)))
}
- Err(Error::Io(err)) if err.kind() == std::io::ErrorKind::NotFound => Ok(None),
- Err(Error::Io(err)) if err.kind() == std::io::ErrorKind::NotADirectory => Ok(None),
+ Err(Error::Io(err))
+ if matches!(
+ err.kind(),
+ std::io::ErrorKind::NotFound
+ | std::io::ErrorKind::NotADirectory
+ | std::io::ErrorKind::PermissionDenied
+ ) =>
+ {
+ Ok(None)
+ }
Err(err) => Err(err),
}
}
@@ -350,6 +358,26 @@ mod test {
Ok(())
}

+ #[test]
+ #[cfg(unix)]
+ fn test_locate_system_config_xdg_unix_permissions() -> Result<(), FixtureError> {
+ let context = assert_fs::TempDir::new()?;
+ let config = context.child("uv").child("uv.toml");
+ config.write_str("")?;
+ fs_err::set_permissions(
+ &context,
+ std::os::unix::fs::PermissionsExt::from_mode(0o000),
+ )
+ .unwrap();
+
+ assert_eq!(
+ locate_system_config_xdg(Some(context.to_str().unwrap())),
+ None
+ );
+
+ Ok(())
+ }
+
#[test]
#[cfg(windows)]
fn test_windows_config() -> Result<(), FixtureError> {
diff --git a/crates/uv-static/src/env_vars.rs b/crates/uv-static/src/env_vars.rs
index 1bfe16ae0537e..c21bd6bd5c657 100644
--- a/crates/uv-static/src/env_vars.rs
+++ b/crates/uv-static/src/env_vars.rs
@@
-234,6 +234,12 @@ impl EnvVars { /// Distributions can be read from a local directory by using the `file://` URL scheme. pub const UV_PYPY_INSTALL_MIRROR: &'static str = "UV_PYPY_INSTALL_MIRROR"; + /// Install seed packages (one or more of: `pip`, `setuptools`, and `wheel`) into the virtual environment + /// created by `uv venv`. + /// + /// Note that `setuptools` and `wheel` are not included in Python 3.12+ environments. + pub const UV_VENV_SEED: &'static str = "UV_VENV_SEED"; + /// Used to override `PATH` to limit Python executable availability in the test suite. #[attr_hidden] pub const UV_TEST_PYTHON_PATH: &'static str = "UV_TEST_PYTHON_PATH"; diff --git a/crates/uv-version/Cargo.toml b/crates/uv-version/Cargo.toml index 876e2d578ab09..6ba603ad872f5 100644 --- a/crates/uv-version/Cargo.toml +++ b/crates/uv-version/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv-version" -version = "0.5.18" +version = "0.5.20" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } diff --git a/crates/uv-workspace/src/pyproject.rs b/crates/uv-workspace/src/pyproject.rs index d2d19cd9b5f41..97afd4afdf5ca 100644 --- a/crates/uv-workspace/src/pyproject.rs +++ b/crates/uv-workspace/src/pyproject.rs @@ -316,7 +316,7 @@ pub struct ToolUv { /// [`extra_index_url`](#extra-index-url). uv will only consider the first index that contains /// a given package, unless an alternative [index strategy](#index-strategy) is specified. /// - /// If an index is marked as `explicit = true`, it will be used exclusively for those + /// If an index is marked as `explicit = true`, it will be used exclusively for the /// dependencies that select it explicitly via `[tool.uv.sources]`, as in: /// /// ```toml @@ -394,8 +394,8 @@ pub struct ToolUv { /// /// Use of this field is not recommend anymore. Instead, use the `dependency-groups.dev` field /// which is a standardized way to declare development dependencies. The contents of - /// `tool.uv.dev-dependencies` and `dependency-groups.dev` are combined to determine the the - /// final requirements of the `dev` dependency group. + /// `tool.uv.dev-dependencies` and `dependency-groups.dev` are combined to determine the final + /// requirements of the `dev` dependency group. #[cfg_attr( feature = "schemars", schemars( @@ -473,7 +473,7 @@ pub struct ToolUv { value_type = "list[str]", example = r#" # Ensure that the grpcio version is always less than 1.65, if it's requested by a - # transitive dependency. + # direct or transitive dependency. constraint-dependencies = ["grpcio<1.65"] "# )] @@ -485,7 +485,7 @@ pub struct ToolUv { /// However, you can restrict the set of supported environments to improve performance and avoid /// unsatisfiable branches in the solution space. /// - /// These environments will also respected when `uv pip compile` is invoked with the + /// These environments will also be respected when `uv pip compile` is invoked with the /// `--universal` flag. #[cfg_attr( feature = "schemars", diff --git a/crates/uv/Cargo.toml b/crates/uv/Cargo.toml index de27506f98d4c..fcb54d8a482a9 100644 --- a/crates/uv/Cargo.toml +++ b/crates/uv/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "uv" -version = "0.5.18" +version = "0.5.20" edition = { workspace = true } rust-version = { workspace = true } homepage = { workspace = true } @@ -133,8 +133,8 @@ ignored = [ ] [features] -default = ["python", "python-managed", "pypi", "git", "performance", "crates-io"] -# Use better memory allocators, etc. — also turns-on self-update. 
+default = ["python", "python-managed", "pypi", "git", "performance", "crates-io", "test-ecosystem"]
+# Use better memory allocators, etc.
performance = [
"performance-memory-allocator",
"performance-flate2-backend",
@@ -156,5 +156,7 @@ pypi = []
git = []
# Introduces a dependency on crates.io.
crates-io = []
+# Includes test cases that require ecosystem packages
+test-ecosystem = []
# Adds self-update functionality.
self-update = ["axoupdater", "uv-cli/self-update"]
diff --git a/crates/uv/src/commands/project/install_target.rs b/crates/uv/src/commands/project/install_target.rs
index 78d88640960f3..066afe2cc3737 100644
--- a/crates/uv/src/commands/project/install_target.rs
+++ b/crates/uv/src/commands/project/install_target.rs
@@ -3,7 +3,7 @@ use std::path::Path;
use std::str::FromStr;

use itertools::Either;
-
+use uv_distribution_types::Index;
use uv_normalize::PackageName;
use uv_pypi_types::{LenientRequirement, VerbatimParsedUrl};
use uv_resolver::{Installable, Lock, Package};
@@ -88,6 +88,38 @@ impl<'lock> Installable<'lock> for InstallTarget<'lock> {
}

impl<'lock> InstallTarget<'lock> {
+ /// Return an iterator over the [`Index`] definitions in the target.
+ pub(crate) fn indexes(self) -> impl Iterator<Item = &'lock Index> {
+ match self {
+ Self::Project { workspace, .. }
+ | Self::Workspace { workspace, .. }
+ | Self::NonProjectWorkspace { workspace, .. } => {
+ Either::Left(workspace.indexes().iter().chain(
+ workspace.packages().values().flat_map(|member| {
+ member
+ .pyproject_toml()
+ .tool
+ .as_ref()
+ .and_then(|tool| tool.uv.as_ref())
+ .and_then(|uv| uv.index.as_ref())
+ .into_iter()
+ .flatten()
+ }),
+ ))
+ }
+ Self::Script { script, .. } => Either::Right(
+ script
+ .metadata
+ .tool
+ .as_ref()
+ .and_then(|tool| tool.uv.as_ref())
+ .and_then(|uv| uv.top_level.index.as_deref())
+ .into_iter()
+ .flatten(),
+ ),
+ }
+ }
+
/// Return an iterator over all [`Sources`] defined by the target.
pub(crate) fn sources(&self) -> impl Iterator<Item = &Source> {
match self {
diff --git a/crates/uv/src/commands/project/lock.rs b/crates/uv/src/commands/project/lock.rs
index 845f15a041e36..1f7ece16db630 100644
--- a/crates/uv/src/commands/project/lock.rs
+++ b/crates/uv/src/commands/project/lock.rs
@@ -468,6 +468,12 @@ async fn do_lock(
}
}

+ for index in target.indexes() {
+ if let Some(credentials) = index.credentials() {
+ uv_auth::store_credentials(index.raw_url(), credentials);
+ }
+ }
+
// Initialize the registry client.
let client = RegistryClientBuilder::new(cache.clone())
.native_tls(native_tls)
diff --git a/crates/uv/src/commands/project/lock_target.rs b/crates/uv/src/commands/project/lock_target.rs
index ba3e9727b88cf..e81ca37e79b38 100644
--- a/crates/uv/src/commands/project/lock_target.rs
+++ b/crates/uv/src/commands/project/lock_target.rs
@@ -5,7 +5,7 @@ use itertools::Either;

use uv_configuration::{LowerBound, SourceStrategy};
use uv_distribution::LoweredRequirement;
-use uv_distribution_types::IndexLocations;
+use uv_distribution_types::{Index, IndexLocations};
use uv_normalize::{GroupName, PackageName};
use uv_pep508::RequirementOrigin;
use uv_pypi_types::{Conflicts, Requirement, SupportedEnvironments, VerbatimParsedUrl};
@@ -159,6 +159,34 @@ impl<'lock> LockTarget<'lock> {
}
}

+ /// Return an iterator over the [`Index`] definitions in the [`LockTarget`].
+ pub(crate) fn indexes(self) -> impl Iterator<Item = &'lock Index> {
+ match self {
+ Self::Workspace(workspace) => Either::Left(workspace.indexes().iter().chain(
+ workspace.packages().values().flat_map(|member| {
+ member
+ .pyproject_toml()
+ .tool
+ .as_ref()
+ .and_then(|tool| tool.uv.as_ref())
+ .and_then(|uv| uv.index.as_ref())
+ .into_iter()
+ .flatten()
+ }),
+ )),
+ Self::Script(script) => Either::Right(
+ script
+ .metadata
+ .tool
+ .as_ref()
+ .and_then(|tool| tool.uv.as_ref())
+ .and_then(|uv| uv.top_level.index.as_deref())
+ .into_iter()
+ .flatten(),
+ ),
+ }
+ }
+
/// Return the `Requires-Python` bound for the [`LockTarget`].
pub(crate) fn requires_python(self) -> Option<RequiresPython> {
match self {
diff --git a/crates/uv/src/commands/project/sync.rs b/crates/uv/src/commands/project/sync.rs
index 4d1da92e07ed4..8558e07b4f78f 100644
--- a/crates/uv/src/commands/project/sync.rs
+++ b/crates/uv/src/commands/project/sync.rs
@@ -363,8 +363,8 @@ pub(super) async fn do_sync(
}
}

- // Populate credentials from the workspace.
- store_credentials_from_workspace(target);
+ // Populate credentials from the target.
+ store_credentials_from_target(target);

// Initialize the registry client.
let client = RegistryClientBuilder::new(cache.clone())
@@ -522,7 +522,14 @@ fn apply_editable_mode(resolution: Resolution, editable: EditableMode) -> Resolu
///
/// These credentials can come from any of `tool.uv.sources`, `tool.uv.dev-dependencies`,
/// `project.dependencies`, and `project.optional-dependencies`.
-fn store_credentials_from_workspace(target: InstallTarget<'_>) {
+fn store_credentials_from_target(target: InstallTarget<'_>) {
+ // Iterate over any indexes in the target.
+ for index in target.indexes() {
+ if let Some(credentials) = index.credentials() {
+ store_credentials(index.raw_url(), credentials);
+ }
+ }
+
// Iterate over any sources in the target.
for source in target.sources() {
match source {
diff --git a/crates/uv/src/commands/python/install.rs b/crates/uv/src/commands/python/install.rs
index 5f482fea51120..e2eb24c93b071 100644
--- a/crates/uv/src/commands/python/install.rs
+++ b/crates/uv/src/commands/python/install.rs
@@ -312,6 +312,9 @@ pub(crate) async fn install(
installation.ensure_externally_managed()?;
installation.ensure_sysconfig_patched()?;
installation.ensure_canonical_executables()?;
+ if let Err(e) = installation.ensure_dylib_patched() {
+ e.warn_user(installation);
+ }

if preview.is_disabled() {
debug!("Skipping installation of Python executables, use `--preview` to enable.");
@@ -359,28 +362,30 @@ pub(crate) async fn install(
target.simplified_display()
);

- // Check if the existing link is valid
- let valid_link = target
- .read_link()
- .and_then(|target| target.try_exists())
- .inspect_err(|err| debug!("Failed to inspect executable with error: {err}"))
- .unwrap_or(true);

- // Figure out what installation it references, if any
- let existing = valid_link
- .then(|| {
- find_matching_bin_link(
- installations
- .iter()
- .copied()
- .chain(existing_installations.iter()),
- &target,
- )
- })
- .flatten();
+ let existing = find_matching_bin_link(
+ installations
+ .iter()
+ .copied()
+ .chain(existing_installations.iter()),
+ &target,
+ );

match existing {
None => {
+ // Determine if the link is valid, i.e., if it points to an existing
+ // Python we don't manage. On Windows, we just assume it is valid because
+ // symlinks are not common for Python interpreters.
+ let valid_link = cfg!(windows) + || target + .read_link() + .and_then(|target| target.try_exists()) + .inspect_err(|err| { + debug!("Failed to inspect executable with error: {err}"); + }) + // If we can't verify the link, assume it is valid. + .unwrap_or(true); + // There's an existing executable we don't manage, require `--force` if valid_link { if !force { diff --git a/crates/uv/src/commands/python/pin.rs b/crates/uv/src/commands/python/pin.rs index 4f0f3d135aa92..a16439e6b0dcb 100644 --- a/crates/uv/src/commands/python/pin.rs +++ b/crates/uv/src/commands/python/pin.rs @@ -203,8 +203,8 @@ fn warn_if_existing_pin_incompatible_with_project( } } - // If the there is not a version in the pinned request, attempt to resolve the pin into an interpreter - // to check for compatibility on the current system. + // If there is not a version in the pinned request, attempt to resolve the pin into an + // interpreter to check for compatibility on the current system. match PythonInstallation::find( pin, EnvironmentPreference::OnlySystem, diff --git a/crates/uv/src/lib.rs b/crates/uv/src/lib.rs index fcc6cb6b75834..0734e3deca758 100644 --- a/crates/uv/src/lib.rs +++ b/crates/uv/src/lib.rs @@ -233,7 +233,7 @@ async fn run(mut cli: Cli) -> Result { let package_version = uv_pep440::Version::from_str(uv_version::version())?; if !required_version.contains(&package_version) { return Err(anyhow::anyhow!( - "Required version `{required_version}` does not match the running version `{package_version}`", + "Required uv version `{required_version}` does not match the running version `{package_version}`", )); } } diff --git a/crates/uv/tests/it/lock.rs b/crates/uv/tests/it/lock.rs index 82c6f3722b435..cc9137f04efaa 100644 --- a/crates/uv/tests/it/lock.rs +++ b/crates/uv/tests/it/lock.rs @@ -3419,6 +3419,88 @@ fn lock_partial_git() -> Result<()> { Ok(()) } +/// See: +#[test] +fn lock_unsupported_tag() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["watchdog"] + "#, + )?; + + let lock = context.temp_dir.child("uv.lock"); + lock.write_str(r#" + version = 1 + requires-python = ">=3.12.0" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." 
} + dependencies = [ + { name = "watchdog" }, + ] + + [package.metadata] + requires-dist = [{ name = "watchdog" }] + + [[package]] + name = "watchdog" + version = "6.0.0" + source = { registry = "https://pypi.org/simple" } + sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220 } + wheels = [ + { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480 }, + { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451 }, + { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057 }, + { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079 }, + { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078 }, + { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076 }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077 }, + { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078 }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077 }, + { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078 }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065 }, + { url = 
"https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070 }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067 }, + ] + "#)?; + + // Re-run with `--locked`. + uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 2 packages in [TIME] + "###); + + // Install from the lockfile. + uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Prepared 1 package in [TIME] + Installed 1 package in [TIME] + + watchdog==6.0.0 + "###); + + Ok(()) +} + /// Respect locked versions with `uv lock`, unless `--upgrade` is passed. #[test] #[cfg(feature = "git")] @@ -6758,7 +6840,7 @@ fn lock_requires_python_no_wheels() -> Result<()> { × No solution found when resolving dependencies: ╰─▶ Because dearpygui==1.9.1 has no wheels with a matching Python version tag (e.g., `cp312`) and your project depends on dearpygui==1.9.1, we can conclude that your project's requirements are unsatisfiable. - hint: Wheels are available for `dearpygui` (v1.9.1) with the following ABI tags: `cp37m`, `cp38`, `cp39`, `cp310`, `cp311` + hint: Wheels are available for `dearpygui` (v1.9.1) with the following Python ABI tags: `cp37m`, `cp38`, `cp39`, `cp310`, `cp311` "###); Ok(()) @@ -7199,14 +7281,14 @@ fn lock_relative_lock_deserialization() -> Result<()> { uv_snapshot!(context.filters(), context.lock().current_dir(&child), @r###" success: false - exit_code: 2 + exit_code: 1 ----- stdout ----- ----- stderr ----- Using CPython 3.12.[X] interpreter at: [PYTHON-3.12] - error: Failed to generate package metadata for `child==0.1.0 @ editable+.` - Caused by: Failed to parse entry: `member` - Caused by: `member` references a workspace in `tool.uv.sources` (e.g., `member = { workspace = true }`), but is not a workspace member + × Failed to build `child @ file://[TEMP_DIR]/packages/child` + ├─▶ Failed to parse entry: `member` + ╰─▶ `member` references a workspace in `tool.uv.sources` (e.g., `member = { workspace = true }`), but is not a workspace member "###); Ok(()) @@ -7421,6 +7503,146 @@ fn lock_peer_member() -> Result<()> { Ok(()) } +/// Lock a workspace in which a member defines an explicit index that requires authentication. 
+#[test] +fn lock_index_workspace_member() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["child"] + + [tool.uv.workspace] + members = ["child"] + + [tool.uv.sources] + child = { workspace = true } + "#, + )?; + + let child = context.temp_dir.child("child"); + fs_err::create_dir_all(&child)?; + + let pyproject_toml = child.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "child" + version = "0.1.0" + requires-python = ">=3.12" + dependencies = ["iniconfig>=2"] + + [[tool.uv.index]] + name = "my-index" + url = "https://pypi-proxy.fly.dev/basic-auth/simple" + explicit = true + + [tool.uv.sources] + iniconfig = { index = "my-index" } + + [build-system] + requires = ["setuptools>=42"] + build-backend = "setuptools.build_meta" + "#, + )?; + + // Locking without the necessary credentials should fail. + uv_snapshot!(context.filters(), context.lock(), @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + × No solution found when resolving dependencies: + ╰─▶ Because iniconfig was not found in the package registry and child depends on iniconfig>=2, we can conclude that child's requirements are unsatisfiable. + And because your workspace requires child, we can conclude that your workspace's requirements are unsatisfiable. + "###); + + uv_snapshot!(context.filters(), context.lock() + .env("UV_INDEX_MY_INDEX_USERNAME", "public") + .env("UV_INDEX_MY_INDEX_PASSWORD", "heron"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + let lock = fs_err::read_to_string(context.temp_dir.join("uv.lock")).unwrap(); + + insta::with_settings!({ + filters => context.filters(), + }, { + assert_snapshot!( + lock, @r###" + version = 1 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [manifest] + members = [ + "child", + "project", + ] + + [[package]] + name = "child" + version = "0.1.0" + source = { editable = "child" } + dependencies = [ + { name = "iniconfig" }, + ] + + [package.metadata] + requires-dist = [{ name = "iniconfig", specifier = ">=2", index = "https://pypi-proxy.fly.dev/basic-auth/simple" }] + + [[package]] + name = "iniconfig" + version = "2.0.0" + source = { registry = "https://pypi-proxy.fly.dev/basic-auth/simple" } + sdist = { url = "https://pypi-proxy.fly.dev/basic-auth/files/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } + wheels = [ + { url = "https://pypi-proxy.fly.dev/basic-auth/files/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, + ] + + [[package]] + name = "project" + version = "0.1.0" + source = { virtual = "." } + dependencies = [ + { name = "child" }, + ] + + [package.metadata] + requires-dist = [{ name = "child", editable = "child" }] + "### + ); + }); + + // Re-run with `--locked`. 
+ uv_snapshot!(context.filters(), context.lock() + .env("UV_INDEX_MY_INDEX_USERNAME", "public") + .env("UV_INDEX_MY_INDEX_PASSWORD", "heron") + .arg("--locked"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 3 packages in [TIME] + "###); + + Ok(()) +} + /// Ensure that development dependencies are omitted for non-workspace members. Below, `bar` depends /// on `foo`, but `bar/uv.lock` should omit `anyio`, but should include `typing-extensions`. #[test] @@ -14854,11 +15076,13 @@ fn lock_explicit_default_index() -> Result<()> { DEBUG Using Python request `>=3.12` from `requires-python` metadata DEBUG The virtual environment's Python version satisfies `>=3.12` DEBUG Using request timeout of [TIME] - DEBUG Found static `pyproject.toml` for: project @ file://[TEMP_DIR]/ + DEBUG Found static `requires-dist` for: [TEMP_DIR]/ DEBUG No workspace root found, using project root DEBUG Ignoring existing lockfile due to mismatched requirements for: `project==0.1.0` Requested: {Requirement { name: PackageName("anyio"), extras: [], groups: [], marker: true, source: Registry { specifier: VersionSpecifiers([]), index: None, conflict: None }, origin: None }} Existing: {Requirement { name: PackageName("iniconfig"), extras: [], groups: [], marker: true, source: Registry { specifier: VersionSpecifiers([VersionSpecifier { operator: Equal, version: "2.0.0" }]), index: Some(Url { scheme: "https", cannot_be_a_base: false, username: "", password: None, host: Some(Domain("test.pypi.org")), port: None, path: "/simple", query: None, fragment: None }), conflict: None }, origin: None }} + DEBUG Found static `pyproject.toml` for: project @ file://[TEMP_DIR]/ + DEBUG No workspace root found, using project root DEBUG Solving with installed Python version: 3.12.[X] DEBUG Solving with target Python version: >=3.12 DEBUG Adding direct dependency: project* @@ -19833,6 +20057,7 @@ fn lock_dynamic_version() -> Result<()> { name = "project" requires-python = ">=3.12" dynamic = ["version"] + dependencies = [] [build-system] requires = ["setuptools"] @@ -19929,6 +20154,72 @@ fn lock_dynamic_version() -> Result<()> { Ok(()) } +/// Validating a lockfile with a dynamic version (but static dependencies) shouldn't require +/// building the package. +#[test] +fn lock_dynamic_version_no_build() -> Result<()> { + let context = TestContext::new("3.12"); + + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str( + r#" + [project] + name = "project" + requires-python = ">=3.12" + dynamic = ["version"] + dependencies = [] + + [build-system] + requires = ["setuptools"] + build-backend = "setuptools.build_meta" + + [tool.uv] + cache-keys = [{ file = "pyproject.toml" }, { file = "src/project/__init__.py" }] + + [tool.setuptools.dynamic] + version = { attr = "project.__version__" } + + [tool.setuptools] + package-dir = { "" = "src" } + + [tool.setuptools.packages.find] + where = ["src"] + "#, + )?; + + context + .temp_dir + .child("src") + .child("project") + .child("__init__.py") + .write_str("__version__ = '0.1.0'")?; + + context.temp_dir.child("uv.lock").write_str(indoc::indoc! { + r#" + version = 1 + requires-python = ">=3.12" + + [options] + exclude-newer = "2024-03-25T00:00:00Z" + + [[package]] + name = "project" + source = { editable = "." } + "#})?; + + // Validate the lockfile with `--offline` to ensure that the package itself isn't built. 
+ uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + + ----- stderr ----- + Resolved 1 package in [TIME] + "###); + + Ok(()) +} + /// Lock a package that depends on a package with a dynamic version using a `workspace` source. #[test] fn lock_dynamic_version_workspace_member() -> Result<()> { diff --git a/crates/uv/tests/it/main.rs b/crates/uv/tests/it/main.rs index 1768dd5e7a4f1..44c7488cb542b 100644 --- a/crates/uv/tests/it/main.rs +++ b/crates/uv/tests/it/main.rs @@ -17,7 +17,7 @@ mod cache_clean; #[cfg(all(feature = "python", feature = "pypi"))] mod cache_prune; -#[cfg(all(feature = "python", feature = "pypi"))] +#[cfg(all(feature = "python", feature = "pypi", feature = "test-ecosystem"))] mod ecosystem; #[cfg(all(feature = "python", feature = "pypi"))] diff --git a/crates/uv/tests/it/pip_compile.rs b/crates/uv/tests/it/pip_compile.rs index 6ba0876ffff38..b4deda2566f7a 100644 --- a/crates/uv/tests/it/pip_compile.rs +++ b/crates/uv/tests/it/pip_compile.rs @@ -13981,7 +13981,7 @@ fn invalid_platform() -> Result<()> { ╰─▶ Because only open3d<=0.18.0 is available and open3d<=0.15.2 has no wheels with a matching Python ABI tag (e.g., `cp310`), we can conclude that open3d<=0.15.2 cannot be used. And because open3d>=0.16.0,<=0.18.0 has no wheels with a matching platform tag (e.g., `manylinux_2_17_x86_64`) and you require open3d, we can conclude that your requirements are unsatisfiable. - hint: Wheels are available for `open3d` (v0.15.2) with the following ABI tags: `cp36m`, `cp37m`, `cp38`, `cp39` + hint: You require CPython 3.10 (`cp310`), but we only found wheels for `open3d` (v0.15.2) with the following Python ABI tags: `cp36m`, `cp37m`, `cp38`, `cp39` hint: Wheels are available for `open3d` (v0.18.0) on the following platforms: `manylinux_2_27_aarch64`, `manylinux_2_27_x86_64`, `macosx_11_0_x86_64`, `macosx_13_0_arm64`, `win_amd64` "###); @@ -14143,6 +14143,40 @@ fn compile_lowest_extra_unpinned_warning() -> Result<()> { Ok(()) } +#[test] +fn disjoint_requires_python() -> Result<()> { + let context = TestContext::new("3.8"); + + let requirements_in = context.temp_dir.child("requirements.in"); + requirements_in.write_str(indoc::indoc! {r" + iniconfig ; platform_python_implementation == 'CPython' and python_version >= '3.10' + coverage + "})?; + + uv_snapshot!(context.filters(), context.pip_compile() + .arg("--universal") + .arg(requirements_in.path()) + .env_remove(EnvVars::UV_EXCLUDE_NEWER), @r###" + success: true + exit_code: 0 + ----- stdout ----- + # This file was autogenerated by uv via the following command: + # uv pip compile --cache-dir [CACHE_DIR] --universal [TEMP_DIR]/requirements.in + coverage==7.6.1 ; python_full_version < '3.9' + # via -r requirements.in + coverage==7.6.10 ; python_full_version >= '3.9' + # via -r requirements.in + iniconfig==2.0.0 ; python_full_version >= '3.10' and platform_python_implementation == 'CPython' + # via -r requirements.in + + ----- stderr ----- + Resolved 3 packages in [TIME] + "### + ); + + Ok(()) +} + /// Test that we use the version in the source distribution filename for compiling, even if the /// version is declared as dynamic. /// @@ -14345,3 +14379,51 @@ fn max_python_requirement() -> Result<()> { Ok(()) } + +/// See: +#[test] +fn respect_index_preference() -> Result<()> { + let context = TestContext::new("3.12"); + let pyproject_toml = context.temp_dir.child("pyproject.toml"); + pyproject_toml.write_str(indoc::indoc! 
{r#" + [project] + name = "project" + version = "0.1.0" + dependencies = ["iniconfig>=1", "typing-extensions>=4"] + + [[tool.uv.index]] + name = "pypi" + url = "https://pypi.org/simple" + explicit = true + + [tool.uv.sources] + iniconfig = { index = "pypi" } + "#})?; + + let requirements_txt = context.temp_dir.child("requirements.txt"); + requirements_txt.write_str(indoc::indoc! {r" + iniconfig==1.1.1 + typing-extensions==4.6.0 + "})?; + + uv_snapshot!(context + .pip_compile() + .arg("pyproject.toml") + .arg("-o") + .arg("requirements.txt"), @r###" + success: true + exit_code: 0 + ----- stdout ----- + # This file was autogenerated by uv via the following command: + # uv pip compile --cache-dir [CACHE_DIR] pyproject.toml -o requirements.txt + iniconfig==1.1.1 + # via project (pyproject.toml) + typing-extensions==4.6.0 + # via project (pyproject.toml) + + ----- stderr ----- + Resolved 2 packages in [TIME] + "###); + + Ok(()) +} diff --git a/crates/uv/tests/it/pip_install.rs b/crates/uv/tests/it/pip_install.rs index cf596cb176860..53b04c1d80798 100644 --- a/crates/uv/tests/it/pip_install.rs +++ b/crates/uv/tests/it/pip_install.rs @@ -6827,7 +6827,7 @@ fn tool_uv_sources() -> Result<()> { [project.optional-dependencies] utils = [ - "boltons==24.0.0" + "charset-normalizer==3.4.0" ] dont_install_me = [ "broken @ https://example.org/does/not/exist.tar.gz" @@ -6835,7 +6835,7 @@ fn tool_uv_sources() -> Result<()> { [tool.uv.sources] tqdm = { url = "https://files.pythonhosted.org/packages/a5/d6/502a859bac4ad5e274255576cd3e15ca273cdb91731bc39fb840dd422ee9/tqdm-4.66.0-py3-none-any.whl" } - boltons = { git = "https://github.com/mahmoud/boltons", rev = "57fbaa9b673ed85b32458b31baeeae230520e4a0" } + charset-normalizer = { git = "https://github.com/jawah/charset_normalizer", rev = "ffdf7f5f08beb0ceb92dc0637e97382ba27cecfa" } poetry_editable = { path = "../poetry_editable", editable = true } "#})?; @@ -6867,7 +6867,7 @@ fn tool_uv_sources() -> Result<()> { Prepared 9 packages in [TIME] Installed 9 packages in [TIME] + anyio==4.3.0 - + boltons==24.0.1.dev0 (from git+https://github.com/mahmoud/boltons@57fbaa9b673ed85b32458b31baeeae230520e4a0) + + charset-normalizer==3.4.1 (from git+https://github.com/jawah/charset_normalizer@ffdf7f5f08beb0ceb92dc0637e97382ba27cecfa) + colorama==0.4.6 + idna==3.6 + packaging==24.1.dev0 (from git+https://github.com/pypa/packaging@32deafe8668a2130a3366b98154914d188f3718e) diff --git a/crates/uv/tests/it/pip_install_scenarios.rs b/crates/uv/tests/it/pip_install_scenarios.rs index e777439ca04e7..e5fc835d2af79 100644 --- a/crates/uv/tests/it/pip_install_scenarios.rs +++ b/crates/uv/tests/it/pip_install_scenarios.rs @@ -4091,7 +4091,7 @@ fn no_sdist_no_wheels_with_matching_abi() { ╰─▶ Because only package-a==1.0.0 is available and package-a==1.0.0 has no wheels with a matching Python ABI tag (e.g., `cp38`), we can conclude that all versions of package-a cannot be used. And because you require package-a, we can conclude that your requirements are unsatisfiable. 
- hint: Wheels are available for `package-a` (v1.0.0) with the following ABI tag: `graalpy310_graalpy240_310_native` + hint: You require CPython 3.8 (`cp38`), but we only found wheels for `package-a` (v1.0.0) with the following Python ABI tag: `graalpy310_graalpy240_310_native` "###); assert_not_installed( @@ -4177,7 +4177,7 @@ fn no_sdist_no_wheels_with_matching_python() { ╰─▶ Because only package-a==1.0.0 is available and package-a==1.0.0 has no wheels with a matching Python implementation tag (e.g., `cp38`), we can conclude that all versions of package-a cannot be used. And because you require package-a, we can conclude that your requirements are unsatisfiable. - hint: Wheels are available for `package-a` (v1.0.0) with the following Python tag: `graalpy310` + hint: You require CPython 3.8 (`cp38`), but we only found wheels for `package-a` (v1.0.0) with the following Python implementation tag: `graalpy310` "###); assert_not_installed( diff --git a/crates/uv/tests/it/pip_sync.rs b/crates/uv/tests/it/pip_sync.rs index e9ab956864476..a54d877215b79 100644 --- a/crates/uv/tests/it/pip_sync.rs +++ b/crates/uv/tests/it/pip_sync.rs @@ -2247,9 +2247,7 @@ fn sync_editable() -> Result<()> { let requirements_txt = context.temp_dir.child("requirements.txt"); requirements_txt.write_str(&indoc::formatdoc! {r" - boltons==23.1.1 - numpy==1.26.2 - # via poetry-editable + anyio==3.7.0 -e file://{poetry_editable} ", poetry_editable = poetry_editable.display() @@ -2263,11 +2261,10 @@ fn sync_editable() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] - Prepared 3 packages in [TIME] - Installed 3 packages in [TIME] - + boltons==23.1.1 - + numpy==1.26.2 + Resolved 2 packages in [TIME] + Prepared 2 packages in [TIME] + Installed 2 packages in [TIME] + + anyio==3.7.0 + poetry-editable==0.1.0 (from file://[TEMP_DIR]/poetry_editable) "### ); @@ -2280,8 +2277,8 @@ fn sync_editable() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] - Audited 3 packages in [TIME] + Resolved 2 packages in [TIME] + Audited 2 packages in [TIME] "### ); @@ -2295,7 +2292,7 @@ fn sync_editable() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] + Resolved 2 packages in [TIME] Prepared 1 package in [TIME] Uninstalled 1 package in [TIME] Installed 1 package in [TIME] @@ -2345,8 +2342,8 @@ fn sync_editable() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] - Audited 3 packages in [TIME] + Resolved 2 packages in [TIME] + Audited 2 packages in [TIME] "### ); @@ -2366,7 +2363,7 @@ fn sync_editable() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] + Resolved 2 packages in [TIME] Prepared 1 package in [TIME] Uninstalled 1 package in [TIME] Installed 1 package in [TIME] @@ -2391,7 +2388,7 @@ fn sync_editable() -> Result<()> { ----- stdout ----- ----- stderr ----- - Resolved 3 packages in [TIME] + Resolved 2 packages in [TIME] Prepared 1 package in [TIME] Uninstalled 1 package in [TIME] Installed 1 package in [TIME] diff --git a/crates/uv/tests/it/python_install.rs b/crates/uv/tests/it/python_install.rs index f521ee3f5fe0e..1e132cc062348 100644 --- a/crates/uv/tests/it/python_install.rs +++ b/crates/uv/tests/it/python_install.rs @@ -876,3 +876,47 @@ fn python_install_preview_broken_link() { ); }); } + +#[cfg(target_os = "macos")] +#[test] +fn python_dylib_install_name_is_patched_on_install() { + use assert_cmd::assert::OutputAssertExt; + use 
uv_python::managed::platform_key_from_env; + + let context: TestContext = TestContext::new_with_versions(&[]).with_filtered_python_keys(); + + // Install the latest version + context + .python_install() + .arg("--preview") + .arg("3.13.1") + .assert() + .success(); + + let dylib = context + .temp_dir + .child("managed") + .child(format!( + "cpython-3.13.1-{}", + platform_key_from_env().unwrap() + )) + .child("lib") + .child(format!( + "{}python3.13{}", + std::env::consts::DLL_PREFIX, + std::env::consts::DLL_SUFFIX + )); + + let mut cmd = std::process::Command::new("otool"); + cmd.arg("-D").arg(dylib.as_ref()); + + uv_snapshot!(context.filters(), cmd, @r###" + success: true + exit_code: 0 + ----- stdout ----- + [TEMP_DIR]/managed/cpython-3.13.1-[PLATFORM]/lib/libpython3.13.dylib: + [TEMP_DIR]/managed/cpython-3.13.1-[PLATFORM]/lib/libpython3.13.dylib + + ----- stderr ----- + "###); +} diff --git a/crates/uv/tests/it/snapshots/it__ecosystem__warehouse-lock-file.snap b/crates/uv/tests/it/snapshots/it__ecosystem__warehouse-lock-file.snap index 431a746d0ecb6..250e49b735ba0 100644 --- a/crates/uv/tests/it/snapshots/it__ecosystem__warehouse-lock-file.snap +++ b/crates/uv/tests/it/snapshots/it__ecosystem__warehouse-lock-file.snap @@ -4118,6 +4118,7 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/24/01/a4034a94a5f1828eb050230e7cf13af3ac23cf763512b6afe008d3def97c/watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84", size = 83012 }, { url = "https://files.pythonhosted.org/packages/8f/5e/c0d7dad506adedd584188578901871fe923abf6c0c5dc9e79d9be5c7c24e/watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429", size = 82996 }, { url = "https://files.pythonhosted.org/packages/85/e0/2a9f43008902427b5f074c497705d6ef8f815c85d4bc25fbf83f720a6159/watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a", size = 83002 }, + { url = "https://files.pythonhosted.org/packages/db/54/23e5845ef68e1817b3792b2a11fb2088d7422814d41af8186d9058c4ff07/watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d", size = 83002 }, ] [[package]] diff --git a/docs/concepts/projects/config.md b/docs/concepts/projects/config.md index 6e1f84632a3e4..74455c0d4c6a0 100644 --- a/docs/concepts/projects/config.md +++ b/docs/concepts/projects/config.md @@ -22,9 +22,9 @@ affects selection of dependency versions (they must support the same Python vers [Entry points](https://packaging.python.org/en/latest/specifications/entry-points/#entry-points) are the official term for an installed package to advertise interfaces. These include: -- [Command line interfaces]() -- [Graphical user interfaces]() -- [Plugin entry points]() +- [Command line interfaces](#command-line-interfaces) +- [Graphical user interfaces](#graphical-user-interfaces) +- [Plugin entry points](#plugin-entry-points) !!! important diff --git a/docs/configuration/environment.md b/docs/configuration/environment.md index 4b8876b5f5d83..31732a1c0de93 100644 --- a/docs/configuration/environment.md +++ b/docs/configuration/environment.md @@ -341,6 +341,13 @@ Specifies the directory where uv stores managed tools. Used ephemeral environments like CI to install uv to a specific path while preventing the installer from modifying shell profiles or environment variables. 
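As an aside on the `UV_VENV_SEED` variable introduced in `env_vars.rs` above (and documented in the section that follows): it appears to mirror the existing `--seed` flag of `uv venv`. A minimal usage sketch, assuming a truthy value such as `1` enables seeding; the exact accepted values are not shown in this diff:

```console
$ # Create a virtual environment seeded with pip (and, for Python
$ # versions before 3.12, setuptools and wheel as well).
$ UV_VENV_SEED=1 uv venv --python 3.12
$ .venv/bin/pip --version
```
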
+### `UV_VENV_SEED` + +Install seed packages (one or more of: `pip`, `setuptools`, and `wheel`) into the virtual environment +created by `uv venv`. + +Note that `setuptools` and `wheel` are not included in Python 3.12+ environments. + ## Externally defined variables diff --git a/docs/getting-started/installation.md b/docs/getting-started/installation.md index 1eec380863364..1a343629cdcff 100644 --- a/docs/getting-started/installation.md +++ b/docs/getting-started/installation.md @@ -25,7 +25,7 @@ uv provides a standalone installer to download and install uv: Request a specific version by including it in the URL: ```console - $ curl -LsSf https://astral.sh/uv/0.5.18/install.sh | sh + $ curl -LsSf https://astral.sh/uv/0.5.20/install.sh | sh ``` === "Windows" @@ -41,7 +41,7 @@ uv provides a standalone installer to download and install uv: Request a specific version by including it in the URL: ```console - $ powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.5.18/install.ps1 | iex" + $ powershell -ExecutionPolicy ByPass -c "irm https://astral.sh/uv/0.5.20/install.ps1 | iex" ``` !!! tip diff --git a/docs/guides/install-python.md b/docs/guides/install-python.md index 9cefc830ccb4e..bc3555cdcd2aa 100644 --- a/docs/guides/install-python.md +++ b/docs/guides/install-python.md @@ -1,3 +1,10 @@ +--- +title: Installing and managing Python +description: + A guide to using uv to install Python, including requesting specific versions, automatic + installation, viewing installed versions, and more. +--- + # Installing Python If Python is already installed on your system, uv will diff --git a/docs/guides/integration/aws-lambda.md b/docs/guides/integration/aws-lambda.md index 16cdab8e96913..d6e1f680b1c52 100644 --- a/docs/guides/integration/aws-lambda.md +++ b/docs/guides/integration/aws-lambda.md @@ -92,7 +92,7 @@ the second stage, we'll copy this directory over to the final image, omitting th other unnecessary files. ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.5.18 AS uv +FROM ghcr.io/astral-sh/uv:0.5.20 AS uv # First, bundle the dependencies into the task root. FROM public.ecr.aws/lambda/python:3.13 AS builder @@ -294,7 +294,7 @@ And confirm that opening http://127.0.0.1:8000/ in a web browser displays, "Hell Finally, we'll update the Dockerfile to include the local library in the deployment package: ```dockerfile title="Dockerfile" -FROM ghcr.io/astral-sh/uv:0.5.18 AS uv +FROM ghcr.io/astral-sh/uv:0.5.20 AS uv # First, bundle the dependencies into the task root. FROM public.ecr.aws/lambda/python:3.13 AS builder diff --git a/docs/guides/integration/docker.md b/docs/guides/integration/docker.md index 6d0ca0fb651dc..d7373a7d18740 100644 --- a/docs/guides/integration/docker.md +++ b/docs/guides/integration/docker.md @@ -28,7 +28,7 @@ $ docker run ghcr.io/astral-sh/uv --help uv provides a distroless Docker image including the `uv` binary. The following tags are published: - `ghcr.io/astral-sh/uv:latest` -- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.5.18` +- `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}`, e.g., `ghcr.io/astral-sh/uv:0.5.20` - `ghcr.io/astral-sh/uv:{major}.{minor}`, e.g., `ghcr.io/astral-sh/uv:0.5` (the latest patch version) @@ -69,7 +69,7 @@ In addition, uv publishes the following images: As with the distroless image, each image is published with uv version tags as `ghcr.io/astral-sh/uv:{major}.{minor}.{patch}-{base}` and -`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.5.18-alpine`. 
+`ghcr.io/astral-sh/uv:{major}.{minor}-{base}`, e.g., `ghcr.io/astral-sh/uv:0.5.20-alpine`. For more details, see the [GitHub Container](https://github.com/astral-sh/uv/pkgs/container/uv) page. @@ -107,13 +107,13 @@ Note this requires `curl` to be available. In either case, it is best practice to pin to a specific uv version, e.g., with: ```dockerfile -COPY --from=ghcr.io/astral-sh/uv:0.5.18 /uv /uvx /bin/ +COPY --from=ghcr.io/astral-sh/uv:0.5.20 /uv /uvx /bin/ ``` Or, with the installer: ```dockerfile -ADD https://astral.sh/uv/0.5.18/install.sh /uv-installer.sh +ADD https://astral.sh/uv/0.5.20/install.sh /uv-installer.sh ``` ### Installing a project diff --git a/docs/guides/integration/github.md b/docs/guides/integration/github.md index 1f3c4aa510d92..24a7bc9d482a5 100644 --- a/docs/guides/integration/github.md +++ b/docs/guides/integration/github.md @@ -47,7 +47,7 @@ jobs: uses: astral-sh/setup-uv@v5 with: # Install a specific version of uv. - version: "0.5.18" + version: "0.5.20" ``` ## Setting up Python diff --git a/docs/guides/integration/pre-commit.md b/docs/guides/integration/pre-commit.md index 8e7f623e7c394..f017ab48fe4b0 100644 --- a/docs/guides/integration/pre-commit.md +++ b/docs/guides/integration/pre-commit.md @@ -36,7 +36,7 @@ To compile requirements via pre-commit, add the following to the `.pre-commit-co ```yaml title=".pre-commit-config.yaml" - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.5.18 + rev: 0.5.20 hooks: # Compile requirements - id: pip-compile @@ -48,7 +48,7 @@ To compile alternative files, modify `args` and `files`: ```yaml title=".pre-commit-config.yaml" - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.5.18 + rev: 0.5.20 hooks: # Compile requirements - id: pip-compile @@ -61,7 +61,7 @@ To run the hook over multiple files at the same time: ```yaml title=".pre-commit-config.yaml" - repo: https://github.com/astral-sh/uv-pre-commit # uv version. - rev: 0.5.18 + rev: 0.5.20 hooks: # Compile requirements - id: pip-compile diff --git a/docs/guides/projects.md b/docs/guides/projects.md index a9d219162b550..aff63e72b5a27 100644 --- a/docs/guides/projects.md +++ b/docs/guides/projects.md @@ -1,3 +1,10 @@ +--- +title: Working on projects +description: + A guide to using uv to create and manage Python projects, including adding dependencies, running + commands, and building publishable distributions. +--- + # Working on projects uv supports managing Python projects, which define their dependencies in a `pyproject.toml` file. diff --git a/docs/guides/publish.md b/docs/guides/publish.md index d429ea26ba4ab..9e0fb114d1003 100644 --- a/docs/guides/publish.md +++ b/docs/guides/publish.md @@ -1,3 +1,8 @@ +--- +title: Publishing a package +description: A guide to using uv to build and publish Python packages to a package index, like PyPI. +--- + # Publishing a package uv supports building Python packages into source and binary distributions via `uv build` and diff --git a/docs/guides/scripts.md b/docs/guides/scripts.md index c6d10af63fdea..1dbf91b3aa1e5 100644 --- a/docs/guides/scripts.md +++ b/docs/guides/scripts.md @@ -1,3 +1,10 @@ +--- +title: Running scripts +description: + A guide to using uv to run Python scripts, including support for inline dependency metadata, + reproducible scripts, and more. +--- + # Running scripts A Python script is a file intended for standalone execution, e.g., with `python