Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions .authors.yml
Original file line number Diff line number Diff line change
Expand Up @@ -1121,7 +1121,7 @@
alternate_emails:
- becker.mr@gmail.com
- beckermr@users.noreply.github.com
num_commits: 30
num_commits: 31
first_commit: 2019-10-17 23:05:16
github: beckermr
- name: Jinzhe Zeng
Expand Down Expand Up @@ -1327,7 +1327,7 @@
- name: Jaime Rodríguez-Guerra
email: jaimergp@users.noreply.github.com
github: jaimergp
num_commits: 27
num_commits: 28
first_commit: 2022-11-02 19:34:51
- name: Dave Clements
email: tnabtaf@gmail.com
Expand Down
22 changes: 21 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,24 @@
[//]: # (current developments)

## 25.3.1 (2025-03-24)

### Enhancements

* Add regression tests for issues #5644 and #5645. (#5648)

### Bug fixes

* Revert #5603 to avoid rendering regressions in multi-output recipes. (#5644, #5645 via #5647)
* Use more appropriate (stricter) permissions for temporary build scripts written to `$SRC_DIR`.

### Contributors

* @jaimergp
* @kenodegard
* @beckermr



## 25.3.0 (2025-03-17)

### Enhancements
Expand All @@ -12,9 +31,10 @@
* Fix subdirectory check for `conda_build.utils.merge_tree`. (#4976 via #4977)
* Perform build string pin compatibility checks with glob matching instead of strict string equality. (#5600)
* Warn about parsed and raw output block mismatches. This can cause issues with custom build strings, among others. The recommendation is to avoid Jinja flow control to build the output list. Use `skip: true` as necessary. (#5571 via #5601)
* Fixes transitive subpackage dependency resolution issue #3308. (#5603)
* Fix transitive subpackage dependency resolution issue #3308. (#5603)
* Limit `patchelf` to `<0.18`. (#5607)
* Use the `CONDA_EXE` as defined in `context.conda_exe_vars_dict` instead of defaulting to the one in the `base` env. (#5637)
* Fix LIEF>=0.15 compatibility. (#5626 via #5627)

### Other

Expand Down
15 changes: 8 additions & 7 deletions conda_build/build.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@
)
from .utils import (
CONDA_PACKAGE_EXTENSIONS,
create_file_with_permissions,
env_var,
glob,
on_mac,
Expand Down Expand Up @@ -2440,7 +2441,7 @@ def build(
exclude_pattern = re.compile(
r"|".join(rf"(?:^{exc}(?:\s|$|\Z))" for exc in excludes)
)
add_upstream_pins(m, False, exclude_pattern, [])
add_upstream_pins(m, False, exclude_pattern)

create_build_envs(top_level_pkg, notest)

Expand Down Expand Up @@ -3063,24 +3064,24 @@ def write_build_scripts(m, script, build_file):

work_file = join(m.config.work_dir, "conda_build.sh")
env_file = join(m.config.work_dir, "build_env_setup.sh")
with open(env_file, "w") as bf:

with create_file_with_permissions(env_file, 0o600) as bf:
for k, v in env.items():
if v != "" and v is not None:
bf.write(f'export {k}="{v}"\n')

if m.activate_build_script:
_write_sh_activation_text(bf, m)
with open(work_file, "w") as bf:

with create_file_with_permissions(work_file, 0o700) as bf:
# bf.write('set -ex\n')
bf.write("if [ -z ${CONDA_BUILD+x} ]; then\n")
bf.write(f" source {env_file}\n")
bf.write(f" source '{env_file}'\n")
bf.write("fi\n")
if script:
bf.write(script)
if isfile(build_file) and not script:
bf.write(open(build_file).read())
bf.write(Path(build_file).read_text())

os.chmod(work_file, 0o766)
return work_file, env_file


Expand Down
53 changes: 5 additions & 48 deletions conda_build/render.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,6 @@ def get_env_dependencies(
exclude_pattern=None,
permit_unsatisfiable_variants=False,
merge_build_host_on_same_platform=True,
extra_specs=None,
):
specs = m.get_depends_top_and_out(env)
# replace x.x with our variant's numpy version, or else conda tries to literally go get x.x
Expand All @@ -149,8 +148,6 @@ def get_env_dependencies(
)

dependencies = set(dependencies)
if extra_specs:
dependencies |= set(extra_specs)
unsat = None
random_string = "".join(
random.choice(string.ascii_uppercase + string.digits) for _ in range(10)
Expand Down Expand Up @@ -186,7 +183,7 @@ def get_env_dependencies(
specs = [package_record_to_requirement(prec) for prec in precs]
return (
utils.ensure_list(
(specs + subpackages + pass_through_deps + (extra_specs or []))
(specs + subpackages + pass_through_deps)
or m.get_value(f"requirements/{env}", [])
),
precs,
Expand Down Expand Up @@ -444,15 +441,13 @@ def _read_upstream_pin_files(
env,
permit_unsatisfiable_variants,
exclude_pattern,
extra_specs,
):
deps, precs, unsat = get_env_dependencies(
m,
env,
m.config.variant,
exclude_pattern,
permit_unsatisfiable_variants=permit_unsatisfiable_variants,
extra_specs=extra_specs,
)
# extend host deps with strong build run exports. This is important for things like
# vc feature activation to work correctly in the host env.
Expand All @@ -464,18 +459,12 @@ def _read_upstream_pin_files(
)


def add_upstream_pins(
m: MetaData, permit_unsatisfiable_variants, exclude_pattern, extra_specs
):
def add_upstream_pins(m: MetaData, permit_unsatisfiable_variants, exclude_pattern):
"""Applies run_exports from any build deps to host and run sections"""
# if we have host deps, they're more important than the build deps.
requirements = m.get_section("requirements")
build_deps, build_unsat, extra_run_specs_from_build = _read_upstream_pin_files(
m,
"build",
permit_unsatisfiable_variants,
exclude_pattern,
[] if m.is_cross else extra_specs,
m, "build", permit_unsatisfiable_variants, exclude_pattern
)

# is there a 'host' section?
Expand All @@ -501,7 +490,7 @@ def add_upstream_pins(
host_reqs.extend(extra_run_specs_from_build.get("strong", []))

host_deps, host_unsat, extra_run_specs_from_host = _read_upstream_pin_files(
m, "host", permit_unsatisfiable_variants, exclude_pattern, extra_specs
m, "host", permit_unsatisfiable_variants, exclude_pattern
)
if m.noarch or m.noarch_python:
extra_run_specs = set(extra_run_specs_from_host.get("noarch", []))
Expand Down Expand Up @@ -658,40 +647,9 @@ def finalize_metadata(
utils.insert_variant_versions(requirements, m.config.variant, "build")
utils.insert_variant_versions(requirements, m.config.variant, "host")

host_requirements = requirements.get("host" if m.is_cross else "build", [])
host_requirement_names = [req.split(" ")[0] for req in host_requirements]
extra_specs = []
if output and output_excludes and not is_top_level and host_requirement_names:
reqs = {}

# we first make a mapping of output -> requirements
for (name, _), (_, other_meta) in m.other_outputs.items():
if name == m.name():
continue
other_meta_reqs = other_meta.meta.get("requirements", {}).get("run", [])
reqs[name] = set(other_meta_reqs)

seen = set()
# for each subpackage that is a dependency we add its dependencies
# and transitive dependencies if the dependency of the subpackage
# is a subpackage.
to_process = set(
name for (name, _) in m.other_outputs if name in host_requirement_names
)
while to_process:
name = to_process.pop()
if name == m.name():
continue
for req in reqs[name]:
req_name = req.split(" ")[0]
if req_name not in reqs:
extra_specs.append(req)
elif req_name not in seen:
to_process.add(req_name)

m = parent_metadata.get_output_metadata(m.get_rendered_output(m.name()))
build_unsat, host_unsat = add_upstream_pins(
m, permit_unsatisfiable_variants, exclude_pattern, extra_specs
m, permit_unsatisfiable_variants, exclude_pattern
)
# getting this AFTER add_upstream_pins is important, because that function adds deps
# to the metadata.
Expand Down Expand Up @@ -719,7 +677,6 @@ def finalize_metadata(
m.config.variant,
exclude_pattern=exclude_pattern,
permit_unsatisfiable_variants=permit_unsatisfiable_variants,
extra_specs=extra_specs,
)
full_build_dep_versions = {
dep.split()[0]: " ".join(dep.split()[1:]) for dep in full_build_deps
Expand Down
37 changes: 37 additions & 0 deletions conda_build/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
import mmap
import os
import re
import secrets
import shutil
import stat
import subprocess
Expand Down Expand Up @@ -2260,3 +2261,39 @@ def is_conda_pkg(pkg_path: str) -> bool:

def package_record_to_requirement(prec: PackageRecord) -> str:
    """Render *prec* as a ``name version build`` requirement string."""
    return " ".join((prec.name, prec.version, prec.build))


@contextlib.contextmanager
def set_umask(mask: int = 0) -> Iterable[None]:
    """Temporarily set the process umask to *mask*.

    The previous umask is restored on exit — even when the body of the
    ``with`` block raises — so a failure inside the block cannot leak a
    zeroed umask into the rest of the process.
    """
    current = os.umask(mask)
    try:
        yield
    finally:
        # Restore unconditionally; without the try/finally an exception
        # raised inside the with-block would skip the restoration.
        os.umask(current)


@contextlib.contextmanager
def create_file_with_permissions(path: str, permissions: int):
    """
    Open a new file for writing, with *permissions* applied from creation time.

    This is achieved by creating a temporary directory in the same parent
    directory as *path*, opening a new file inside it with the requested
    permission bits, yielding the file object so the caller can write the
    contents, and then moving the temporary file to the target location
    (``shutil.move`` preserves the permissions).

    The umask is zeroed while the file is created and restored afterwards.
    This is needed so the permission bits apply exactly as passed: with a
    system umask of 022, for example, asking for 666 would otherwise yield
    a file with permissions 644.

    If the caller's block raises, the target file is not created and the
    original umask is still restored.
    """

    def opener(opener_path, flags):
        # Hand the permission bits to os.open so they apply at creation.
        return os.open(opener_path, flags, mode=permissions)

    dirname = os.path.dirname(path)
    current_umask = os.umask(0)
    try:
        with TemporaryDirectory(dir=dirname) as tmpdir:
            # Unpredictable name inside a private scratch dir avoids clashes.
            tmp_path = os.path.join(tmpdir, secrets.token_urlsafe(64))
            with open(tmp_path, "w", opener=opener) as fh:
                yield fh

            # Only reached when the caller's block completed without error.
            shutil.move(tmp_path, path)
    finally:
        # Restore even on error; the previous implementation delegated to a
        # helper that skipped restoration when the with-body raised.
        os.umask(current_umask)
103 changes: 103 additions & 0 deletions tests/test-recipes/metadata/_grpc/conda_build_config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
c_ares:
- '1'
c_compiler:
- conda-forge::gcc
c_compiler_version:
- '13'
c_stdlib:
- sysroot
c_stdlib_version:
- '2.17'
cdt_name:
- conda
channel_sources:
- conda-forge
channel_targets:
- conda-forge main
cxx_compiler:
- conda-forge::gxx
cxx_compiler_version:
- '13'
docker_image:
- quay.io/condaforge/linux-anvil-x86_64:alma9
libabseil:
- '20250127'
libgrpc:
- '1.71'
libprotobuf:
- 5.29.3
openssl:
- '3'
pin_run_as_build:
python:
min_pin: x.x
max_pin: x.x
python:
- 3.10.* *_cpython
- 3.11.* *_cpython
- 3.12.* *_cpython
- 3.13.* *_cp313
- 3.9.* *_cpython
re2:
- 2024.07.02
target_platform:
- linux-64
zip_keys:
- - c_compiler_version
- cxx_compiler_version
zlib:
- '1'

# osx-arm64 version for local testing
# MACOSX_DEPLOYMENT_TARGET:
# - '11.0'
# MACOSX_SDK_VERSION:
# - '11.0'
# c_ares:
# - '1'
# c_compiler:
# - clang
# c_compiler_version:
# - '18'
# c_stdlib:
# - macosx_deployment_target
# c_stdlib_version:
# - '11.0'
# channel_sources:
# - conda-forge
# channel_targets:
# - conda-forge main
# cxx_compiler:
# - clangxx
# cxx_compiler_version:
# - '18'
# libabseil:
# - '20250127'
# libgrpc:
# - '1.71'
# libprotobuf:
# - 5.29.3
# macos_machine:
# - arm64-apple-darwin20.0.0
# openssl:
# - '3'
# pin_run_as_build:
# python:
# min_pin: x.x
# max_pin: x.x
# python:
# - 3.10.* *_cpython
# - 3.11.* *_cpython
# - 3.12.* *_cpython
# - 3.13.* *_cp313
# - 3.9.* *_cpython
# re2:
# - 2024.07.02
# target_platform:
# - osx-arm64
# zip_keys:
# - - c_compiler_version
# - cxx_compiler_version
# zlib:
# - '1'

Loading
Loading