mjx/mujoco/mjx/_src/io.py (21 changes: 2 additions & 19 deletions)

@@ -77,7 +77,7 @@ def _resolve_device(
 ) -> jax.Device:
   """Resolves a device based on the implementation."""
   impl = types.Impl(impl)
-  if impl == types.Impl.JAX:
+  if impl in {types.Impl.JAX, types.Impl.WARP}:
     device_0 = jax.devices()[0]
     logging.debug('Picking default device: %s.', device_0)
     return device_0
@@ -87,18 +87,6 @@ def _resolve_device(
     logging.debug('Picking default device: %s', cpu_0)
     return cpu_0
 
-  if impl == types.Impl.WARP:
-    # WARP implementation requires a CUDA GPU.
-    cuda_gpus = [d for d in jax.devices('cuda')]
-    if not cuda_gpus:
-      raise AssertionError(
-          'No CUDA GPU devices found in'
-          f' jax.devices("cuda")={jax.devices("cuda")}.'
-      )
-
-    logging.debug('Picking default device: %s', cuda_gpus[0])
-    return cuda_gpus[0]
-
   raise ValueError(f'Unsupported implementation: {impl}')
@@ -113,11 +101,6 @@ def _check_impl_device_compatibility(
   impl = types.Impl(impl)
 
   if impl == types.Impl.WARP:
-    if not _is_cuda_gpu_device(device):
-      raise AssertionError(
-          'Warp implementation requires a CUDA GPU device, got '
-          f'{device}.'
-      )
     if not mjxw.WARP_INSTALLED:
       raise RuntimeError(
           'Warp is not installed. Cannot use Warp implementation of MJX.'
@@ -865,7 +848,7 @@ def _make_data_warp(
 
   data = jax.device_put(data, device=device)
 
-  with wp.ScopedDevice('cuda:0'):  # pylint: disable=undefined-variable
+  with wp.ScopedDevice(None):  # pylint: disable=undefined-variable
     # Warm-up the warp kernel cache.
     # TODO(robotics-simulation): remove this warmup compilation once warp
     # stops unloading modules during XLA graph capture for tile kernels.
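
Net effect of the io.py changes above: with the CUDA-only branch removed, the WARP implementation resolves to jax.devices()[0] exactly like the JAX implementation, and the kernel warm-up scopes to Warp's current default device rather than hard-coding 'cuda:0'. A minimal illustrative sketch of that behavior, assuming jax and warp are importable as in io.py (this snippet is not part of the PR; resolve_default_device is a hypothetical stand-in for the new branch):

import jax
import warp as wp  # assumed installed, as the Warp impl requires

def resolve_default_device() -> jax.Device:
  # Mirrors the new branch in _resolve_device: the platform default
  # device (GPU if available, otherwise CPU) is used for both the
  # JAX and WARP implementations.
  return jax.devices()[0]

# wp.ScopedDevice(None) binds to Warp's current default device, so the
# warm-up no longer assumes a device named 'cuda:0' exists.
with wp.ScopedDevice(None):
  pass  # kernel warm-up would happen here
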
mjx/mujoco/mjx/_src/io_test.py (10 changes: 0 additions & 10 deletions)

@@ -142,8 +142,6 @@ def setUp(self):
   def test_put_model(self, xml, impl):
     if impl == 'warp' and not mjxw.WARP_INSTALLED:
       self.skipTest('Warp not installed.')
-    if impl == 'warp' and not mjx_io.has_cuda_gpu_device():
-      self.skipTest('No CUDA GPU device available.')
 
     m = mujoco.MjModel.from_xml_string(xml)
     mx = mjx.put_model(m, impl=impl)
@@ -312,8 +310,6 @@ def test_put_model_warp_has_expected_shapes(self):
     """Tests that put_model produces expected shapes for MuJoCo Warp."""
     if not mjxw.WARP_INSTALLED:
       self.skipTest('Warp not installed.')
-    if not mjx_io.has_cuda_gpu_device():
-      self.skipTest('No CUDA GPU device available.')
 
     m = mujoco.MjModel.from_xml_string(_MULTIPLE_CONSTRAINTS)
     mx = mjx.put_model(m, impl='warp')
@@ -458,8 +454,6 @@ def test_make_data(self, impl: str):
   def test_make_data_warp(self):
     if not mjxw.WARP_INSTALLED:
       self.skipTest('Warp is not installed.')
-    if not mjx_io.has_cuda_gpu_device():
-      self.skipTest('No CUDA GPU device.')
     m = mujoco.MjModel.from_xml_string(_MULTIPLE_CONVEX_OBJECTS)
     d = mjx.make_data(m, impl='warp', nconmax=9, njmax=23)
     self.assertEqual(d._impl.contact__dist.shape[0], 9)
@@ -779,8 +773,6 @@ def test_make_data_warp_has_expected_shapes(self):
     """Tests that make_data produces expected shapes for MuJoCo Warp."""
     if not mjxw.WARP_INSTALLED:
       self.skipTest('Warp is not installed.')
-    if not mjx_io.has_cuda_gpu_device():
-      self.skipTest('No CUDA GPU device.')
 
     m = mujoco.MjModel.from_xml_string(_MULTIPLE_CONSTRAINTS)
     dx = mjx.make_data(m, impl='warp')
@@ -803,8 +795,6 @@ def test_data_slice(self, impl):
     """Tests that slice on Data works as expected."""
     if impl == 'warp' and not mjxw.WARP_INSTALLED:
       self.skipTest('Warp is not installed.')
-    if impl == 'warp' and not mjx_io.has_cuda_gpu_device():
-      self.skipTest('No CUDA GPU device.')
 
     m = mujoco.MjModel.from_xml_string(_MULTIPLE_CONSTRAINTS)
     dx = jax.vmap(lambda x: mjx.make_data(m, impl=impl))(jp.arange(10))
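
With the has_cuda_gpu_device() guards removed, these tests now skip only when Warp itself is missing. A minimal usage sketch of the user-facing effect, assuming mujoco, mjx, and warp are installed (the one-body model XML below is a stand-in for illustration, not taken from this PR):

import mujoco
from mujoco import mjx

m = mujoco.MjModel.from_xml_string(
    '<mujoco><worldbody><body><freejoint/><geom size="0.1"/></body>'
    '</worldbody></mujoco>')
mx = mjx.put_model(m, impl='warp')  # device defaults to jax.devices()[0]
dx = mjx.make_data(m, impl='warp')  # no up-front CUDA-GPU assertion
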