From 73ee8062cda81c8e81565f720b06e9d8c9d1d623 Mon Sep 17 00:00:00 2001
From: Emmanuel Ferdman
Date: Sat, 17 May 2025 02:32:00 -0700
Subject: [PATCH 1/2] Migrate to correct logger interface

Signed-off-by: Emmanuel Ferdman
---
 torch_xla/distributed/xla_backend.py | 4 ++--
 torchax/torchax/tensor.py            | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/torch_xla/distributed/xla_backend.py b/torch_xla/distributed/xla_backend.py
index 7222a7bf3dcd..6001fd2b1715 100644
--- a/torch_xla/distributed/xla_backend.py
+++ b/torch_xla/distributed/xla_backend.py
@@ -46,7 +46,7 @@ def __init__(self, prefix_store, rank, size, timeout):
   def getBackendName(self):
     return 'xla'
 
-  # pytorch's process group is unable to retrive the group size from python level. It should
+  # pytorch's process group is unable to retrieve the group size from python level. It should
   # already been support in C++ level: https://github.com/pytorch/pytorch/blob/7b1988f9222f3dec5cc2012afce84218199748ae/torch/csrc/distributed/c10d/ProcessGroup.cpp#L148-L152
   # For now we manually set the group name property as a temporary solution.
   def _set_group_name(self, name: str) -> None:
@@ -391,7 +391,7 @@ def new_xla_process_group(ranks=None,
       else:
         pg._mesh = [ranks]
     else:
-      logging.warn(
+      logging.warning(
           f'Can\'t infer process group mesh from given ranks "{str(ranks)}". '
           'The process group will use the entire world as its collective comm group.'
       )
diff --git a/torchax/torchax/tensor.py b/torchax/torchax/tensor.py
index 66e2b55994b0..0d7328b44af2 100644
--- a/torchax/torchax/tensor.py
+++ b/torchax/torchax/tensor.py
@@ -162,7 +162,7 @@ def jax_device(self):
 
   @property
   def data(self):
-    logger.warn("In-place to .data modifications still results a copy on TPU")
+    logger.warning("In-place to .data modifications still results a copy on TPU")
     return self
 
   @data.setter

From e03291264c60f16eaa763f15f714781e53cd007b Mon Sep 17 00:00:00 2001
From: Emmanuel Ferdman
Date: Mon, 26 May 2025 06:57:23 -0700
Subject: [PATCH 2/2] Migrate to correct logger interface

Signed-off-by: Emmanuel Ferdman
---
 torchax/torchax/tensor.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/torchax/torchax/tensor.py b/torchax/torchax/tensor.py
index 0d7328b44af2..865a56b034e0 100644
--- a/torchax/torchax/tensor.py
+++ b/torchax/torchax/tensor.py
@@ -162,7 +162,9 @@ def jax_device(self):
 
   @property
   def data(self):
-    logger.warning("In-place to .data modifications still results a copy on TPU")
+    logger.warning(
+        "In-place to .data modifications still results a copy on TPU"
+    )
     return self
 
   @data.setter
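
Background on the interface being migrated to: Logger.warn() and the
module-level logging.warn() are obsolete aliases of warning(), deprecated
since Python 3.3; calling them emits a DeprecationWarning before delegating
to warning(), and they may be removed in a future Python release. The sketch
below is a minimal standalone demonstration of the deprecation, independent
of the torch_xla/torchax code in the patches above (the "demo" logger name
is made up for illustration):

    import logging
    import warnings

    # DeprecationWarning is filtered out by default in most contexts;
    # force it to display so the deprecated call is visible on stderr.
    warnings.simplefilter("always", DeprecationWarning)

    logging.basicConfig(level=logging.WARNING)
    logger = logging.getLogger("demo")  # arbitrary logger name

    logger.warning("supported interface")  # the documented API
    logger.warn("deprecated alias")        # emits DeprecationWarning, then logs

Both calls log the message on current Python releases, but only warning() is
part of the documented logging API, which is why the patches rename every
warn() call site.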